本文整理汇总了Java中org.elasticsearch.hadoop.rest.EsHadoopParsingException类的典型用法代码示例。如果您正苦于以下问题:Java EsHadoopParsingException类的具体用法?Java EsHadoopParsingException怎么用?Java EsHadoopParsingException使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
EsHadoopParsingException类属于org.elasticsearch.hadoop.rest包,在下文中一共展示了EsHadoopParsingException类的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: read
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //导入依赖的package包/类
/**
 * Converts the token at the parser's current position into a Java object,
 * dispatching on the token type: arrays and objects recurse, scalar values
 * are parsed according to the field's Elasticsearch type.
 *
 * @param fieldName    name of the field being read (used in error messages)
 * @param t            current parser token
 * @param fieldMapping mapping key used to resolve the field's ES type
 * @return the deserialized value, or {@code null} for non-value tokens
 * @throws EsHadoopParsingException if the scalar value cannot be parsed
 */
protected Object read(String fieldName, Token t, String fieldMapping) {
    // Containers first: delegate to the recursive readers.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType esType = mapping(fieldMapping);
    if (!t.isValue()) {
        // Neither a container nor a scalar — nothing to materialize.
        return null;
    }
    // Capture the raw text up front so a parse failure can report it verbatim.
    String raw = parser.text();
    try {
        return parseValue(esType);
    } catch (Exception ex) {
        throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
    }
}
示例2: authWorks
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //导入依赖的package包/类
// Verifies that the username/password configured on the job builder are sent
// to Elasticsearch as an HTTP Basic Authorization header. The mock server
// records requests; the job is deliberately killed after the first one since
// only the header of that request is under test.
@Test public void authWorks() throws InterruptedException {
es.enqueue(new MockResponse()); // let the HEAD request pass, so we can trap the header value
es.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AT_START)); // kill the job
ElasticsearchDependenciesJob job = ElasticsearchDependenciesJob.builder()
.username("foo")
.password("bar")
.hosts(es.url("").toString())
.build();
try {
job.run();
} catch (EsHadoopParsingException e) {
// this is ok as we aren't trying to emulate the whole server
}
// The recorded request must carry Basic auth for foo:bar; trim() drops the
// trailing newline some Base64 encoders append.
assertThat(es.takeRequest().getHeader("Authorization"))
.isEqualTo("Basic " + encodeBase64String("foo:bar".getBytes(Util.UTF_8)).trim());
}
示例3: authWorksWithSsl
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //导入依赖的package包/类
// Same contract as the plain-HTTP auth test, but over TLS: credentials set on
// the builder must reach the server as a Basic Authorization header when the
// mock server speaks HTTPS with a self-signed certificate.
@Test public void authWorksWithSsl() throws InterruptedException {
es.useHttps(SslClient.localhost().socketFactory, false);
es.enqueue(new MockResponse()); // let the HEAD request pass, so we can trap the header value
es.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AT_START)); // kill the job
ElasticsearchDependenciesJob.Builder builder = ElasticsearchDependenciesJob.builder()
.username("foo")
.password("bar")
.hosts(es.url("").toString());
// temporarily hack-in self-signed until https://github.com/openzipkin/zipkin/issues/1683
builder.sparkProperties.put("es.net.ssl.cert.allow.self.signed", "true");
ElasticsearchDependenciesJob job = builder.build();
try {
job.run();
} catch (EsHadoopParsingException e) {
// this is ok as we aren't trying to emulate the whole server
}
// Only the first recorded request's header matters; trim() drops any
// trailing newline from the Base64 encoder.
assertThat(es.takeRequest().getHeader("Authorization"))
.isEqualTo("Basic " + encodeBase64String("foo:bar".getBytes(Util.UTF_8)).trim());
}
示例4: read
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //导入依赖的package包/类
/**
 * Converts the token at the parser's current position into a Java object.
 * Arrays and objects recurse into the dedicated readers; scalar values are
 * parsed per the field's Elasticsearch type, and fields mapped as arrays are
 * wrapped in a singleton list so downstream consumers always see a list.
 *
 * @param fieldName    name of the field being read (used in error messages)
 * @param t            current parser token
 * @param fieldMapping mapping key used to resolve the field's ES type
 * @return the deserialized value, or {@code null} for non-value tokens
 * @throws EsHadoopParsingException if the scalar value cannot be parsed
 */
protected Object read(String fieldName, Token t, String fieldMapping) {
    // Containers first: delegate to the recursive readers.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType esType = mapping(fieldMapping);
    if (!t.isValue()) {
        // Neither a container nor a scalar — nothing to materialize.
        return null;
    }
    // Capture the raw text up front so a parse failure can report it verbatim.
    String raw = parser.text();
    try {
        // Array-mapped fields always surface as lists, even for a lone value.
        return isArrayField(fieldMapping)
                ? singletonList(fieldMapping, parseValue(esType))
                : parseValue(esType);
    } catch (Exception ex) {
        throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
    }
}
示例5: readListItem
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //导入依赖的package包/类
/**
 * Reads a single element of a list from the parser's current position.
 * Nested arrays and objects recurse; scalar elements are parsed according to
 * the field's Elasticsearch type.
 *
 * @param fieldName    name of the enclosing field (used in error messages)
 * @param t            current parser token
 * @param fieldMapping mapping key used to resolve the element's ES type
 * @return the deserialized element, or {@code null} for non-value tokens
 * @throws EsHadoopParsingException if the scalar element cannot be parsed
 */
protected Object readListItem(String fieldName, Token t, String fieldMapping) {
    // Nested containers recurse into the dedicated readers.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType esType = mapping(fieldMapping);
    if (t.isValue()) {
        // Keep the raw text so a parse failure can report it verbatim.
        String raw = parser.text();
        try {
            return parseValue(esType);
        } catch (Exception ex) {
            throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
        }
    }
    // Not a container and not a scalar — nothing to materialize.
    return null;
}