本文整理匯總了Java中org.elasticsearch.hadoop.rest.EsHadoopParsingException類的典型用法代碼示例。如果您正苦於以下問題:Java EsHadoopParsingException類的具體用法?Java EsHadoopParsingException怎麽用?Java EsHadoopParsingException使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。
EsHadoopParsingException類屬於org.elasticsearch.hadoop.rest包,在下文中一共展示了EsHadoopParsingException類的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: read
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //導入依賴的package包/類
/**
 * Reads the value at the parser's current token for the given field.
 * Nested arrays and objects are delegated to {@code list}/{@code map};
 * scalar tokens are parsed according to the field's mapped type.
 *
 * @throws EsHadoopParsingException if the raw scalar value cannot be parsed
 */
protected Object read(String fieldName, Token t, String fieldMapping) {
    // Delegate nested structures before any scalar handling.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType mappedType = mapping(fieldMapping);
    if (!t.isValue()) {
        // Neither a container nor a scalar value token.
        return null;
    }
    // Capture the raw text up front so the error message can include it.
    String raw = parser.text();
    try {
        return parseValue(mappedType);
    } catch (Exception ex) {
        throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
    }
}
示例2: authWorks
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //導入依賴的package包/類
/** Verifies that basic-auth credentials are sent as an Authorization header. */
@Test public void authWorks() throws InterruptedException {
    // First response lets the initial HEAD probe through so we can capture its
    // header; the second drops the connection to terminate the job early.
    es.enqueue(new MockResponse());
    es.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AT_START));

    ElasticsearchDependenciesJob authedJob = ElasticsearchDependenciesJob.builder()
        .username("foo")
        .password("bar")
        .hosts(es.url("").toString())
        .build();

    try {
        authedJob.run();
    } catch (EsHadoopParsingException expected) {
        // Expected: the mock server never emulates real responses.
    }

    String expectedHeader = "Basic " + encodeBase64String("foo:bar".getBytes(Util.UTF_8)).trim();
    assertThat(es.takeRequest().getHeader("Authorization")).isEqualTo(expectedHeader);
}
示例3: authWorksWithSsl
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //導入依賴的package包/類
/** Same as {@code authWorks}, but over HTTPS with a self-signed certificate. */
@Test public void authWorksWithSsl() throws InterruptedException {
    es.useHttps(SslClient.localhost().socketFactory, false);
    // First response lets the initial HEAD probe through so we can capture its
    // header; the second drops the connection to terminate the job early.
    es.enqueue(new MockResponse());
    es.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AT_START));

    ElasticsearchDependenciesJob.Builder builder = ElasticsearchDependenciesJob.builder()
        .username("foo")
        .password("bar")
        .hosts(es.url("").toString());
    // temporarily hack-in self-signed until https://github.com/openzipkin/zipkin/issues/1683
    builder.sparkProperties.put("es.net.ssl.cert.allow.self.signed", "true");

    ElasticsearchDependenciesJob sslJob = builder.build();
    try {
        sslJob.run();
    } catch (EsHadoopParsingException expected) {
        // Expected: the mock server never emulates real responses.
    }

    String expectedHeader = "Basic " + encodeBase64String("foo:bar".getBytes(Util.UTF_8)).trim();
    assertThat(es.takeRequest().getHeader("Authorization")).isEqualTo(expectedHeader);
}
示例4: read
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //導入依賴的package包/類
/**
 * Reads the value at the parser's current token for the given field.
 * Nested arrays and objects are delegated to {@code list}/{@code map}.
 * A scalar value for a mapping declared as an array field is wrapped in a
 * single-element list; other scalars are parsed to their mapped type.
 *
 * @throws EsHadoopParsingException if the raw scalar value cannot be parsed
 */
protected Object read(String fieldName, Token t, String fieldMapping) {
    // Delegate nested structures before any scalar handling.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType mappedType = mapping(fieldMapping);
    if (!t.isValue()) {
        // Neither a container nor a scalar value token.
        return null;
    }
    // Capture the raw text up front so the error message can include it.
    String raw = parser.text();
    try {
        Object parsed = parseValue(mappedType);
        // Array-mapped fields always surface as lists, even for lone scalars.
        return isArrayField(fieldMapping) ? singletonList(fieldMapping, parsed) : parsed;
    } catch (Exception ex) {
        throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
    }
}
示例5: readListItem
import org.elasticsearch.hadoop.rest.EsHadoopParsingException; //導入依賴的package包/類
/**
 * Reads a single element of a list at the parser's current token.
 * Nested arrays and objects are delegated to {@code list}/{@code map};
 * scalar tokens are parsed according to the field's mapped type.
 *
 * @throws EsHadoopParsingException if the raw scalar value cannot be parsed
 */
protected Object readListItem(String fieldName, Token t, String fieldMapping) {
    // Delegate nested structures before any scalar handling.
    if (t == Token.START_ARRAY) {
        return list(fieldName, fieldMapping);
    }
    if (t == Token.START_OBJECT) {
        return map(fieldMapping);
    }
    FieldType mappedType = mapping(fieldMapping);
    if (!t.isValue()) {
        // Neither a container nor a scalar value token.
        return null;
    }
    // Capture the raw text up front so the error message can include it.
    String raw = parser.text();
    try {
        return parseValue(mappedType);
    } catch (Exception ex) {
        throw new EsHadoopParsingException(String.format(Locale.ROOT, "Cannot parse value [%s] for field [%s]", raw, fieldName), ex);
    }
}