本文整理汇总了Java中org.elasticsearch.spark.rdd.api.java.JavaEsSpark.saveJsonToEs方法的典型用法代码示例。如果您正苦于以下问题:Java JavaEsSpark.saveJsonToEs方法的具体用法?Java JavaEsSpark.saveJsonToEs怎么用?Java JavaEsSpark.saveJsonToEs使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.elasticsearch.spark.rdd.api.java.JavaEsSpark
的用法示例。
在下文中一共展示了JavaEsSpark.saveJsonToEs方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testEsRDDWriteAsJsonMultiWrite
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; //导入方法依赖的package包/类
/**
 * Verifies that pre-rendered JSON (both String and byte[] RDDs) can be saved
 * through every saveJsonToEs/saveJsonByteArrayToEs overload, and that the
 * {airport} pattern in the resource routes each document to its own index.
 */
public void testEsRDDWriteAsJsonMultiWrite() throws Exception {
    // Two documents whose "airport" field drives the multi-resource {airport} pattern.
    String json1 = "{\"reason\" : \"business\",\"airport\" : \"SFO\"}";
    String json2 = "{\"participants\" : 5,\"airport\" : \"OTP\"}";

    // String RDD, saved through all three overloads: resource only,
    // resource + explicit (empty) settings map, settings map carrying ES_RESOURCE.
    JavaRDD<String> stringRDD = sc.parallelize(ImmutableList.of(json1, json2));
    JavaEsSpark.saveJsonToEs(stringRDD, "spark-test/json-{airport}");
    JavaEsSpark.saveJsonToEs(stringRDD, "spark-test/json1-{airport}", Collections.<String, String> emptyMap());
    JavaEsSpark.saveJsonToEs(stringRDD, ImmutableMap.of(ES_RESOURCE, "spark-test/json2-{airport}"));

    // Same three overloads for the byte[] flavor.
    byte[] json1BA = json1.getBytes();
    byte[] json2BA = json2.getBytes();
    JavaRDD<byte[]> byteRDD = sc.parallelize(ImmutableList.of(json1BA, json2BA));
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test/json-ba-{airport}");
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test/json-ba1-{airport}", Collections.<String, String> emptyMap());
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, ImmutableMap.of(ES_RESOURCE, "spark-test/json-ba2-{airport}"));

    // Every save above must have created one resource per airport value.
    for (String prefix : ImmutableList.of("json", "json1", "json2", "json-ba", "json-ba1", "json-ba2")) {
        assertTrue(RestUtils.exists("spark-test/" + prefix + "-SFO"));
        assertTrue(RestUtils.exists("spark-test/" + prefix + "-OTP"));
    }
    // Spot-check that the document bodies landed in the right index.
    assertThat(RestUtils.get("spark-test/json-SFO/_search?"), containsString("business"));
    assertThat(RestUtils.get("spark-test/json-OTP/_search?"), containsString("participants"));
}
示例2: store
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; //导入方法依赖的package包/类
/**
 * Serializes the given dependency links (together with the {@code day} field of
 * this instance) to a single JSON document and writes it to the given
 * Elasticsearch resource. Does nothing when the list is empty.
 *
 * @throws IllegalStateException if JSON serialization fails
 */
private void store(JavaSparkContext javaSparkContext, List<Dependency> dependencyLinks, String resource) {
    // Nothing to persist — avoid creating an empty document.
    if (dependencyLinks.isEmpty()) {
        return;
    }
    final String json;
    try {
        json = new ObjectMapper().writeValueAsString(new ElasticsearchDependencies(dependencyLinks, day));
    } catch (JsonProcessingException e) {
        // Preserve the cause; a serialization failure here is a programming error.
        throw new IllegalStateException("Could not serialize dependencies", e);
    }
    // One document per invocation, shipped via a single-element RDD.
    JavaEsSpark.saveJsonToEs(javaSparkContext.parallelize(Collections.singletonList(json)), resource);
}
示例3: testEsRDDWriteAsJsonMultiWrite
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; //导入方法依赖的package包/类
/**
 * Verifies that pre-rendered JSON (both String and byte[] RDDs) can be saved
 * through every saveJsonToEs/saveJsonByteArrayToEs overload, using the
 * {@code index/type} resource form ({@code spark-test-...-{airport}/data}).
 */
public void testEsRDDWriteAsJsonMultiWrite() throws Exception {
    // Two documents whose "airport" field drives the multi-resource {airport} pattern.
    String json1 = "{\"reason\" : \"business\",\"airport\" : \"SFO\"}";
    String json2 = "{\"participants\" : 5,\"airport\" : \"OTP\"}";

    // String RDD through all three overloads: resource only,
    // resource + explicit (empty) settings map, settings map carrying ES_RESOURCE.
    JavaRDD<String> stringRDD = sc.parallelize(ImmutableList.of(json1, json2));
    JavaEsSpark.saveJsonToEs(stringRDD, "spark-test-json-{airport}/data");
    JavaEsSpark.saveJsonToEs(stringRDD, "spark-test-json1-{airport}/data", Collections.<String, String> emptyMap());
    JavaEsSpark.saveJsonToEs(stringRDD, ImmutableMap.of(ES_RESOURCE, "spark-test-json2-{airport}/data"));

    // Same three overloads for the byte[] flavor.
    byte[] json1BA = json1.getBytes();
    byte[] json2BA = json2.getBytes();
    JavaRDD<byte[]> byteRDD = sc.parallelize(ImmutableList.of(json1BA, json2BA));
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test-json-ba-{airport}/data");
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test-json-ba1-{airport}/data", Collections.<String, String> emptyMap());
    JavaEsSpark.saveJsonByteArrayToEs(byteRDD, ImmutableMap.of(ES_RESOURCE, "spark-test-json-ba2-{airport}/data"));

    // Every save above must have created one index per airport value.
    for (String prefix : ImmutableList.of("json", "json1", "json2", "json-ba", "json-ba1", "json-ba2")) {
        assertTrue(RestUtils.exists("spark-test-" + prefix + "-SFO/data"));
        assertTrue(RestUtils.exists("spark-test-" + prefix + "-OTP/data"));
    }
    // Spot-check that the document bodies landed in the right index.
    assertThat(RestUtils.get("spark-test-json-SFO/data/_search?"), containsString("business"));
    assertThat(RestUtils.get("spark-test-json-OTP/data/_search?"), containsString("participants"));
}
示例4: importHttpfile
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; //导入方法依赖的package包/类
/**
 * Reads an HTTP access-log file, parses each line and bulk-indexes the
 * surviving records into Elasticsearch under {@code logIndex/httpType}.
 *
 * @param httplogpath path (local or HDFS) of the HTTP log file to import
 */
public void importHttpfile(String httplogpath) {
    // Method references instead of the redundant lambda wrappers; checknull
    // filters out lines that failed to parse.
    // NOTE(review): parseFromLogLine is assumed to produce a JSON string, as
    // required by saveJsonToEs — confirm against ApacheAccessLog.
    JavaRDD<String> accessLogs = spark.sc.textFile(httplogpath, this.partition)
        .map(ApacheAccessLog::parseFromLogLine)
        .filter(ApacheAccessLog::checknull);
    JavaEsSpark.saveJsonToEs(accessLogs, logIndex + "/" + this.httpType);
}
示例5: importFtpfile
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; //导入方法依赖的package包/类
/**
 * Reads an FTP log file, parses each line and bulk-indexes the surviving
 * records into Elasticsearch under {@code logIndex/ftpType}.
 *
 * @param ftplogpath path (local or HDFS) of the FTP log file to import
 */
public void importFtpfile(String ftplogpath) {
    // Method references instead of the redundant lambda wrappers; checknull
    // filters out lines that failed to parse.
    // NOTE(review): parseFromLogLine is assumed to produce a JSON string, as
    // required by saveJsonToEs — confirm against FtpLog.
    JavaRDD<String> ftpLogs = spark.sc.textFile(ftplogpath, this.partition)
        .map(FtpLog::parseFromLogLine)
        .filter(FtpLog::checknull);
    JavaEsSpark.saveJsonToEs(ftpLogs, logIndex + "/" + this.ftpType);
}