

Java JavaEsSpark.saveJsonToEs Method Code Examples

This article collects typical usage examples of the Java method org.elasticsearch.spark.rdd.api.java.JavaEsSpark.saveJsonToEs. If you are unsure what JavaEsSpark.saveJsonToEs does or how to call it, the curated examples below should help. You can also explore other usage examples of the enclosing class, org.elasticsearch.spark.rdd.api.java.JavaEsSpark.


Five code examples of JavaEsSpark.saveJsonToEs are shown below, sorted by popularity by default.
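Before the curated examples, here is a minimal, self-contained sketch of the basic call pattern. It is only an illustration under assumptions: a local Spark master, an Elasticsearch node reachable at localhost:9200, and a hypothetical resource string spark-demo-{airport}/doc; adjust the connection settings and the index/type format to your cluster and es-hadoop/Elasticsearch versions.

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;

public class SaveJsonToEsSketch {
  public static void main(String[] args) {
    // Spark and es-hadoop configuration; host and port values are assumptions.
    SparkConf conf = new SparkConf()
        .setAppName("save-json-to-es-sketch")
        .setMaster("local[*]")
        .set("es.nodes", "localhost")
        .set("es.port", "9200");

    try (JavaSparkContext sc = new JavaSparkContext(conf)) {
      // Each element is already a serialized JSON document;
      // saveJsonToEs sends it to Elasticsearch as-is, without re-serialization.
      JavaRDD<String> docs = sc.parallelize(Arrays.asList(
          "{\"reason\":\"business\",\"airport\":\"SFO\"}",
          "{\"participants\":5,\"airport\":\"OTP\"}"));

      // The {airport} placeholder is resolved per document from the JSON field
      // of the same name, so each document lands in its own index.
      JavaEsSpark.saveJsonToEs(docs, "spark-demo-{airport}/doc");
    }
  }
}

This per-document index routing via the {airport} placeholder is exactly what the multi-write tests in Examples 1 and 3 below verify.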

Example 1: testEsRDDWriteAsJsonMultiWrite

import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; // import the package/class the method depends on
public void testEsRDDWriteAsJsonMultiWrite() throws Exception {
  String json1 = "{\"reason\" : \"business\",\"airport\" : \"SFO\"}";
  String json2 = "{\"participants\" : 5,\"airport\" : \"OTP\"}";

  JavaRDD<String> stringRDD = sc.parallelize(ImmutableList.of(json1, json2));
  JavaEsSpark.saveJsonToEs(stringRDD, "spark-test/json-{airport}");
  JavaEsSpark.saveJsonToEs(stringRDD, "spark-test/json1-{airport}", Collections.<String, String> emptyMap());
  JavaEsSpark.saveJsonToEs(stringRDD, ImmutableMap.of(ES_RESOURCE, "spark-test/json2-{airport}"));

  byte[] json1BA = json1.getBytes();
  byte[] json2BA = json2.getBytes();

  JavaRDD<byte[]> byteRDD = sc.parallelize(ImmutableList.of(json1BA, json2BA));
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test/json-ba-{airport}");
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test/json-ba1-{airport}", Collections.<String, String> emptyMap());
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, ImmutableMap.of(ES_RESOURCE, "spark-test/json-ba2-{airport}"));

  assertTrue(RestUtils.exists("spark-test/json-SFO"));
  assertTrue(RestUtils.exists("spark-test/json-OTP"));

  assertTrue(RestUtils.exists("spark-test/json1-SFO"));
  assertTrue(RestUtils.exists("spark-test/json1-OTP"));

  assertTrue(RestUtils.exists("spark-test/json2-SFO"));
  assertTrue(RestUtils.exists("spark-test/json2-OTP"));

  assertTrue(RestUtils.exists("spark-test/json-ba-SFO"));
  assertTrue(RestUtils.exists("spark-test/json-ba-OTP"));

  assertTrue(RestUtils.exists("spark-test/json-ba1-SFO"));
  assertTrue(RestUtils.exists("spark-test/json-ba1-OTP"));

  assertTrue(RestUtils.exists("spark-test/json-ba2-SFO"));
  assertTrue(RestUtils.exists("spark-test/json-ba2-OTP"));

  assertThat(RestUtils.get("spark-test/json-SFO/_search?"), containsString("business"));
  assertThat(RestUtils.get("spark-test/json-OTP/_search?"), containsString("participants"));
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 39, Source: AbstractJavaEsSparkTest.java

Example 2: store

import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; // import the package/class the method depends on
private void store(JavaSparkContext javaSparkContext, List<Dependency> dependencyLinks, String resource) {
  if (dependencyLinks.isEmpty()) {
    return;
  }

  String json;
  try {
    ObjectMapper objectMapper = new ObjectMapper();
    json = objectMapper.writeValueAsString(new ElasticsearchDependencies(dependencyLinks, day));
  } catch (JsonProcessingException e) {
    throw new IllegalStateException("Could not serialize dependencies", e);
  }

  JavaEsSpark.saveJsonToEs(javaSparkContext.parallelize(Collections.singletonList(json)), resource);
}
 
Developer: jaegertracing, Project: spark-dependencies, Lines: 16, Source: ElasticsearchDependenciesJob.java

Example 3: testEsRDDWriteAsJsonMultiWrite

import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; // import the package/class the method depends on
public void testEsRDDWriteAsJsonMultiWrite() throws Exception {
  String json1 = "{\"reason\" : \"business\",\"airport\" : \"SFO\"}";
  String json2 = "{\"participants\" : 5,\"airport\" : \"OTP\"}";

  JavaRDD<String> stringRDD = sc.parallelize(ImmutableList.of(json1, json2));
  JavaEsSpark.saveJsonToEs(stringRDD, "spark-test-json-{airport}/data");
  JavaEsSpark.saveJsonToEs(stringRDD, "spark-test-json1-{airport}/data", Collections.<String, String> emptyMap());
  JavaEsSpark.saveJsonToEs(stringRDD, ImmutableMap.of(ES_RESOURCE, "spark-test-json2-{airport}/data"));

  byte[] json1BA = json1.getBytes();
  byte[] json2BA = json2.getBytes();

  JavaRDD<byte[]> byteRDD = sc.parallelize(ImmutableList.of(json1BA, json2BA));
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test-json-ba-{airport}/data");
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, "spark-test-json-ba1-{airport}/data", Collections.<String, String> emptyMap());
  JavaEsSpark.saveJsonByteArrayToEs(byteRDD, ImmutableMap.of(ES_RESOURCE, "spark-test-json-ba2-{airport}/data"));

  assertTrue(RestUtils.exists("spark-test-json-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json-OTP/data"));

  assertTrue(RestUtils.exists("spark-test-json1-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json1-OTP/data"));

  assertTrue(RestUtils.exists("spark-test-json2-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json2-OTP/data"));

  assertTrue(RestUtils.exists("spark-test-json-ba-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json-ba-OTP/data"));

  assertTrue(RestUtils.exists("spark-test-json-ba1-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json-ba1-OTP/data"));

  assertTrue(RestUtils.exists("spark-test-json-ba2-SFO/data"));
  assertTrue(RestUtils.exists("spark-test-json-ba2-OTP/data"));

  assertThat(RestUtils.get("spark-test-json-SFO/data/_search?"), containsString("business"));
  assertThat(RestUtils.get("spark-test-json-OTP/data/_search?"), containsString("participants"));
}
 
Developer: elastic, Project: elasticsearch-hadoop, Lines: 39, Source: AbstractJavaEsSparkTest.java

Example 4: importHttpfile

import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; // import the package/class the method depends on
public void importHttpfile(String httplogpath) {
  // import http logs
  JavaRDD<String> accessLogs = spark.sc.textFile(httplogpath, this.partition)
      .map(s -> ApacheAccessLog.parseFromLogLine(s))
      .filter(ApacheAccessLog::checknull);

  JavaEsSpark.saveJsonToEs(accessLogs, logIndex + "/" + this.httpType);
}
 
Developer: apache, Project: incubator-sdap-mudrod, Lines: 7, Source: ImportLogFile.java

Example 5: importFtpfile

import org.elasticsearch.spark.rdd.api.java.JavaEsSpark; // import the package/class the method depends on
public void importFtpfile(String ftplogpath) {
  // import ftp logs
  JavaRDD<String> ftpLogs = spark.sc.textFile(ftplogpath, this.partition)
      .map(s -> FtpLog.parseFromLogLine(s))
      .filter(FtpLog::checknull);

  JavaEsSpark.saveJsonToEs(ftpLogs, logIndex + "/" + this.ftpType);
}
 
Developer: apache, Project: incubator-sdap-mudrod, Lines: 7, Source: ImportLogFile.java


Note: The org.elasticsearch.spark.rdd.api.java.JavaEsSpark.saveJsonToEs examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community-contributed open-source projects; copyright of the source code remains with the original authors, and any use or redistribution should follow the license of the corresponding project. Do not reproduce without permission.