当前位置: 首页>>代码示例>>Java>>正文


Java AbstractSerDe类代码示例

本文整理汇总了Java中org.apache.hadoop.hive.serde2.AbstractSerDe的典型用法代码示例。如果您正苦于以下问题:Java AbstractSerDe类的具体用法?Java AbstractSerDe怎么用?Java AbstractSerDe使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


AbstractSerDe类属于org.apache.hadoop.hive.serde2包,在下文中一共展示了AbstractSerDe类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: initHeaderSerde

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Lazily creates and configures the serde used to render the header row.
 *
 * <p>The serde class comes from the {@code LensConfConstants.QUERY_OUTPUT_SERDE}
 * config key, falling back to {@code LensConfConstants.DEFAULT_OUTPUT_SERDE}.
 * Column names and types are handed to the serde as table properties before it
 * is used to build the header object inspector.
 *
 * @throws ClassNotFoundException if the default output serde class cannot be loaded
 * @throws SerDeException         if serde initialization fails
 */
@SuppressWarnings("unchecked")
private void initHeaderSerde() throws ClassNotFoundException, SerDeException {
  if (headerSerde != null) {
    return; // already initialized
  }
  Class<? extends AbstractSerDe> defaultSerde =
    (Class<? extends AbstractSerDe>) Class.forName(LensConfConstants.DEFAULT_OUTPUT_SERDE);
  headerSerde = ReflectionUtils.newInstance(
    ctx.getConf().getClass(LensConfConstants.QUERY_OUTPUT_SERDE, defaultSerde, SerDe.class),
    ctx.getConf());

  Properties headerProps = new Properties();
  if (!columnNames.isEmpty()) {
    headerProps.setProperty(serdeConstants.LIST_COLUMNS, StringUtils.join(escapedColumnNames, ","));
  }
  if (!htypes.isEmpty()) {
    headerProps.setProperty(serdeConstants.LIST_COLUMN_TYPES, htypes);
  }
  headerSerde.initialize(ctx.getConf(), headerProps);

  headerOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnHeaderOIs);
}
 
开发者ID:apache,项目名称:lens,代码行数:27,代码来源:AbstractOutputFormatter.java

示例2: getShipFiles

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Builds the list of jars that must be shipped to the cluster for ORC storage
 * to work at runtime: ORC/Hive core classes, the Hadoop-version-specific Hive
 * shims, the secure shims, and Kryo's {@code Input}.
 *
 * @return jar paths resolved from the representative classes listed below
 */
@Override
public List<String> getShipFiles() {
    // Hive publishes shims per Hadoop line; the "23" shims cover Hadoop 2.x as well.
    String hadoopVersion = "20S";
    if (Utils.isHadoop23() || Utils.isHadoop2()) {
        hadoopVersion = "23";
    }
    String shimsClassName = "org.apache.hadoop.hive.shims.Hadoop" + hadoopVersion + "Shims";
    Class<?> hadoopVersionShimsClass;
    try {
        hadoopVersionShimsClass = Class.forName(shimsClassName);
    } catch (ClassNotFoundException e) {
        // Preserve the cause so the missing-classpath problem stays diagnosable.
        throw new RuntimeException("Cannot find " + shimsClassName + " in classpath", e);
    }
    Class<?>[] classList = new Class<?>[] {OrcFile.class, HiveConf.class, AbstractSerDe.class,
            org.apache.hadoop.hive.shims.HadoopShims.class, HadoopShimsSecure.class, hadoopVersionShimsClass,
            Input.class};
    return FuncUtils.getShipFiles(classList);
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:20,代码来源:OrcStorage.java

示例3: TestIntWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Serializing a single int column must surface it under "properties" in the JSON output. */
@Test
public void TestIntWrite() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Row serializes to: {"properties":{"num":7}}
	ArrayList<Object> row = new ArrayList<Object>();
	addWritable(row, 7);
	Writable serialized = jserde.serialize(row, rowOI);
	JsonNode root = new ObjectMapper().readTree(((Text) serialized).toString());
	JsonNode num = root.findValue("properties").findValue("num");
	Assert.assertEquals(7, num.getIntValue());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:18,代码来源:TestGeoJsonSerDe.java

示例4: TestEpochWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Writing a date column must emit it as an epoch value under "properties".
 * DateWritable keeps only day granularity, so the comparison divides both
 * sides by one day in milliseconds.
 */
@Test
public void TestEpochWrite() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Row serializes to: {"properties":{"when":<epoch>}}
	long epoch = 0L;  // 147147147147L;
	// Shift by the local offset so the stored local-time date corresponds to the UTC epoch day.
	java.sql.Date expected = new java.sql.Date(epoch - TimeZone.getDefault().getOffset(epoch));
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, expected);
	Writable jsw = jserde.serialize(stuff, rowOI);
	JsonNode jn = new ObjectMapper().readTree(((Text) jsw).toString());
	jn = jn.findValue("properties").findValue("when");
	// Note: the original also built an unused java.sql.Date from jn; it has been removed.
	long day = 24 * 3600 * 1000;  // DateWritable stores days, not milliseconds.
	Assert.assertEquals(epoch / day, jn.getLongValue() / day);
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:22,代码来源:TestGeoJsonSerDe.java

示例5: TestPointWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** A point value must serialize to a GeoJSON "geometry" object carrying type and coordinates. */
@Test
public void TestPointWrite() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Expected output: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
	ArrayList<Object> row = new ArrayList<Object>();
	addWritable(row, new Point(15.0, 5.0));
	Writable serialized = jserde.serialize(row, rowOI);
	String json = ((Text) serialized).toString();
	JsonNode geometry = new ObjectMapper().readTree(json).findValue("geometry");
	Assert.assertNotNull(geometry.findValue("type"));
	Assert.assertNotNull(geometry.findValue("coordinates"));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:19,代码来源:TestGeoJsonSerDe.java

示例6: TestIntParse

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Parsing JSON int properties must yield IntWritable values, correct across consecutive rows. */
@Test
public void TestIntParse() throws Exception {
	AbstractSerDe jserde = new GeoJsonSerDe();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
	jserde.initialize(new Configuration(), proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField numField = rowOI.getStructFieldRef("num");

	Text value = new Text("{\"properties\":{\"num\":7}}");
	Object row = jserde.deserialize(value);
	Assert.assertEquals(7, ((IntWritable) rowOI.getStructFieldData(row, numField)).get());

	// Reuse the same serde instance for a second record.
	value.set("{\"properties\":{\"num\":9}}");
	row = jserde.deserialize(value);
	Assert.assertEquals(9, ((IntWritable) rowOI.getStructFieldData(row, numField)).get());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:24,代码来源:TestGeoJsonSerDe.java

示例7: TestDateParse

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Parsing ISO date strings must produce DateWritable values that round-trip via toString. */
@Test
public void TestDateParse() throws Exception {
	AbstractSerDe jserde = new GeoJsonSerDe();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
	jserde.initialize(new Configuration(), proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField whenField = rowOI.getStructFieldRef("when");

	Text value = new Text("{\"properties\":{\"when\":\"2020-02-20\"}}");
	Object row = jserde.deserialize(value);
	Assert.assertEquals("2020-02-20",
						((DateWritable) rowOI.getStructFieldData(row, whenField)).get().toString());

	// Second record through the same serde instance.
	value.set("{\"properties\":{\"when\":\"2017-05-05\"}}");
	row = jserde.deserialize(value);
	Assert.assertEquals("2017-05-05",
						((DateWritable) rowOI.getStructFieldData(row, whenField)).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:25,代码来源:TestGeoJsonSerDe.java

示例8: TestEpochParse

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Parsing numeric epoch-millisecond values must produce DateWritable values.
 * DateWritable keeps only day granularity in local time, so the comparison goes
 * through java.sql.Date.toString() rather than raw getTime() milliseconds.
 */
@Test
public void TestEpochParse() throws Exception {
	AbstractSerDe jserde = new GeoJsonSerDe();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
	jserde.initialize(new Configuration(), proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField whenField = rowOI.getStructFieldRef("when");

	Text value = new Text("{\"properties\":{\"when\":147147147147}}");
	Object row = jserde.deserialize(value);
	Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
						((DateWritable) rowOI.getStructFieldData(row, whenField)).get().toString());

	value.set("{\"properties\":{\"when\":142857142857}}");
	row = jserde.deserialize(value);
	Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
						((DateWritable) rowOI.getStructFieldData(row, whenField)).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:27,代码来源:TestGeoJsonSerDe.java

示例9: TestPointParse

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Parsing GeoJSON point geometries must fill the binary shape column with the
 * expected point, correctly for consecutive rows through one serde instance.
 */
@Test
public void TestPointParse() throws Exception {
	AbstractSerDe jserde = new GeoJsonSerDe();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
	jserde.initialize(new Configuration(), proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField shapeField = rowOI.getStructFieldRef("shape");

	Text value = new Text("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[15.0,5.0]}}");
	Object row = jserde.deserialize(value);
	ckPoint(new Point(15.0, 5.0), (BytesWritable) rowOI.getStructFieldData(row, shapeField));

	// The original input accidentally repeated "type":"Point"; the duplicate key
	// (a copy-paste slip, ignored by the parser) has been removed.
	value.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[7.0,4.0]}}");
	row = jserde.deserialize(value);
	ckPoint(new Point(7.0, 4.0), (BytesWritable) rowOI.getStructFieldData(row, shapeField));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:25,代码来源:TestGeoJsonSerDe.java

示例10: TestIntOnly

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Round-trip (serialize then deserialize) of a lone int column must preserve its value. */
@Test
public void TestIntOnly() throws Exception {  // Is this valid for GeoJSON?
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Equivalent JSON: {"properties":{"num":7}}
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, 7);
	Object row = runSerDe(stuff, jserde, rowOI);
	Assert.assertEquals(7, ((IntWritable) getField("num", row, rowOI)).get());

	// Second round-trip with a different value.
	stuff.clear();
	addWritable(stuff, 9);
	row = runSerDe(stuff, jserde, rowOI);
	Assert.assertEquals(9, ((IntWritable) getField("num", row, rowOI)).get());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:21,代码来源:TestGeoJsonSerDe.java

示例11: TestPointOnly

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Round-trip of a lone binary point column must preserve the point geometry. */
@Test
public void TestPointOnly() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Equivalent JSON: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, new Point(15.0, 5.0));
	Object row = runSerDe(stuff, jserde, rowOI);
	ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", row, rowOI));

	// Second round-trip with a different point.
	stuff.clear();
	addWritable(stuff, new Point(7.0, 4.0));
	row = runSerDe(stuff, jserde, rowOI);
	ckPoint(new Point(7.0, 4.0), (BytesWritable) getField("shape", row, rowOI));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:23,代码来源:TestGeoJsonSerDe.java

示例12: TestIntPoint

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Round-trip of a two-column (bigint num, binary point shape) row must
 * preserve both columns.
 */
@Test
public void TestIntPoint() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num,shape");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "bigint,binary");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Equivalent JSON: {"properties":{"num":7},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, 7L);
	addWritable(stuff, new Point(15.0, 5.0));
	Object row = runSerDe(stuff, jserde, rowOI);
	Assert.assertEquals(7, ((LongWritable) getField("num", row, rowOI)).get());
	// The original never verified the shape column on the first row; check it too.
	ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", row, rowOI));

	// Second round-trip: {"properties":{"num":4},"geometry":{"type":"Point","coordinates":[7.0,2.0]}}
	stuff.clear();
	addWritable(stuff, 4L);
	addWritable(stuff, new Point(7.0, 2.0));
	row = runSerDe(stuff, jserde, rowOI);
	Assert.assertEquals(4, ((LongWritable) getField("num", row, rowOI)).get());
	ckPoint(new Point(7.0, 2.0), (BytesWritable) getField("shape", row, rowOI));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:27,代码来源:TestGeoJsonSerDe.java

示例13: TestNullAttr

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** A null attribute value must round-trip to a null field, not a default or an exception. */
@Test
public void TestNullAttr() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Equivalent JSON: {"properties":{"num":7}}
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, 7);
	Object row = runSerDe(stuff, jserde, rowOI);
	Assert.assertEquals(7, ((IntWritable) getField("num", row, rowOI)).get());

	// Equivalent JSON: {"properties":{}} — the attribute is absent.
	stuff.set(0, null);
	row = runSerDe(stuff, jserde, rowOI);
	Assert.assertNull(getField("num", row, rowOI));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:21,代码来源:TestGeoJsonSerDe.java

示例14: TestNullGeom

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** A null geometry must round-trip to a null shape field, not a default or an exception. */
@Test
public void TestNullGeom() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Equivalent JSON: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
	ArrayList<Object> stuff = new ArrayList<Object>();
	addWritable(stuff, new Point(15.0, 5.0));
	Object row = runSerDe(stuff, jserde, rowOI);
	ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", row, rowOI));

	// Equivalent JSON: {"properties":{},"coordinates":null} — the geometry is absent.
	stuff.set(0, null);
	row = runSerDe(stuff, jserde, rowOI);
	Assert.assertNull(getField("shape", row, rowOI));
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:22,代码来源:TestGeoJsonSerDe.java

示例15: TestIntWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/** Esri JSON: serializing a single int column must surface it under "attributes". */
@Test
public void TestIntWrite() throws Exception {
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
	AbstractSerDe jserde = mkSerDe(proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

	// Row serializes to: {"attributes":{"num":7}}
	ArrayList<Object> row = new ArrayList<Object>();
	addWritable(row, 7);
	Writable serialized = jserde.serialize(row, rowOI);
	JsonNode root = new ObjectMapper().readTree(((Text) serialized).toString());
	JsonNode num = root.findValue("attributes").findValue("num");
	Assert.assertEquals(7, num.getIntValue());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:18,代码来源:TestEsriJsonSerDe.java


注:本文中的org.apache.hadoop.hive.serde2.AbstractSerDe类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。