

Java AbstractSerDe.serialize Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.AbstractSerDe.serialize. If you are wondering what exactly AbstractSerDe.serialize does, how to call it, or where to find working examples, the curated code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.AbstractSerDe.


Eight code examples of the AbstractSerDe.serialize method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
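All of the examples share the same pattern: build an AbstractSerDe from a Properties object describing the table columns, obtain its StructObjectInspector, and pass a row (an ArrayList of Writable field values) to serialize. The mkSerDe and addWritable calls are helpers from the projects' test base classes and are not reproduced in this article; the following is only a minimal sketch of what such helpers might look like, assuming the GeoJsonSerDe from the Esri spatial-framework-for-hadoop project (the class name, the helper signatures, and the two-argument initialize overload are assumptions, not code taken from the examples below).

import java.util.ArrayList;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.io.IntWritable;

// Hypothetical stand-in for the mkSerDe helper used by the tests below.
static AbstractSerDe mkSerDe(Properties proptab) throws Exception {
    // GeoJsonSerDe is one concrete AbstractSerDe; any serde initialized with
    // LIST_COLUMNS / LIST_COLUMN_TYPES table properties follows the same pattern.
    AbstractSerDe jserde = new com.esri.hadoop.hive.serde.GeoJsonSerDe();
    jserde.initialize(new Configuration(), proptab);  // Hive 1.x/2.x signature; newer versions differ
    return jserde;
}

// Hypothetical stand-in for addWritable: serialize() reads the row through the
// ObjectInspector, so field values are passed as Hadoop Writables.
static void addWritable(ArrayList<Object> stuff, int value) {
    stuff.add(new IntWritable(value));
}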

Example 1: TestIntWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestIntWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"properties":{"num":7}}
    addWritable(stuff, 7);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString());
    jn = jn.findValue("properties");
    jn = jn.findValue("num");
    Assert.assertEquals(7, jn.getIntValue());
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 18, Source file: TestGeoJsonSerDe.java

Example 2: TestEpochWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestEpochWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"properties":{"when":147147147147}}
    long epoch = 0L;  // 147147147147L;
    java.sql.Date expected = new java.sql.Date(epoch - TimeZone.getDefault().getOffset(epoch));
    addWritable(stuff, expected);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString());
    jn = jn.findValue("properties");
    jn = jn.findValue("when");
    java.sql.Date actual = new java.sql.Date(jn.getLongValue());
    long day = 24*3600*1000;  // DateWritable stores days, not milliseconds.
    Assert.assertEquals(epoch/day, jn.getLongValue()/day);
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 22, Source file: TestGeoJsonSerDe.java
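The date and timestamp examples (here and in Examples 5 and 6) depend on how addWritable wraps java.sql.Date and java.sql.Timestamp values. A plausible sketch of those overloads is shown below, assuming Hive's DateWritable and TimestampWritable from org.apache.hadoop.hive.serde2.io (an assumption about the test helper, not code from the article); DateWritable keeps only whole days since the epoch, which is why the assertion above compares epoch/day rather than raw milliseconds.

import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

// Hypothetical addWritable overloads for the temporal columns used in these tests.
static void addWritable(ArrayList<Object> stuff, java.sql.Date value) {
    stuff.add(new DateWritable(value));       // stored as whole days since 1970-01-01
}

static void addWritable(ArrayList<Object> stuff, java.sql.Timestamp value) {
    stuff.add(new TimestampWritable(value));  // keeps sub-day precision
}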

Example 3: TestPointWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestPointWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    addWritable(stuff, new Point(15.0, 5.0));
    Writable jsw = jserde.serialize(stuff, rowOI);
    String rslt = ((Text)jsw).toString();
    JsonNode jn = new ObjectMapper().readTree(rslt);
    jn = jn.findValue("geometry");
    Assert.assertNotNull(jn.findValue("type"));
    Assert.assertNotNull(jn.findValue("coordinates"));
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 19, Source file: TestGeoJsonSerDe.java

Example 4: TestIntWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestIntWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"attributes":{"num":7}}
    addWritable(stuff, 7);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString());
    jn = jn.findValue("attributes");
    jn = jn.findValue("num");
    Assert.assertEquals(7, jn.getIntValue());
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 18, Source file: TestEsriJsonSerDe.java

Example 5: TestEpochWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestEpochWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"attributes":{"when":147147147147}}
    long epoch = 147147147147L;
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MMM-dd");
    sdf.setTimeZone(TimeZone.getTimeZone("America/New_York"));
    java.sql.Date expected = new java.sql.Date(epoch);
    String expString = sdf.format(expected);
    //System.err.println(expected.getTime());
    addWritable(stuff, expected);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString());
    jn = jn.findValue("attributes");
    jn = jn.findValue("when");
    java.sql.Date actual = new java.sql.Date(jn.getLongValue());
    String actualDateString = sdf.format(actual);
    Assert.assertEquals(expString, actualDateString);  // workaround DateWritable, j.s.Date
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 26, Source file: TestEsriJsonSerDe.java

Example 6: TestTimeWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestTimeWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "timestamp");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"attributes":{"when":147147147147}}
    long epoch = 147147147147L;
    java.sql.Timestamp expected = new java.sql.Timestamp(epoch);
    addWritable(stuff, expected);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString());
    jn = jn.findValue("attributes");
    jn = jn.findValue("when");
    java.sql.Timestamp actual = new java.sql.Timestamp(jn.getLongValue());
    Assert.assertEquals(expected, actual);
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 21, Source file: TestEsriJsonSerDe.java

Example 7: TestPointWrite

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestPointWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();

    // {"attributes":{},"geometry":{"x":15.0,"y":5.0}}
    addWritable(stuff, new Point(15.0, 5.0));
    Writable jsw = jserde.serialize(stuff, rowOI);
    String rslt = ((Text)jsw).toString();
    JsonNode jn = new ObjectMapper().readTree(rslt);
    jn = jn.findValue("geometry");
    Assert.assertNotNull(jn.findValue("x"));
    Assert.assertNotNull(jn.findValue("y"));
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 19, Source file: TestEsriJsonSerDe.java

Example 8: runSerDe

import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
protected Object runSerDe(Object stuff, AbstractSerDe jserde, StructObjectInspector rowOI) throws Exception {
    Writable jsw = jserde.serialize(stuff, rowOI);
    //System.err.println(jsw);
    return jserde.deserialize(jsw);
}
 
Developer ID: Esri, Project: spatial-framework-for-hadoop, Lines of code: 6, Source file: JsonSerDeTestingBase.java
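For completeness, here is a hypothetical round-trip check built on runSerDe for a single int column, reading the deserialized row back through the same StructObjectInspector. The field-access code below is not part of the article; it assumes the serde hands the value back as an IntWritable, whereas some serdes return plain Java objects instead.

ArrayList<Object> stuff = new ArrayList<Object>();
addWritable(stuff, 7);                        // one-column row: num = 7
Object row = runSerDe(stuff, jserde, rowOI);  // serialize, then deserialize

// Read the field back via the row's StructObjectInspector
// (org.apache.hadoop.hive.serde2.objectinspector.StructField).
StructField numField = rowOI.getStructFieldRef("num");
Object numData = rowOI.getStructFieldData(row, numField);
Assert.assertEquals(7, ((IntWritable) numData).get());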


Note: The org.apache.hadoop.hive.serde2.AbstractSerDe.serialize method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please refer to the corresponding project's license before redistributing or using the code; do not reproduce this article without permission.