This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.AbstractSerDe.deserialize. If you have been wondering what AbstractSerDe.deserialize does, how to call it, or what real code that uses it looks like, the curated examples here should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.AbstractSerDe.
Ten code examples of the AbstractSerDe.deserialize method are shown below, sorted by popularity by default.
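Before the individual examples, here is a minimal sketch of the pattern they all follow: declare the column names and types in the table properties, initialize the SerDe, call deserialize on a Text record, and read fields back through the returned StructObjectInspector. The column name "num", the sample JSON record, and the use of GeoJsonSerDe / HiveShims are taken from the examples below; their project-specific imports are omitted.
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
// GeoJsonSerDe and HiveShims come from the project the examples below belong to; their imports are omitted.
public void deserializeSketch() throws Exception {
    AbstractSerDe serde = new GeoJsonSerDe();                               // any AbstractSerDe implementation
    Properties props = new Properties();
    props.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");        // column name (illustrative)
    props.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");   // column type (illustrative)
    serde.initialize(new Configuration(), props);                           // bind the SerDe to the schema
    StructObjectInspector rowOI = (StructObjectInspector) serde.getObjectInspector();
    Text record = new Text("{\"properties\":{\"num\":7}}");                 // one serialized record
    Object row = serde.deserialize(record);                                 // Writable -> in-memory row
    StructField f0 = rowOI.getStructFieldRef("num");
    IntWritable num = (IntWritable) rowOI.getStructFieldData(row, f0);      // read the field back (7)
}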
Example 1: TestIntParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestIntParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"properties\":{\"num\":7}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("num");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(7, ((IntWritable)fieldData).get());
    value.set("{\"properties\":{\"num\":9}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("num");
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(9, ((IntWritable)fieldData).get());
}
Example 2: TestDateParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestDateParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"properties\":{\"when\":\"2020-02-20\"}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals("2020-02-20",
                        ((DateWritable)fieldData).get().toString());
    value.set("{\"properties\":{\"when\":\"2017-05-05\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals("2017-05-05",
                        ((DateWritable)fieldData).get().toString());
}
Example 3: TestEpochParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestEpochParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"properties\":{\"when\":147147147147}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(147147147147L, ((DateWritable)fieldData).get().getTime());
    Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
                        ((DateWritable)fieldData).get().toString());
    value.set("{\"properties\":{\"when\":142857142857}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(142857142857L, ((DateWritable)fieldData).get());
    Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
                        ((DateWritable)fieldData).get().toString());
}
Example 4: TestPointParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestPointParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[15.0,5.0]}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("shape");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    ckPoint(new Point(15.0, 5.0), (BytesWritable)fieldData);
    value.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[7.0,4.0]}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("shape");
    fieldData = rowOI.getStructFieldData(row, f0);
    ckPoint(new Point(7.0, 4.0), (BytesWritable)fieldData);
}
Example 5: TestIntParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestIntParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    //value.set("{\"attributes\":{\"num\":7},\"geometry\":null}");
    value.set("{\"attributes\":{\"num\":7}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("num");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(7, ((IntWritable)fieldData).get());
    value.set("{\"attributes\":{\"num\":9}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("num");
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(9, ((IntWritable)fieldData).get());
}
Example 6: TestDateParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestDateParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals("2020-02-20",
                        ((DateWritable)fieldData).get().toString());
    value.set("{\"attributes\":{\"when\":\"2017-05-05\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals("2017-05-05",
                        ((DateWritable)fieldData).get().toString());
}
Example 7: TestEpochParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestEpochParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"attributes\":{\"when\":147147147147}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(147147147147L, ((DateWritable)fieldData).get().getTime());
    Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
                        ((DateWritable)fieldData).get().toString());
    value.set("{\"attributes\":{\"when\":142857142857}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(142857142857L, ((DateWritable)fieldData).get());
    Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
                        ((DateWritable)fieldData).get().toString());
}
Example 8: TestPointParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestPointParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"attributes\":{},\"geometry\":{\"x\":15.0,\"y\":5.0}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("shape");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    ckPoint(new Point(15.0, 5.0), (BytesWritable)fieldData);
    value.set("{\"attributes\":{},\"geometry\":{\"x\":7.0,\"y\":4.0}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("shape");
    fieldData = rowOI.getStructFieldData(row, f0);
    ckPoint(new Point(7.0, 4.0), (BytesWritable)fieldData);
}
Example 9: TestTimeParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
@Test
public void TestTimeParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "timestamp");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector();
    value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd").parse("2020-02-20").getTime(),
        ((TimestampWritable)fieldData).getTimestamp().getTime());
    value.set("{\"attributes\":{\"when\":\"2017-05-05 05:05\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2017-05-05 05:05").getTime(),
        ((TimestampWritable)fieldData).getTimestamp().getTime());
    value.set("{\"attributes\":{\"when\":\"2017-08-09 10:11:12\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2017-08-09 10:11:12").getTime(),
        ((TimestampWritable)fieldData).getTimestamp().getTime());
    value.set("{\"attributes\":{\"when\":\"2017-06-05 04:03:02.123456789\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse("2017-06-05 04:03:02.123").getTime(),
        ((TimestampWritable)fieldData).getTimestamp().getTime());  // nanoseconds are parsed but not checked
}
Example 10: runSerDe
import org.apache.hadoop.hive.serde2.AbstractSerDe; // import the package/class the method depends on
protected Object runSerDe(Object stuff, AbstractSerDe jserde, StructObjectInspector rowOI) throws Exception {
    Writable jsw = jserde.serialize(stuff, rowOI);
    //System.err.println(jsw);
    return jserde.deserialize(jsw);
}
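A hedged sketch of how a helper like runSerDe might be exercised in a round-trip test follows; building the input row as an ArrayList of writables, and the column name "num", are illustrative assumptions rather than code taken from the original test class.
// Hypothetical round-trip check; the SerDe setup is assumed to mirror Example 5 (EsriJsonSerDe, one int column "num").
ArrayList<Object> stuff = new ArrayList<Object>();
stuff.add(new IntWritable(7));                 // assumed row representation: a list of writables matching rowOI
Object row = runSerDe(stuff, jserde, rowOI);   // serialize the row, then deserialize the resulting JSON
StructField f0 = rowOI.getStructFieldRef("num");
Assert.assertEquals(7, ((IntWritable) rowOI.getStructFieldData(row, f0)).get());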