本文整理汇总了Java中org.apache.hadoop.hive.serde2.AbstractSerDe.getObjectInspector方法的典型用法代码示例。如果您正苦于以下问题:Java AbstractSerDe.getObjectInspector方法的具体用法?Java AbstractSerDe.getObjectInspector怎么用?Java AbstractSerDe.getObjectInspector使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.AbstractSerDe
的用法示例。
在下文中一共展示了AbstractSerDe.getObjectInspector方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: TestIntWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntWrite() throws Exception {
    // Serialize a one-column int row and verify the produced JSON carries
    // the value under properties.num, i.e. {"properties":{"num":7}}.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Writable serialized = serDe.serialize(row, inspector);

    // Walk the JSON tree down to properties.num and check the value.
    JsonNode node = new ObjectMapper().readTree(((Text) serialized).toString());
    node = node.findValue("properties");
    node = node.findValue("num");
    Assert.assertEquals(7, node.getIntValue());
}
示例2: TestEpochWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestEpochWrite() throws Exception {
    // Serializes a single "date" column and verifies the epoch value
    // round-trips at day granularity (DateWritable stores whole days,
    // not milliseconds — hence the division by `day` below).
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    long epoch = 0L; // 147147147147L left commented for manual experimentation
    // Shift by the local zone offset so the java.sql.Date represents the
    // intended UTC epoch day regardless of the JVM's default time zone.
    java.sql.Date expected = new java.sql.Date(epoch - TimeZone.getDefault().getOffset(epoch));
    ArrayList<Object> stuff = new ArrayList<Object>();
    addWritable(stuff, expected);

    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text) jsw).toString());
    jn = jn.findValue("properties");
    jn = jn.findValue("when");
    long day = 24 * 3600 * 1000; // milliseconds per day
    // Compare at day resolution; the formerly-built java.sql.Date `actual`
    // was never used, so it has been removed.
    Assert.assertEquals(epoch / day, jn.getLongValue() / day);
}
示例3: TestPointWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestPointWrite() throws Exception {
    // Serialize a binary "shape" column holding a point; the output should
    // contain a geometry object with "type" and "coordinates" members, e.g.
    // {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Writable serialized = serDe.serialize(row, inspector);
    String json = ((Text) serialized).toString();

    JsonNode geometry = new ObjectMapper().readTree(json).findValue("geometry");
    Assert.assertNotNull(geometry.findValue("type"));
    Assert.assertNotNull(geometry.findValue("coordinates"));
}
示例4: TestIntParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntParse() throws Exception {
    // Deserializes GeoJSON text twice and checks that the "num" property
    // surfaces through the object inspector as an IntWritable each time.
    Configuration config = new Configuration();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
    // The field reference is stable for a fixed inspector; the original
    // looked it up again for the second row — once is enough.
    StructField f0 = rowOI.getStructFieldRef("num");

    Text value = new Text();
    value.set("{\"properties\":{\"num\":7}}");
    Object row = jserde.deserialize(value);
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(7, ((IntWritable) fieldData).get());

    value.set("{\"properties\":{\"num\":9}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(9, ((IntWritable) fieldData).get());
}
示例5: TestPointOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestPointOnly() throws Exception {
    // Round-trip a row whose only column is a binary geometry; the point
    // must survive serialize + deserialize unchanged (checked with two
    // distinct points to guard against stale state between rows).
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON: {"attributes":{},"geometry":{"x":15.0,"y":5.0}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // Equivalent source JSON: {"attributes":{},"geometry":{"x":7.0,"y":4.0}}
    row.clear();
    addWritable(row, new Point(7.0, 4.0));
    parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(7.0, 4.0), (BytesWritable) getField("shape", parsed, inspector));
}
示例6: TestIntOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntOnly() throws Exception {
    // Round-trip a single int column through the SerDe twice with
    // different values and confirm each comes back intact.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Object parsed = runSerDe(row, serDe, inspector);
    Assert.assertEquals(7, ((IntWritable) getField("num", parsed, inspector)).get());

    // Equivalent source JSON: {"attributes":{"num":9}}
    row.clear();
    addWritable(row, 9);
    parsed = runSerDe(row, serDe, inspector);
    Assert.assertEquals(9, ((IntWritable) getField("num", parsed, inspector)).get());
}
示例7: TestPointParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestPointParse() throws Exception {
    // Parse GeoJSON feature text and confirm the binary "shape" column
    // yields the encoded point, for two different inputs.
    Configuration config = new Configuration();
    AbstractSerDe serDe = new GeoJsonSerDe();
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    serDe.initialize(config, tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    Text value = new Text();
    value.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[15.0,5.0]}}");
    Object parsed = serDe.deserialize(value);
    StructField shapeField = inspector.getStructFieldRef("shape");
    ckPoint(new Point(15.0, 5.0), (BytesWritable) inspector.getStructFieldData(parsed, shapeField));

    // NOTE(review): this input repeats the "type":"Point" key — presumably a
    // copy/paste slip rather than a deliberate duplicate-key test; input kept
    // byte-for-byte, but confirm the intent with the author.
    value.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"type\":\"Point\",\"coordinates\":[7.0,4.0]}}");
    parsed = serDe.deserialize(value);
    shapeField = inspector.getStructFieldRef("shape");
    ckPoint(new Point(7.0, 4.0), (BytesWritable) inspector.getStructFieldData(parsed, shapeField));
}
示例8: TestIntOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntOnly() throws Exception { // Is this valid for GeoJSON?
    // Round-trip a single int column ("num") twice with different values;
    // each deserialized row must carry the value that was serialized.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON: {"properties":{"num":7}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Object parsed = runSerDe(row, serDe, inspector);
    Assert.assertEquals(7, ((IntWritable) getField("num", parsed, inspector)).get());

    row.clear();
    addWritable(row, 9);
    parsed = runSerDe(row, serDe, inspector);
    Assert.assertEquals(9, ((IntWritable) getField("num", parsed, inspector)).get());
}
示例9: TestPointOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestPointOnly() throws Exception {
    // Round-trip a geometry-only row (binary "shape" column) twice with
    // distinct points and verify each point survives intact.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON:
    // {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // Second point with different coordinates guards against stale state.
    row.clear();
    addWritable(row, new Point(7.0, 4.0));
    parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(7.0, 4.0), (BytesWritable) getField("shape", parsed, inspector));
}
示例10: TestIntPoint
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntPoint() throws Exception {
    // Round-trips a two-column row (bigint "num", binary "shape") and
    // verifies BOTH fields for BOTH rows. The original only checked the
    // geometry of the second row; the first row's shape is now asserted too.
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num,shape");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "bigint,binary");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    // {"properties":{"num":7},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    ArrayList<Object> stuff = new ArrayList<Object>();
    addWritable(stuff, 7L);
    addWritable(stuff, new Point(15.0, 5.0));
    Object row = runSerDe(stuff, jserde, rowOI);
    Object fieldData = getField("num", row, rowOI);
    Assert.assertEquals(7, ((LongWritable) fieldData).get());
    fieldData = getField("shape", row, rowOI);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) fieldData);

    // {"properties":{"num":4},"geometry":{"type":"Point","coordinates":[7.0,2.0]}}
    stuff.clear();
    addWritable(stuff, 4L);
    addWritable(stuff, new Point(7.0, 2.0));
    row = runSerDe(stuff, jserde, rowOI);
    fieldData = getField("num", row, rowOI);
    Assert.assertEquals(4, ((LongWritable) fieldData).get());
    fieldData = getField("shape", row, rowOI);
    ckPoint(new Point(7.0, 2.0), (BytesWritable) fieldData);
}
示例11: TestNullAttr
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestNullAttr() throws Exception {
    // A present attribute must round-trip; a null attribute must come back
    // as a null field rather than a stale or default value.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON: {"properties":{"num":7}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Object parsed = runSerDe(row, serDe, inspector);
    Assert.assertEquals(7, ((IntWritable) getField("num", parsed, inspector)).get());

    // Equivalent source JSON: {"properties":{}} — column value absent.
    row.set(0, null);
    parsed = runSerDe(row, serDe, inspector);
    Assert.assertNull(getField("num", parsed, inspector));
}
示例12: TestNullGeom
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestNullGeom() throws Exception {
    // A real geometry must round-trip; a null geometry must come back as a
    // null field rather than a leftover from the previous row.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON:
    // {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // Equivalent source JSON: {"properties":{},"coordinates":null}
    row.set(0, null);
    parsed = runSerDe(row, serDe, inspector);
    Assert.assertNull(getField("shape", parsed, inspector));
}
示例13: TestIntWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestIntWrite() throws Exception {
    // Serialize a one-column int row and verify the produced JSON carries
    // the value under attributes.num, i.e. {"attributes":{"num":7}}.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Writable serialized = serDe.serialize(row, inspector);

    // Walk the JSON tree down to attributes.num and check the value.
    JsonNode node = new ObjectMapper().readTree(((Text) serialized).toString());
    node = node.findValue("attributes");
    node = node.findValue("num");
    Assert.assertEquals(7, node.getIntValue());
}
示例14: TestNullGeom
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestNullGeom() throws Exception {
    // A real geometry must round-trip through the SerDe; a null geometry
    // must deserialize to a null field.
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    // Equivalent source JSON: {"attributes":{},"geometry":{"x":15.0,"y":5.0}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serDe, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // Equivalent source JSON: {"attributes":{},"geometry":null}
    row.set(0, null);
    parsed = runSerDe(row, serDe, inspector);
    Assert.assertNull(getField("shape", parsed, inspector));
}
示例15: TestTimeWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入方法依赖的package包/类
@Test
public void TestTimeWrite() throws Exception {
    // Serialize a single "timestamp" column and verify the millisecond epoch
    // value written under attributes.when reconstructs the same Timestamp.
    // Expected output shape: {"attributes":{"when":147147147147}}
    Properties tblProps = new Properties();
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    tblProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "timestamp");
    AbstractSerDe serDe = mkSerDe(tblProps);
    StructObjectInspector inspector = (StructObjectInspector) serDe.getObjectInspector();

    long epoch = 147147147147L;
    java.sql.Timestamp expected = new java.sql.Timestamp(epoch);
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, expected);

    Writable serialized = serDe.serialize(row, inspector);
    JsonNode node = new ObjectMapper().readTree(((Text) serialized).toString());
    node = node.findValue("attributes");
    node = node.findValue("when");
    java.sql.Timestamp actual = new java.sql.Timestamp(node.getLongValue());
    Assert.assertEquals(expected, actual);
}