本文整理汇总了Java中org.apache.hadoop.hive.serde2.AbstractSerDe类的典型用法代码示例。如果您正苦于以下问题:Java AbstractSerDe类的具体用法?Java AbstractSerDe怎么用?Java AbstractSerDe使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
AbstractSerDe类属于org.apache.hadoop.hive.serde2包,在下文中一共展示了AbstractSerDe类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: initHeaderSerde
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Lazily initializes the serde used to render the header row.
 *
 * Reads the serde class from QUERY_OUTPUT_SERDE (falling back to
 * DEFAULT_OUTPUT_SERDE) and primes it with the column names and types
 * collected so far, then builds the matching struct object inspector.
 *
 * @throws ClassNotFoundException if the default serde class cannot be loaded
 * @throws SerDeException if serde initialization fails
 */
@SuppressWarnings("unchecked")
private void initHeaderSerde() throws ClassNotFoundException, SerDeException {
  if (headerSerde == null) {
    // Bound the configured class by AbstractSerDe.class so getClass() validates
    // against the same type the unchecked cast promises, rather than the looser
    // legacy SerDe interface.
    headerSerde = ReflectionUtils.newInstance(
      ctx.getConf().getClass(LensConfConstants.QUERY_OUTPUT_SERDE,
        (Class<? extends AbstractSerDe>) Class.forName(LensConfConstants.DEFAULT_OUTPUT_SERDE),
        AbstractSerDe.class),
      ctx.getConf());
    Properties hprops = new Properties();
    if (!columnNames.isEmpty()) {
      hprops.setProperty(serdeConstants.LIST_COLUMNS, StringUtils.join(escapedColumnNames, ","));
    }
    if (htypes.length() > 0) {
      hprops.setProperty(serdeConstants.LIST_COLUMN_TYPES, htypes);
    }
    headerSerde.initialize(ctx.getConf(), hprops);
    headerOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnHeaderOIs);
  }
}
示例2: getShipFiles
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Lists one representative class per jar that must be shipped to the backend
 * so the corresponding jars end up on the task classpath.
 *
 * @return jar paths resolved by {@code FuncUtils.getShipFiles}
 */
@Override
public List<String> getShipFiles() {
    // Pick the shims implementation matching the detected Hadoop version.
    String hadoopVersion = "20S";
    if (Utils.isHadoop23() || Utils.isHadoop2()) {
        hadoopVersion = "23";
    }
    Class<?> hadoopVersionShimsClass;
    try {
        hadoopVersionShimsClass = Class.forName("org.apache.hadoop.hive.shims.Hadoop" +
            hadoopVersion + "Shims");
    } catch (ClassNotFoundException e) {
        // Preserve the original exception as the cause for easier diagnosis.
        throw new RuntimeException("Cannot find Hadoop" + hadoopVersion + "ShimsClass in classpath", e);
    }
    // Removed the unused cacheFiles list; use wildcarded Class<?> over raw types.
    Class<?>[] classList = new Class<?>[] {OrcFile.class, HiveConf.class, AbstractSerDe.class,
        org.apache.hadoop.hive.shims.HadoopShims.class, HadoopShimsSecure.class, hadoopVersionShimsClass,
        Input.class};
    return FuncUtils.getShipFiles(classList);
}
示例3: TestIntWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestIntWrite() throws Exception {
    // Table layout: a single int column named "num".
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();

    // Expected serialization: {"properties":{"num":7}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Writable serialized = serde.serialize(row, inspector);

    JsonNode root = new ObjectMapper().readTree(((Text) serialized).toString());
    Assert.assertEquals(7, root.findValue("properties").findValue("num").getIntValue());
}
示例4: TestEpochWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
/**
 * Verifies that a date column serializes at day granularity: DateWritable
 * stores whole days, not milliseconds, so compare epoch/day quotients.
 */
@Test
public void TestEpochWrite() throws Exception {
    ArrayList<Object> stuff = new ArrayList<Object>();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe jserde = mkSerDe(proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
    // {"properties":{"when":147147147147}}
    long epoch = 0L; // 147147147147L;
    // Shift by the local zone offset so the serialized value lands on the
    // intended UTC day regardless of where the test runs.
    java.sql.Date expected = new java.sql.Date(epoch - TimeZone.getDefault().getOffset(epoch));
    addWritable(stuff, expected);
    Writable jsw = jserde.serialize(stuff, rowOI);
    JsonNode jn = new ObjectMapper().readTree(((Text) jsw).toString());
    jn = jn.findValue("properties");
    jn = jn.findValue("when");
    // Removed the unused intermediate java.sql.Date; assert directly on the
    // parsed long. DateWritable stores days not milliseconds.
    long day = 24 * 3600 * 1000;
    Assert.assertEquals(epoch / day, jn.getLongValue() / day);
}
示例5: TestPointWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestPointWrite() throws Exception {
    // Table layout: one binary column carrying the geometry.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();

    // Expected: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, new Point(15.0, 5.0));
    Writable serialized = serde.serialize(row, inspector);

    JsonNode geometry = new ObjectMapper()
        .readTree(((Text) serialized).toString())
        .findValue("geometry");
    Assert.assertNotNull(geometry.findValue("type"));
    Assert.assertNotNull(geometry.findValue("coordinates"));
}
示例6: TestIntParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestIntParse() throws Exception {
    // Deserialize two rows of a single int column and read the field back.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serde = new GeoJsonSerDe();
    serde.initialize(new Configuration(), tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    StructField numField = inspector.getStructFieldRef("num");
    Text json = new Text();

    json.set("{\"properties\":{\"num\":7}}");
    Object parsed = serde.deserialize(json);
    Assert.assertEquals(7, ((IntWritable) inspector.getStructFieldData(parsed, numField)).get());

    json.set("{\"properties\":{\"num\":9}}");
    parsed = serde.deserialize(json);
    Assert.assertEquals(9, ((IntWritable) inspector.getStructFieldData(parsed, numField)).get());
}
示例7: TestDateParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestDateParse() throws Exception {
    // ISO date strings in the JSON should round-trip through DateWritable.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe serde = new GeoJsonSerDe();
    serde.initialize(new Configuration(), tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    StructField whenField = inspector.getStructFieldRef("when");
    Text json = new Text();

    json.set("{\"properties\":{\"when\":\"2020-02-20\"}}");
    Object parsed = serde.deserialize(json);
    Assert.assertEquals("2020-02-20",
        ((DateWritable) inspector.getStructFieldData(parsed, whenField)).get().toString());

    json.set("{\"properties\":{\"when\":\"2017-05-05\"}}");
    parsed = serde.deserialize(json);
    Assert.assertEquals("2017-05-05",
        ((DateWritable) inspector.getStructFieldData(parsed, whenField)).get().toString());
}
示例8: TestEpochParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestEpochParse() throws Exception {
    // Epoch-millisecond values should be truncated to dates; compare string
    // forms because DateWritable keeps day precision only.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    AbstractSerDe serde = new GeoJsonSerDe();
    serde.initialize(new Configuration(), tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    StructField whenField = inspector.getStructFieldRef("when");
    Text json = new Text();

    json.set("{\"properties\":{\"when\":147147147147}}");
    Object parsed = serde.deserialize(json);
    Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
        ((DateWritable) inspector.getStructFieldData(parsed, whenField)).get().toString());

    json.set("{\"properties\":{\"when\":142857142857}}");
    parsed = serde.deserialize(json);
    Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
        ((DateWritable) inspector.getStructFieldData(parsed, whenField)).get().toString());
}
示例9: TestPointParse
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestPointParse() throws Exception {
    // Parse GeoJSON Point geometries into the binary "shape" column.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serde = new GeoJsonSerDe();
    serde.initialize(new Configuration(), tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    Text json = new Text();

    json.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"coordinates\":[15.0,5.0]}}");
    Object parsed = serde.deserialize(json);
    StructField shapeField = inspector.getStructFieldRef("shape");
    ckPoint(new Point(15.0, 5.0), (BytesWritable) inspector.getStructFieldData(parsed, shapeField));

    // Duplicate "type" key preserved exactly from the original test fixture.
    json.set("{\"properties\":{},\"geometry\":{\"type\":\"Point\",\"type\":\"Point\",\"coordinates\":[7.0,4.0]}}");
    parsed = serde.deserialize(json);
    shapeField = inspector.getStructFieldRef("shape");
    ckPoint(new Point(7.0, 4.0), (BytesWritable) inspector.getStructFieldData(parsed, shapeField));
}
示例10: TestIntOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestIntOnly() throws Exception { // Is this valid for GeoJSON?
    // Round-trip a single int column through serialize + deserialize.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    ArrayList<Object> row = new ArrayList<Object>();

    // Equivalent JSON: {"properties":{"num":7}}
    addWritable(row, 7);
    Object parsed = runSerDe(row, serde, inspector);
    Assert.assertEquals(7, ((IntWritable) getField("num", parsed, inspector)).get());

    row.clear();
    addWritable(row, 9);
    parsed = runSerDe(row, serde, inspector);
    Assert.assertEquals(9, ((IntWritable) getField("num", parsed, inspector)).get());
}
示例11: TestPointOnly
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestPointOnly() throws Exception {
    // Round-trip a single binary geometry column through serialize + deserialize.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    ArrayList<Object> row = new ArrayList<Object>();

    // Equivalent JSON: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serde, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // Equivalent JSON: {"properties":{},"geometry":{"type":"Point","coordinates":[7.0,4.0]}}
    row.clear();
    addWritable(row, new Point(7.0, 4.0));
    parsed = runSerDe(row, serde, inspector);
    ckPoint(new Point(7.0, 4.0), (BytesWritable) getField("shape", parsed, inspector));
}
示例12: TestIntPoint
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestIntPoint() throws Exception {
    // Round-trip a bigint attribute together with a binary geometry column.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num,shape");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "bigint,binary");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    ArrayList<Object> row = new ArrayList<Object>();

    // Equivalent JSON: {"properties":{"num":7},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    addWritable(row, 7L);
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serde, inspector);
    Assert.assertEquals(7, ((LongWritable) getField("num", parsed, inspector)).get());

    // Equivalent JSON: {"properties":{"num":4},"geometry":{"type":"Point","coordinates":[7.0,2.0]}}
    row.clear();
    addWritable(row, 4L);
    addWritable(row, new Point(7.0, 2.0));
    parsed = runSerDe(row, serde, inspector);
    Assert.assertEquals(4, ((LongWritable) getField("num", parsed, inspector)).get());
    ckPoint(new Point(7.0, 2.0), (BytesWritable) getField("shape", parsed, inspector));
}
示例13: TestNullAttr
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestNullAttr() throws Exception {
    // A null attribute value should deserialize back to a null field.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    ArrayList<Object> row = new ArrayList<Object>();

    // With a value present: {"properties":{"num":7}}
    addWritable(row, 7);
    Object parsed = runSerDe(row, serde, inspector);
    Assert.assertEquals(7, ((IntWritable) getField("num", parsed, inspector)).get());

    // With the value nulled out: {"properties":{}}
    row.set(0, null);
    parsed = runSerDe(row, serde, inspector);
    Assert.assertNull(getField("num", parsed, inspector));
}
示例14: TestNullGeom
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestNullGeom() throws Exception {
    // A null geometry should deserialize back to a null shape field.
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "shape");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "binary");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();
    ArrayList<Object> row = new ArrayList<Object>();

    // With a geometry present: {"properties":{},"geometry":{"type":"Point","coordinates":[15.0,5.0]}}
    addWritable(row, new Point(15.0, 5.0));
    Object parsed = runSerDe(row, serde, inspector);
    ckPoint(new Point(15.0, 5.0), (BytesWritable) getField("shape", parsed, inspector));

    // With the geometry nulled out: {"properties":{},"coordinates":null}
    row.set(0, null);
    parsed = runSerDe(row, serde, inspector);
    Assert.assertNull(getField("shape", parsed, inspector));
}
示例15: TestIntWrite
import org.apache.hadoop.hive.serde2.AbstractSerDe; //导入依赖的package包/类
@Test
public void TestIntWrite() throws Exception {
    // Table layout: a single int column named "num"; this serde variant nests
    // fields under "attributes" rather than "properties".
    Properties tableProps = new Properties();
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    tableProps.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    AbstractSerDe serde = mkSerDe(tableProps);
    StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector();

    // Expected serialization: {"attributes":{"num":7}}
    ArrayList<Object> row = new ArrayList<Object>();
    addWritable(row, 7);
    Writable serialized = serde.serialize(row, inspector);

    JsonNode root = new ObjectMapper().readTree(((Text) serialized).toString());
    Assert.assertEquals(7, root.findValue("attributes").findValue("num").getIntValue());
}