This page collects typical usage examples of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in Java. If you have been wondering what exactly TypeInfoFactory.stringTypeInfo does, how to use it, or where to find examples, the curated code samples below may help. Note that stringTypeInfo is a public static field of TypeInfoFactory rather than a method, so it is read directly instead of being called. You can also explore other members of the enclosing class, org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.
The following presents 15 code examples that use TypeInfoFactory.stringTypeInfo, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java samples.
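Before diving into the examples, here is a minimal, self-contained sketch of what the field provides. stringTypeInfo is a shared, immutable PrimitiveTypeInfo instance describing Hive's STRING type; the printed values below reflect recent Hive releases, so treat this as an illustration rather than a guarantee for every version.
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class StringTypeInfoSketch {
  public static void main(String[] args) {
    // stringTypeInfo is a public static field, so it is read, not called.
    TypeInfo stringType = TypeInfoFactory.stringTypeInfo;
    System.out.println(stringType.getTypeName()); // string
    System.out.println(stringType.getCategory()); // PRIMITIVE
    // The factory caches primitive TypeInfos, so looking one up by type
    // name typically yields the same shared instance.
    System.out.println(stringType == TypeInfoFactory.getPrimitiveTypeInfo("string"));
  }
}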
Example 1: readStringPredicatePushdown
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void readStringPredicatePushdown() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.stringTypeInfo;
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow("hello");
    writer.addRow("world");
  }
  StructTypeInfo structTypeInfo = new StructTypeInfoBuilder().add("a", TypeInfoFactory.stringTypeInfo).build();
  SearchArgument searchArgument = SearchArgumentFactory.newBuilder().startAnd().equals("a", "hello").end().build();
  OrcFile orcFile = OrcFile.source().columns(structTypeInfo).schemaFromFile().searchArgument(searchArgument).build();
  Tap<?, ?, ?> tap = new Hfs(orcFile, path);
  List<Tuple> list = Plunger.readDataFromTap(tap).asTupleList();
  assertThat(list.size(), is(1));
  assertThat(list.get(0).getObject(0), is((Object) "hello"));
}
Example 2: getTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
private TypeInfo getTypeInfo(String fieldType) {
  if (fieldType.equals(TEXT) || fieldType.equals(STRING) || fieldType.equals(STORED)) {
    return TypeInfoFactory.stringTypeInfo;
  } else if (fieldType.equals(LONG)) {
    return TypeInfoFactory.longTypeInfo;
  } else if (fieldType.equals(INT)) {
    return TypeInfoFactory.intTypeInfo;
  } else if (fieldType.equals(FLOAT)) {
    return TypeInfoFactory.floatTypeInfo;
  } else if (fieldType.equals(DOUBLE)) {
    return TypeInfoFactory.doubleTypeInfo;
  } else if (fieldType.equals(DATE)) {
    return TypeInfoFactory.dateTypeInfo;
  } else if (fieldType.equals(GEO_POINTVECTOR) || fieldType.equals(GEO_RECURSIVEPREFIX)
      || fieldType.equals(GEO_TERMPREFIX)) {
    // Geo types map to a struct of two float fields: latitude and longitude.
    List<TypeInfo> typeInfos = Arrays.asList((TypeInfo) TypeInfoFactory.floatTypeInfo,
        (TypeInfo) TypeInfoFactory.floatTypeInfo);
    return TypeInfoFactory.getStructTypeInfo(Arrays.asList(LATITUDE, LONGITUDE), typeInfos);
  }
  // Fall back to string for anything that is not a built-in type.
  return TypeInfoFactory.stringTypeInfo;
}
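As an aside, the struct branch above is easy to exercise in isolation. The sketch below assumes the LATITUDE and LONGITUDE constants are plain field-name strings; it shows how TypeInfoFactory.getStructTypeInfo renders the composite type:
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class GeoStructTypeSketch {
  public static void main(String[] args) {
    // Two float fields, matching the latitude/longitude pair above.
    List<TypeInfo> fieldTypes = Arrays.asList(
        (TypeInfo) TypeInfoFactory.floatTypeInfo,
        (TypeInfo) TypeInfoFactory.floatTypeInfo);
    TypeInfo geo = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("latitude", "longitude"), fieldTypes);
    System.out.println(geo.getTypeName()); // struct<latitude:float,longitude:float>
  }
}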
Example 3: getRowIDSearchCondition
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void getRowIDSearchCondition() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    assertEquals(sConditions.size(), 1);
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 4: rangeEqual
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void rangeEqual() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 1);
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertTrue(range.afterEndKey(new Key(new Text("aab"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 5: rangeGreaterThanOrEqual
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void rangeGreaterThanOrEqual() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 1);
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aab"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 6: pushdownTuple
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void pushdownTuple() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    assertEquals(sConditions.size(), 1);
    AccumuloPredicateHandler.PushdownTuple tuple = new AccumuloPredicateHandler.PushdownTuple(sConditions.get(0));
    byte[] expectedVal = new byte[4];
    ByteBuffer.wrap(expectedVal).putInt(5); // 5 encoded as a 4-byte big-endian int
    // Byte arrays must be compared by content, not by reference.
    assertArrayEquals(tuple.getConstVal(), expectedVal);
    assertEquals(tuple.getcOpt().getClass(), Equal.class);
    assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
  } catch (Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
Example 7: readString
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void readString() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.stringTypeInfo;
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow("hello");
    writer.addRow((Object) null);
  }
  List<Tuple> list = read(typeInfo);
  assertThat(list.size(), is(2));
  assertThat(list.get(0).getObject(0), is((Object) "hello"));
  assertThat(list.get(1).getObject(0), is(nullValue()));
}
Example 8: readIncorrectType
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test(expected = TupleException.class)
public void readIncorrectType() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.stringTypeInfo;
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow("hello");
  }
  Fields intField = new Fields("A", int.class);
  OrcFile orcFile = OrcFile.source().declaredFields(intField).schema(intField).build();
  Tap<?, ?, ?> tap = new Hfs(orcFile, path);
  Plunger.readDataFromTap(tap);
}
Example 9: rangeGreaterThan
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void rangeGreaterThan() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 1);
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertFalse(range.contains(new Key(new Text("aaa"))));
    assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aab"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aaa"))));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 10: rangeLessThan
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void rangeLessThan() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPLessThan(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 1);
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertFalse(range.contains(new Key(new Text("aaa"))));
    assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aa"))));
    assertTrue(range.afterEndKey(new Key(new Text("aab"))));
    assertTrue(range.afterEndKey(new Key(new Text("aaa"))));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 11: rangeLessThanOrEqual
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void rangeLessThanOrEqual() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  String filterExpr = Utilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 1);
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aa"))));
    assertTrue(range.afterEndKey(new Key(new Text("aab"))));
    assertFalse(range.afterEndKey(new Key(new Text("aaa"))));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 12: multiRange
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void multiRange() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
  assertNotNull(node2);
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  String filterExpr = Utilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    Collection<Range> ranges = handler.getRanges(conf);
    assertEquals(ranges.size(), 2);
    Iterator<Range> itr = ranges.iterator();
    Range range1 = itr.next();
    Range range2 = itr.next();
    assertNull(range1.clip(range2, true));
  } catch (Exception e) {
    fail("Error getting search conditions");
  }
}
Example 13: iteratorIgnoreRowIDFields
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void iteratorIgnoreRowIDFields() {
  setup();
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
  assertNotNull(node2);
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  String filterExpr = Utilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    List<IteratorSetting> iterators = handler.getIterators(conf);
    assertEquals(iterators.size(), 0);
  } catch (SerDeException e) {
    // Fail loudly instead of silently discarding the stringified exception.
    fail(StringUtils.stringifyException(e));
  }
}
Example 14: ignoreIteratorPushdown
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
@Test
public void ignoreIteratorPushdown() {
  setup();
  conf.set(serdeConstants.LIST_COLUMNS, "field1,field2,rid");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
  conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,cf|f2,rowID");
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
  assertNotNull(node2);
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  String filterExpr = Utilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  conf.setBoolean(AccumuloSerde.NO_ITERATOR_PUSHDOWN, true);
  try {
    List<IteratorSetting> iterators = handler.getIterators(conf);
    assertEquals(iterators.size(), 0);
  } catch (Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
Example 15: PhoenixStringObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the class this example depends on
public PhoenixStringObjectInspector(boolean escaped, byte escapeChar) {
  super(TypeInfoFactory.stringTypeInfo);
  this.escaped = escaped;
  this.escapeChar = escapeChar;
}