本文整理匯總了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector方法的典型用法代碼示例。如果您正苦於以下問題:Java PrimitiveObjectInspectorFactory.javaStringObjectInspector方法的具體用法?Java PrimitiveObjectInspectorFactory.javaStringObjectInspector怎麽用?Java PrimitiveObjectInspectorFactory.javaStringObjectInspector使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
的用法示例。
在下文中一共展示了PrimitiveObjectInspectorFactory.javaStringObjectInspector方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: MDSMapObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
else{
throw new RuntimeException( "Map key type is string only." );
}
valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() );
if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
}
else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
}
else{
getField = new NestedGetField();
}
}
示例2: testReverseTailK
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testReverseTailK() throws Exception {
    // Reverse tail-k ("-k -2 -reverse") behaves like top-k: keep the 2 largest.
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    ObjectInspector optionOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k -2 -reverse");
    ObjectInspector[] inputOIs = new ObjectInspector[] {valueOI, optionOI};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (String value : new String[] {"banana", "apple", "candy"}) {
        evaluator.iterate(agg, new Object[] {value});
    }

    List<Object> res = evaluator.terminate(agg);
    // Expect the two lexicographically-largest values, in descending order.
    Assert.assertEquals(2, res.size());
    Assert.assertEquals("candy", res.get(0));
    Assert.assertEquals("banana", res.get(1));
}
示例3: testReverseTailKWithKey
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testReverseTailKWithKey() throws Exception {
    // Reverse tail-k with an explicit double sort key behaves like top-k by key.
    final String[] values = {"banana", "apple", "candy"};
    final double[] keys = {0.7, 0.5, 0.8};

    ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k -2 -reverse")};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int idx = 0; idx < values.length; idx++) {
        evaluator.iterate(agg, new Object[] {values[idx], keys[idx]});
    }

    List<Object> res = evaluator.terminate(agg);
    // Only the two largest keys survive: 0.8 -> candy, then 0.7 -> banana.
    Assert.assertEquals(2, res.size());
    Assert.assertEquals("candy", res.get(0));
    Assert.assertEquals("banana", res.get(1));
}
示例4: BitcoinTransactionHashUDFInvalidArguments
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Verifies that BitcoinTransactionHashUDF.initialize rejects invalid argument
 * lists: null, wrong arity, and a single argument of an unsupported type.
 */
@Test
public void BitcoinTransactionHashUDFInvalidArguments() throws HiveException {
    final BitcoinTransactionHashUDF bthUDF = new BitcoinTransactionHashUDF();
    // null argument array -> length exception
    assertThrows(UDFArgumentLengthException.class, () -> bthUDF.initialize(null),
        "Exception is thrown in case of null parameter");
    // wrong number of arguments -> length exception
    assertThrows(UDFArgumentLengthException.class, () -> bthUDF.initialize(new ObjectInspector[2]),
        "Exception is thrown in case of invalid length parameter");
    // single argument of an unsupported (string) type -> argument exception
    StringObjectInspector[] testStringOI = new StringObjectInspector[1];
    testStringOI[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    assertThrows(UDFArgumentException.class, () -> bthUDF.initialize(testStringOI),
        "Exception is thrown in case of invalid type of parameter");
}
示例5: BitcoinTransactionHashSegwitUDFInvalidArguments
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Verifies that BitcoinTransactionHashSegwitUDF.initialize rejects invalid
 * argument lists: null, wrong arity, and a single argument of an unsupported type.
 */
@Test
public void BitcoinTransactionHashSegwitUDFInvalidArguments() throws HiveException {
    final BitcoinTransactionHashSegwitUDF bthUDF = new BitcoinTransactionHashSegwitUDF();
    // null argument array -> length exception
    assertThrows(UDFArgumentLengthException.class, () -> bthUDF.initialize(null),
        "Exception is thrown in case of null parameter");
    // wrong number of arguments -> length exception
    assertThrows(UDFArgumentLengthException.class, () -> bthUDF.initialize(new ObjectInspector[2]),
        "Exception is thrown in case of invalid length parameter");
    // single argument of an unsupported (string) type -> argument exception
    StringObjectInspector[] testStringOI = new StringObjectInspector[1];
    testStringOI[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    assertThrows(UDFArgumentException.class, () -> bthUDF.initialize(testStringOI),
        "Exception is thrown in case of invalid type of parameter");
}
示例6: testTailK
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testTailK() throws Exception {
    // tail_k with a negative size keeps the |size| smallest keys, ascending.
    TailKOrderedMapEvaluator evaluator = new TailKOrderedMapEvaluator();
    TailKOrderedMapEvaluator.MapAggregationBuffer agg =
        (TailKOrderedMapEvaluator.MapAggregationBuffer) evaluator.getNewAggregationBuffer();

    ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector};
    final double[] keys = {0.7, 0.5, 0.8};
    final String[] values = {"banana", "apple", "candy"};
    final int size = -2;

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int idx = 0; idx < keys.length; idx++) {
        evaluator.iterate(agg, new Object[] {keys[idx], values[idx], size});
    }

    Map<Object, Object> res = evaluator.terminate(agg);
    Object[] sortedValues = res.values().toArray();
    // Two smallest keys: 0.5 -> apple, then 0.7 -> banana.
    Assert.assertEquals(Math.abs(size), sortedValues.length);
    Assert.assertEquals("apple", sortedValues[0]);
    Assert.assertEquals("banana", sortedValues[1]);
    evaluator.close();
}
示例7: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Initializes extract_feature(string feature) -> string.
 * Accepts exactly one argument, which must be a primitive string.
 *
 * @throws UDFArgumentException if the arity or argument type is wrong
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        // Fixed grammar of the original message ("has an single arguments").
        throw new UDFArgumentLengthException(
            "extract_feature() has a single argument: string feature");
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    if (argumentOI.getPrimitiveCategory() != PrimitiveCategory.STRING) {
        throw new UDFArgumentTypeException(0, "Type mismatch: feature");
    }
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
示例8: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Initializes row_number() -> string; the function takes no arguments.
 *
 * @throws UDFArgumentException if any argument is supplied
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    final int numArgs = arguments.length;
    if (numArgs != 0) {
        throw new UDFArgumentLengthException("row_number() has no argument.");
    }
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
示例9: testStringDouble
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testStringDouble() throws Exception {
    // (string feature, double weight) evaluates to "feature:weight".
    udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector});

    GenericUDF.DeferredObject[] args = new GenericUDF.DeferredObject[] {
        new DeferredJavaObject("f1"), new DeferredJavaObject(2.5d)};
    Text ret = udf.evaluate(args);

    Assert.assertEquals("f1:2.5", ret.toString());
}
示例10: testReverseOrder
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testReverseOrder() throws Exception {
    // With the reverse flag, entries come back sorted by key descending.
    ReverseOrderedMapEvaluator evaluator = new ReverseOrderedMapEvaluator();
    ReverseOrderedMapEvaluator.MapAggregationBuffer agg =
        (ReverseOrderedMapEvaluator.MapAggregationBuffer) evaluator.getNewAggregationBuffer();

    ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector};
    final double[] keys = {0.7, 0.5, 0.8};
    final String[] values = {"banana", "apple", "candy"};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int idx = 0; idx < keys.length; idx++) {
        evaluator.iterate(agg, new Object[] {keys[idx], values[idx]});
    }

    Map<Object, Object> res = evaluator.terminate(agg);
    Object[] sortedValues = res.values().toArray();
    // Descending key order: 0.8 -> candy, 0.7 -> banana, 0.5 -> apple.
    Assert.assertEquals(3, sortedValues.length);
    Assert.assertEquals("candy", sortedValues[0]);
    Assert.assertEquals("banana", sortedValues[1]);
    Assert.assertEquals("apple", sortedValues[2]);
    evaluator.close();
}
示例11: testPrimitiveParam
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * merge() must reject a primitive (non-list) parameter with a
 * UDFArgumentTypeException when resolving its evaluator.
 */
@Test(expected = UDFArgumentTypeException.class)
public void testPrimitiveParam() throws HiveException {
    Merge udaf = new Merge();
    ObjectInspector[] inputObjectInspectorList = new ObjectInspector[]{
        PrimitiveObjectInspectorFactory.javaStringObjectInspector
    };
    GenericUDAFParameterInfo paramInfo =
        new SimpleGenericUDAFParameterInfo(inputObjectInspectorList, false, false);
    // Expected to throw; the returned evaluator is irrelevant (unused local removed).
    udaf.getEvaluator(paramInfo);
}
示例12: testExpectedMode
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
// NOTE(review): unlike the sibling testInvalidMode, this method carries no @Test
// annotation — confirm whether the enclosing class uses a JUnit 3 style runner or
// whether the annotation was accidentally omitted.
public void testExpectedMode() throws UDFArgumentException, IOException {
    GenericUDF udf = new KuromojiUDF();

    // Constant string OI carrying the supported mode "normal".
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    ObjectInspector modeOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, new Text("normal"));

    // arg 0: the line to tokenize; arg 1: the tokenizer mode.
    ObjectInspector[] argOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, modeOI};

    udf.initialize(argOIs);
    udf.close();
}
示例13: testInvalidMode
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test(expected = UDFArgumentException.class)
public void testInvalidMode() throws UDFArgumentException, IOException {
    // An unrecognized mode string must make initialize() throw.
    GenericUDF udf = new KuromojiUDF();

    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    ObjectInspector modeOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, new Text("unsupported mode"));

    // arg 0: the line to tokenize; arg 1: the (invalid) tokenizer mode.
    ObjectInspector[] argOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, modeOI};

    udf.initialize(argOIs);
    udf.close();
}
示例14: testMismatch
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test(expected = UDFArgumentException.class)
public void testMismatch() throws HiveException, IOException {
    // Argument types that do not match the expected signature must be rejected.
    VectorizeFeaturesUDF udf = new VectorizeFeaturesUDF();

    List<String> featureNames = Arrays.asList("a", "b", "c");
    ObjectInspector namesOI = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, featureNames);

    ObjectInspector[] argOIs = new ObjectInspector[] {
        namesOI,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector};

    udf.initialize(argOIs);
    udf.close();
}
示例15: testUnsupportedRegularization
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test(expected = UDFArgumentException.class)
public void testUnsupportedRegularization() throws Exception {
GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
ListObjectInspector stringListOI = ObjectInspectorFactory.getStandardListObjectInspector(stringOI);
ObjectInspector params = ObjectInspectorUtils.getConstantObjectInspector(
PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-reg UnsupportedReg");
udtf.initialize(new ObjectInspector[] {stringListOI, floatOI, params});
}