This article collects typical Java usage examples of PrimitiveObjectInspectorFactory.writableLongObjectInspector, a static field of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory. If you are wondering what writableLongObjectInspector is for, how to use it, or want to see it in real code, the curated examples below should help. You can also explore further usage examples of the enclosing class PrimitiveObjectInspectorFactory.
Nine code examples of PrimitiveObjectInspectorFactory.writableLongObjectInspector are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java examples.
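Before the examples, here is a minimal standalone sketch (written for this page, not taken from any of the projects below; the class name is invented) showing what the writableLongObjectInspector singleton does: it inspects and creates BIGINT values backed by org.apache.hadoop.io.LongWritable.
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector;
import org.apache.hadoop.io.LongWritable;

public class WritableLongOIDemo {
    public static void main(String[] args) {
        // the factory exposes one shared, stateless inspector instance for BIGINT
        WritableLongObjectInspector oi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        Object boxed = oi.create(42L);                  // creates a LongWritable holding 42
        long unboxed = oi.get(boxed);                   // reads the primitive long back out
        LongWritable writable = (LongWritable) oi.getPrimitiveWritableObject(boxed);
        System.out.println(unboxed + " / " + writable.get());   // prints "42 / 42"
    }
}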
Example 1: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
/**
 * Initialize the HiveUDF and create the object inspectors. Requires exactly one argument, and the ObjectInspector of arguments[0] must be a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector able to interpret the result of the UDF's evaluate method (a BytesWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException if the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException if the number of arguments is not exactly 1
 *
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these are only used for bitcointransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    // the UDF returns the hash value of a BitcoinTransaction as byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
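As a hedged follow-up sketch (not part of the project the snippet comes from; the member name "lockTime" is assumed purely for illustration), the inspectors cached above would typically be used in evaluate() to pull members out of the incoming struct:
// inside evaluate(): "structData" is the incoming struct value, "soi"/"wloi" are the
// inspectors stored in initialize(); the member name below is hypothetical
StructField someLongField = soi.getStructFieldRef("lockTime");
Object rawValue = soi.getStructFieldData(structData, someLongField);
long value = wloi.get(rawValue);   // works when the member is exposed as a LongWritable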
Example 2: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
/**
 * Initialize the HiveUDF and create the object inspectors. Requires exactly one argument, and the ObjectInspector of arguments[0] must be a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector able to interpret the result of the UDF's evaluate method (a BytesWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException if the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException if the number of arguments is not exactly 1
 *
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these are only used for bitcointransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    this.wbyoi = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
    // the UDF returns the hash value of a BitcoinTransaction as byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Example 3: merge
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
void merge(final int o_size, @Nonnull final Object o_sum, @Nonnull final Object o_count,
        @Nonnull final StandardListObjectInspector sumOI,
        @Nonnull final StandardListObjectInspector countOI) throws HiveException {
    final WritableDoubleObjectInspector sumElemOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    final WritableLongObjectInspector countElemOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    if (o_size != _size) {
        if (_size == -1) {
            init(o_size);
        } else {
            throw new HiveException("Mismatch in the number of elements");
        }
    }
    final double[] sum = _sum;
    final long[] count = _count;
    for (int i = 0, len = _size; i < len; i++) {
        Object sum_e = sumOI.getListElement(o_sum, i);
        sum[i] += sumElemOI.get(sum_e);
        Object count_e = countOI.getListElement(o_count, i);
        count[i] += countElemOI.get(count_e);
    }
}
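A hedged usage sketch follows ("buffer" and the partial values are invented for illustration; they are not part of the snippet above): the partial sums and counts arrive as plain Java lists of Hive writables, so the caller wraps the element inspectors in standard list inspectors before handing them to merge().
// DoubleWritable here is org.apache.hadoop.hive.serde2.io.DoubleWritable
StandardListObjectInspector sumOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
StandardListObjectInspector countOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
List<DoubleWritable> partialSum = Arrays.asList(new DoubleWritable(1.5d), new DoubleWritable(2.5d));
List<LongWritable> partialCount = Arrays.asList(new LongWritable(3L), new LongWritable(4L));
buffer.merge(2, partialSum, partialCount, sumOI, countOI);   // "buffer" holds the _sum/_count state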
Example 4: getObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
    if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    } else {
        throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
    }
}
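A short, hedged usage note (assuming access to the private helper above): a bigint TypeInfo resolves to the shared writableLongObjectInspector instance, which can then read LongWritable values directly.
ObjectInspector longOI = getObjectInspector(TypeInfoFactory.longTypeInfo);
// longOI is the same singleton as PrimitiveObjectInspectorFactory.writableLongObjectInspector
long v = ((WritableLongObjectInspector) longOI).get(new LongWritable(7L));   // v == 7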
Example 5: newObjectInspectorFromHiveType
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
private static ObjectInspector newObjectInspectorFromHiveType(final ASTNode type) {
    // matching by token names, because token IDs (which are static final) drastically change between versions.
    switch (type.getToken().getText()) {
        case "TOK_STRING":
            return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        case "TOK_INT":
            return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        case "TOK_DOUBLE":
            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        case "TOK_FLOAT":
            return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
        case "TOK_BIGINT":
            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        case "TOK_BOOLEAN": {
            return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        }
        case "TOK_STRUCT": {
            final ASTNode tabColList = (ASTNode) type.getChild(0);
            final List<String> names = new ArrayList<>();
            final List<ObjectInspector> ois = new ArrayList<>();
            for (final Node tabCol : tabColList.getChildren()) {
                final ASTNode a = (ASTNode) tabCol;
                names.add(a.getChild(0).toString());
                ois.add(newObjectInspectorFromHiveType((ASTNode) a.getChild(1)));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
        }
        case "TOK_MAP": {
            final ObjectInspector keyType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            final ObjectInspector valueType = newObjectInspectorFromHiveType((ASTNode) type.getChild(1));
            return ObjectInspectorFactory.getStandardMapObjectInspector(keyType, valueType);
        }
        case "TOK_LIST": {
            final ObjectInspector itemType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            return ObjectInspectorFactory.getStandardListObjectInspector(itemType);
        }
        default:
            throw new IllegalArgumentException("unsupported type: " + type.toStringTree());
    }
}
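For primitive types the same inspectors can also be obtained without walking the parser AST; this hedged aside (not taken from the project the snippet comes from) shows the TypeInfoUtils route for a bigint:
TypeInfo bigintInfo = TypeInfoUtils.getTypeInfoFromTypeString("bigint");
ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(bigintInfo);
// oi is the same singleton as PrimitiveObjectInspectorFactory.writableLongObjectInspector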
Example 6: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 3) {
        throw new UDFArgumentException("_FUNC_ takes exactly 3 arguments: " + argOIs.length);
    }
    this.latOI = HiveUtils.asDoubleCompatibleOI(argOIs[0]);
    this.lonOI = HiveUtils.asDoubleCompatibleOI(argOIs[1]);
    this.zoomOI = HiveUtils.asIntegerOI(argOIs[2]);
    this.result = new LongWritable();
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
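A hedged sketch of a matching evaluate() (assuming latOI/lonOI/zoomOI are PrimitiveObjectInspector fields; the tile computation itself is a hypothetical placeholder, not the project's actual code): the inspectors saved above normalize the arguments, and the pre-allocated LongWritable is reused for the BIGINT result declared by the returned inspector.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object lat = arguments[0].get();
    Object lon = arguments[1].get();
    Object zoom = arguments[2].get();
    if (lat == null || lon == null || zoom == null) {
        return null;
    }
    double latitude = PrimitiveObjectInspectorUtils.getDouble(lat, latOI);
    double longitude = PrimitiveObjectInspectorUtils.getDouble(lon, lonOI);
    int z = PrimitiveObjectInspectorUtils.getInt(zoom, zoomOI);
    result.set(computeTileId(latitude, longitude, z));   // computeTileId is a hypothetical helper
    return result;
}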
Example 7: craeteObjectInspectorFromTypeInfo
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
public static ObjectInspector craeteObjectInspectorFromTypeInfo( final TypeInfo typeInfo ){
    switch ( typeInfo.getCategory() ){
        case STRUCT:
            return new MDSStructObjectInspector( (StructTypeInfo)typeInfo );
        case MAP:
            return new MDSMapObjectInspector( (MapTypeInfo)typeInfo );
        case LIST:
            return new MDSListObjectInspector( (ListTypeInfo)typeInfo );
        case UNION:
            UnionTypeInfo unionTypeInfo = (UnionTypeInfo)typeInfo;
            List<ObjectInspector> unionList = new ArrayList<ObjectInspector>();
            for( TypeInfo childTypeInfo : unionTypeInfo.getAllUnionObjectTypeInfos() ){
                unionList.add( craeteObjectInspectorFromTypeInfo( childTypeInfo ) );
            }
            return ObjectInspectorFactory.getStandardUnionObjectInspector( unionList );
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo)typeInfo;
            switch( primitiveTypeInfo.getPrimitiveCategory() ){
                case STRING:
                    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
                case BINARY:
                    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
                case BOOLEAN:
                    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
                case BYTE:
                    return PrimitiveObjectInspectorFactory.writableByteObjectInspector;
                case DOUBLE:
                    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
                case FLOAT:
                    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
                case INT:
                    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
                case LONG:
                    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
                case SHORT:
                    return PrimitiveObjectInspectorFactory.writableShortObjectInspector;
                case DATE:
                case DECIMAL:
                case TIMESTAMP:
                case VOID:
                default:
                    throw new UnsupportedOperationException( "Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory() );
            }
        default:
            throw new UnsupportedOperationException( "Unknown category " + typeInfo.getCategory() );
    }
}
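A brief, hedged usage example (the column type is chosen only for illustration): for the LONG primitive category the helper above returns the shared writableLongObjectInspector.
ObjectInspector oi = craeteObjectInspectorFromTypeInfo( TypeInfoFactory.longTypeInfo );
long v = ((WritableLongObjectInspector) oi).get( new LongWritable( 123L ) );   // v == 123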
Example 8: testWritableLongFeature
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
@Test
public void testWritableLongFeature() throws Exception {
    List<LongWritable> x = Arrays.asList(new LongWritable(111L), new LongWritable(222L));
    ObjectInspector featureOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    testFeature(x, featureOI, LongWritable.class, Long.class);
}
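As a hedged aside (not part of the test above): if the feature list held plain Java Long values instead of LongWritable, the java-primitive counterpart inspector would be the right choice.
List<Long> y = Arrays.asList(111L, 222L);
ObjectInspector javaFeatureOI = PrimitiveObjectInspectorFactory.javaLongObjectInspector;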
Example 9: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this example depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);
    checkArgPrimitive(arguments, 0);
    checkArgGroups(arguments, 0, inputTypes, NUMERIC_GROUP, VOID_GROUP);
    obtainIntConverter(arguments, 0, inputTypes, converters);
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    return outputOI;
}