This article collects typical usage examples of PrimitiveObjectInspectorFactory.writableIntObjectInspector from the Java package org.apache.hadoop.hive.serde2.objectinspector.primitive. (Strictly speaking it is a public static field of the factory class, not a method.) If you are wondering what PrimitiveObjectInspectorFactory.writableIntObjectInspector is for or how to use it, the curated examples below may help. You can also read further about the enclosing class, org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.
The following 14 code examples of PrimitiveObjectInspectorFactory.writableIntObjectInspector are shown below, sorted by popularity.
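As background: writableIntObjectInspector is a singleton ObjectInspector describing Hadoop's IntWritable. The usual pairing, which nearly every example below follows, is that initialize() returns this inspector and evaluate() returns an IntWritable. A minimal, hypothetical GenericUDF sketch (the class name and constant are illustrative assumptions, not taken from any example below):

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

// Hypothetical zero-argument UDF, shown only to illustrate the inspector/result pairing.
public class AnswerUDF extends GenericUDF {
    private final IntWritable result = new IntWritable();

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // Tell Hive this UDF returns an int, materialized as IntWritable.
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        result.set(42); // the returned object must match the inspector from initialize()
        return result;
    }

    @Override
    public String getDisplayString(String[] children) {
        return "answer()";
    }
}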
Example 1: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);

    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);

    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);

    obtainStringConverter(arguments, 0, inputTypes, converters);
    obtainStringConverter(arguments, 1, inputTypes, converters);

    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    return outputOI;
}
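The checkArgsSize/obtainStringConverter helpers here are utilities inherited from GenericUDF. The matching evaluate() for this kind of UDF typically pulls both arguments through the stored converters and writes the computed int into a reusable IntWritable. A hedged sketch of that other half, continuing the same class (computeIntResult is a hypothetical stand-in for whatever the UDF actually calculates):

private final IntWritable output = new IntWritable();

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object a0 = arguments[0].get();
    Object a1 = arguments[1].get();
    if (a0 == null || a1 == null) {
        return null; // propagate SQL NULL
    }
    String s0 = converters[0].convert(a0).toString();
    String s1 = converters[1].convert(a1).toString();
    output.set(computeIntResult(s0, s1)); // result described by writableIntObjectInspector
    return output;
}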
Example 2: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
/**
 * Initialize HiveUDF and create object inspectors. Requires exactly one argument, and the
 * ObjectInspector of arguments[0] must be a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector that is able to parse the result of the evaluate method of the UDF (BinaryWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null || arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these are only used for BitcoinTransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    // the UDF returns the hash value of a BitcoinTransaction as a byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Example 3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
/**
 * Initialize HiveUDF and create object inspectors. Requires exactly one argument, and the
 * ObjectInspector of arguments[0] must be a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector that is able to parse the result of the evaluate method of the UDF (BinaryWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null || arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these are only used for BitcoinTransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    this.wbyoi = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
    // the UDF returns the hash value of a BitcoinTransaction as a byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Example 4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 4 && argOIs.length != 5) {
        throw new UDFArgumentException("_FUNC_ takes 4 or 5 arguments");
    }

    this.modelTypeOI = HiveUtils.asIntegerOI(argOIs[1]);
    this.stringOI = HiveUtils.asStringOI(argOIs[2]);
    ListObjectInspector listOI = HiveUtils.asListOI(argOIs[3]);
    this.featureListOI = listOI;
    ObjectInspector elemOI = listOI.getListElementObjectInspector();
    this.featureElemOI = HiveUtils.asDoubleCompatibleOI(elemOI);

    boolean classification = false;
    if (argOIs.length == 5) {
        classification = HiveUtils.getConstBoolean(argOIs[4]);
    }
    this.classification = classification;

    if (classification) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }
}
Example 5: getObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
    if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    } else {
        throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
    }
}
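A quick way to exercise this mapping is to hand it the TypeInfo singletons from TypeInfoFactory. A small hypothetical harness (assuming it runs inside the same class, since getObjectInspector is private):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// int maps to the shared writable-int inspector singleton.
ObjectInspector intOI = getObjectInspector(TypeInfoFactory.intTypeInfo);
assert intOI == PrimitiveObjectInspectorFactory.writableIntObjectInspector;

// Unsupported types (e.g. binary) fall through to UnsupportedOperationException.
try {
    getObjectInspector(TypeInfoFactory.binaryTypeInfo);
} catch (UnsupportedOperationException expected) {
    // "Unknown field type: binary"
}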
Example 6: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);

    checkArgPrimitive(arguments, 0);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);

    obtainDateConverter(arguments, 0, inputTypes, converters);

    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    return outputOI;
}
Example 7: newObjectInspectorFromHiveType
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
private static ObjectInspector newObjectInspectorFromHiveType(final ASTNode type) {
    // matching by token names, because token IDs (which are static final) change drastically between versions
    switch (type.getToken().getText()) {
        case "TOK_STRING":
            return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        case "TOK_INT":
            return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        case "TOK_DOUBLE":
            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        case "TOK_FLOAT":
            return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
        case "TOK_BIGINT":
            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        case "TOK_BOOLEAN":
            return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        case "TOK_STRUCT": {
            final ASTNode tabColList = (ASTNode) type.getChild(0);
            final List<String> names = new ArrayList<>();
            final List<ObjectInspector> ois = new ArrayList<>();
            for (final Node tabCol : tabColList.getChildren()) {
                final ASTNode a = (ASTNode) tabCol;
                names.add(a.getChild(0).toString());
                ois.add(newObjectInspectorFromHiveType((ASTNode) a.getChild(1)));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
        }
        case "TOK_MAP": {
            final ObjectInspector keyType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            final ObjectInspector valueType = newObjectInspectorFromHiveType((ASTNode) type.getChild(1));
            return ObjectInspectorFactory.getStandardMapObjectInspector(keyType, valueType);
        }
        case "TOK_LIST": {
            final ObjectInspector itemType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            return ObjectInspectorFactory.getStandardListObjectInspector(itemType);
        }
        default:
            throw new IllegalArgumentException("unsupported type: " + type.toStringTree());
    }
}
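When the type is available as a plain string rather than a parsed ASTNode, Hive's serde2 utilities can perform essentially the same mapping without hand-rolled AST walking. A short sketch, assuming the standard TypeInfoUtils API:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// Parse a Hive type string and build a writable-backed inspector for it;
// primitive leaves resolve to the same singletons used above (e.g. writableIntObjectInspector).
TypeInfo ti = TypeInfoUtils.getTypeInfoFromTypeString("struct<a:int,b:string>");
ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(ti);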
Example 8: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null || arguments.length != 1) {
        throw new UDFArgumentLengthException("ethereumGetChainId only takes one argument: Struct<EthereumTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing an EthereumTransaction");
    }
    this.ethereumUDFUtil = new EthereumUDFUtil((StructObjectInspector) arguments[0]);
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
Example 9: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException("_FUNC_ takes exactly 2 arguments: " + argOIs.length);
    }
    this.latOI = HiveUtils.asDoubleCompatibleOI(argOIs[0]);
    this.zoomOI = HiveUtils.asIntegerOI(argOIs[1]);

    this.result = new IntWritable();
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
Example 10: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException("_FUNC_ takes exactly 2 arguments: " + argOIs.length);
    }
    this.lonOI = HiveUtils.asDoubleCompatibleOI(argOIs[0]);
    this.zoomOI = HiveUtils.asIntegerOI(argOIs[1]);

    this.result = new IntWritable();
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
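Examples 9 and 10 appear to be the initialize halves of latitude/longitude-to-map-tile UDFs, which is why each pre-allocates a reusable IntWritable. For reference, the standard Web Mercator ("slippy map") tile formulas that such an evaluate would plausibly compute are sketched below; this is the textbook math, not necessarily the library's verbatim code:

// Standard slippy-map tile indices for a given zoom level.
static int lon2tilex(double lon, int zoom) {
    return (int) Math.floor((lon + 180.0) / 360.0 * (1 << zoom));
}

static int lat2tiley(double lat, int zoom) {
    double latRad = Math.toRadians(lat);
    double n = 1.0 - Math.log(Math.tan(latRad) + 1.0 / Math.cos(latRad)) / Math.PI;
    return (int) Math.floor(n / 2.0 * (1 << zoom));
}

// e.g. in evaluate(): result.set(lat2tiley(35.68, 15)); return result;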
Example 11: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 1);
    super.init(mode, parameters);

    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        this.inputListOI = (ListObjectInspector) parameters[0];
        this.inputListElemOI = HiveUtils.asDoubleCompatibleOI(inputListOI.getListElementObjectInspector());
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.sizeField = soi.getStructFieldRef("size");
        this.sumField = soi.getStructFieldRef("sum");
        this.countField = soi.getStructFieldRef("count");
        this.sizeOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        this.sumOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        this.countOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    }

    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI();
    } else {// terminate
        outputOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    }
    return outputOI;
}
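The internalMergeOI() call in the terminatePartial branch refers to a helper defined elsewhere in this UDAF class. Judging from the size/sum/count field refs resolved above, it plausibly builds the partial-aggregation struct inspector like this (a reconstruction under that assumption, not the verbatim source):

private static StructObjectInspector internalMergeOI() {
    List<String> fieldNames = new ArrayList<>();
    List<ObjectInspector> fieldOIs = new ArrayList<>();
    // field layout must match the getStructFieldRef("size"/"sum"/"count") lookups in init()
    fieldNames.add("size");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    fieldNames.add("sum");
    fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
    fieldNames.add("count");
    fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector));
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}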
Example 12: craeteObjectInspectorFromTypeInfo
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
public static ObjectInspector craeteObjectInspectorFromTypeInfo(final TypeInfo typeInfo) {
    switch (typeInfo.getCategory()) {
        case STRUCT:
            return new MDSStructObjectInspector((StructTypeInfo) typeInfo);
        case MAP:
            return new MDSMapObjectInspector((MapTypeInfo) typeInfo);
        case LIST:
            return new MDSListObjectInspector((ListTypeInfo) typeInfo);
        case UNION:
            UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
            List<ObjectInspector> unionList = new ArrayList<ObjectInspector>();
            for (TypeInfo childTypeInfo : unionTypeInfo.getAllUnionObjectTypeInfos()) {
                unionList.add(craeteObjectInspectorFromTypeInfo(childTypeInfo));
            }
            return ObjectInspectorFactory.getStandardUnionObjectInspector(unionList);
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
            switch (primitiveTypeInfo.getPrimitiveCategory()) {
                case STRING:
                    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
                case BINARY:
                    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
                case BOOLEAN:
                    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
                case BYTE:
                    return PrimitiveObjectInspectorFactory.writableByteObjectInspector;
                case DOUBLE:
                    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
                case FLOAT:
                    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
                case INT:
                    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
                case LONG:
                    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
                case SHORT:
                    return PrimitiveObjectInspectorFactory.writableShortObjectInspector;
                case DATE:
                case DECIMAL:
                case TIMESTAMP:
                case VOID:
                default:
                    throw new UnsupportedOperationException("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
            }
        default:
            throw new UnsupportedOperationException("Unknown category " + typeInfo.getCategory());
    }
}
Example 13: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentException {
    if (OIs.length != 2) {
        throw new UDFArgumentLengthException("Specify two arguments");
    }

    if (HiveUtils.isListOI(OIs[0]) && HiveUtils.isMapOI(OIs[1])) {
        // for (array<features::string> features, const map<string, array<number>> quantiles_map)
        if (!HiveUtils.isStringOI(((ListObjectInspector) OIs[0]).getListElementObjectInspector())) {
            throw new UDFArgumentTypeException(0,
                "Only array<string> type argument is acceptable but " + OIs[0].getTypeName()
                        + " was passed as `features`");
        }
        featuresOI = HiveUtils.asListOI(OIs[0]);
        featureOI = HiveUtils.asStringOI(featuresOI.getListElementObjectInspector());

        quantilesMapOI = HiveUtils.asMapOI(OIs[1]);
        if (!HiveUtils.isStringOI(quantilesMapOI.getMapKeyObjectInspector())
                || !HiveUtils.isListOI(quantilesMapOI.getMapValueObjectInspector())
                || !HiveUtils.isNumberOI(((ListObjectInspector) quantilesMapOI.getMapValueObjectInspector()).getListElementObjectInspector())) {
            throw new UDFArgumentTypeException(1,
                "Only map<string, array<number>> type argument is acceptable but "
                        + OIs[1].getTypeName() + " was passed as `quantiles_map`");
        }
        keyOI = HiveUtils.asStringOI(quantilesMapOI.getMapKeyObjectInspector());
        quantilesOI = HiveUtils.asListOI(quantilesMapOI.getMapValueObjectInspector());
        quantileOI = HiveUtils.asDoubleCompatibleOI(quantilesOI.getListElementObjectInspector());

        multiple = true;

        return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    } else if (HiveUtils.isPrimitiveOI(OIs[0]) && HiveUtils.isListOI(OIs[1])) {
        // for (number weight, const array<number> quantiles)
        weightOI = HiveUtils.asDoubleCompatibleOI(OIs[0]);

        quantilesOI = HiveUtils.asListOI(OIs[1]);
        if (!HiveUtils.isNumberOI(quantilesOI.getListElementObjectInspector())) {
            throw new UDFArgumentTypeException(1,
                "Only array<number> type argument is acceptable but " + OIs[1].getTypeName()
                        + " was passed as `quantiles`");
        }
        quantileOI = HiveUtils.asDoubleCompatibleOI(quantilesOI.getListElementObjectInspector());

        multiple = false;

        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else {
        throw new UDFArgumentTypeException(0,
            "Only <array<features::string>, map<string, array<number>>> "
                    + "or <number, array<number>> type arguments are accepted but <"
                    + OIs[0].getTypeName() + ", " + OIs[1].getTypeName() + "> was passed.");
    }
}
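In the single-value branch the UDF returns one int, presumably the index of the quantile bin that the weight falls into, which is why the return inspector is writableIntObjectInspector. A hedged sketch of what that lookup could look like (an illustrative helper with assumed bin semantics, not the library's verbatim code):

// Returns the index of the first quantile boundary that w does not exceed;
// values above the last boundary land in the final bin.
static int findBin(double w, double[] quantiles) {
    for (int i = 0; i < quantiles.length; i++) {
        if (w <= quantiles[i]) {
            return i;
        }
    }
    return quantiles.length - 1;
}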
Example 14: testWritableIntFeature
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class this method depends on
@Test
public void testWritableIntFeature() throws Exception {
    List<IntWritable> x = Arrays.asList(new IntWritable(111), new IntWritable(222));
    ObjectInspector featureOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    testFeature(x, featureOI, IntWritable.class, Integer.class);
}