This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.udf.generic.GenericUDF. If you are unsure what GenericUDF is for or how to use it, the curated examples below should help.
GenericUDF belongs to the org.apache.hadoop.hive.ql.udf.generic package. 15 code examples of the class are shown below, ordered by popularity.
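Before the examples, a minimal sketch of the GenericUDF contract may be useful: a subclass implements initialize (argument type checking; returns the result ObjectInspector), evaluate (per-row computation over DeferredObject arguments), and getDisplayString. The ToUpperUDF class below is a hypothetical illustration, not taken from any of the projects quoted in the examples.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

// Hypothetical example: a GenericUDF that upper-cases a single string argument.
public class ToUpperUDF extends GenericUDF {
  private StringObjectInspector inputOI;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Validate argument count and type once, before any rows are processed.
    if (arguments.length != 1 || !(arguments[0] instanceof StringObjectInspector)) {
      throw new UDFArgumentException("to_upper expects a single string argument");
    }
    inputOI = (StringObjectInspector) arguments[0];
    // Declare the return type via an ObjectInspector.
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object value = arguments[0].get();
    return value == null ? null : inputOI.getPrimitiveJavaObject(value).toUpperCase();
  }

  @Override
  public String getDisplayString(String[] children) {
    return "to_upper(" + children[0] + ")";
  }
}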
Example 1: addChildNode
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Override
public void addChildNode(final ExprNodeGenericFuncDesc exprNodeDesc) {
  GenericUDF udf = exprNodeDesc.getGenericUDF();
  // Dispatch on the concrete UDF type to build the matching boolean expression node.
  if (udf instanceof GenericUDFOPAnd) {
    childNodeList.add(new HiveExprAndNode(exprNodeDesc.getChildren()));
  } else if (udf instanceof GenericUDFOPOr) {
    childNodeList.add(new HiveExprOrNode(exprNodeDesc.getChildren()));
  } else if (udf instanceof GenericUDFOPNot) {
    childNodeList.add(new HiveExprNotNode(exprNodeDesc.getChildren()));
  } else {
    childNodeList.add(HiveExprFactory.get(exprNodeDesc, udf, exprNodeDesc.getChildren()));
  }
}
Example 2: matchAndCreateUDFHolder
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
private HiveFuncHolder matchAndCreateUDFHolder(String udfName,
    Class<? extends UDF> udfClazz,
    MajorType[] argTypes,
    ObjectInspector[] argOIs) {
  try {
    GenericUDF udfInstance = new GenericUDFBridge(udfName, false /* is operator */, udfClazz.getName());
    // initialize() throws if the argument types don't match this UDF;
    // the catch below treats that as "no match".
    ObjectInspector returnOI = udfInstance.initialize(argOIs);
    return new HiveFuncHolder(
        udfName,
        udfClazz,
        argTypes,
        returnOI,
        Types.optional(ObjectInspectorHelper.getDrillType(returnOI)),
        nonDeterministicUDFs.contains(udfClazz));
  } catch (Exception e) { /* ignore: argument types did not match */ }
  return null;
}
Example 3: matchAndCreateUDFHolder
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
private HiveFuncHolder matchAndCreateUDFHolder(String udfName,
    Class<? extends UDF> udfClazz,
    CompleteType[] argTypes,
    ObjectInspector[] argOIs) {
  try {
    GenericUDF udfInstance = new GenericUDFBridge(udfName, false /* is operator */, udfClazz.getName());
    // initialize() throws on an argument-type mismatch, handled as "no match" below.
    ObjectInspector returnOI = udfInstance.initialize(argOIs);
    return new HiveFuncHolder(
        udfName,
        udfClazz,
        argTypes,
        returnOI,
        CompleteType.fromMinorType(ObjectInspectorHelper.getMinorType(returnOI)),
        nonDeterministicUDFs.contains(udfClazz));
  } catch (Exception e) { /* ignore: argument types did not match */ }
  return null;
}
Example 4: extractNamecoinFieldFirstUpdate
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void extractNamecoinFieldFirstUpdate() throws HiveException {
  String firstUpdateScript = "520A642F666C6173687570641460C7B068EDEA60281DAF424C38D8DAB87C96CF993D7B226970223A223134352E3234392E3130362E323238222C226D6170223A7B222A223A7B226970223A223134352E3234392E3130362E323238227D7D7D6D6D76A91451B4FC93AAB8CBDBD0AC9BC8EAF824643FC1E29B88AC";
  byte[] firstUpdateScriptBytes = BitcoinUtil.convertHexStringToByteArray(firstUpdateScript);
  NamecoinExtractFieldUDF nefu = new NamecoinExtractFieldUDF();
  ObjectInspector[] arguments = new ObjectInspector[1];
  arguments[0] = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  nefu.initialize(arguments);
  GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
  doa[0] = new GenericUDF.DeferredJavaObject(new BytesWritable(firstUpdateScriptBytes));
  List<Text> resultList = (List<Text>) nefu.evaluate(doa);
  Text[] result = resultList.toArray(new Text[resultList.size()]);
  assertNotNull(result, "Valid result obtained");
  // test for domain name
  assertEquals("d/flashupd", result[0].toString(), "Domain name of first update detected correctly");
  // test for domain value
  assertEquals("{\"ip\":\"145.249.106.228\",\"map\":{\"*\":{\"ip\":\"145.249.106.228\"}}}", result[1].toString(), "Domain value of first update detected correctly");
}
Example 5: extractNamecoinFieldUpdate
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void extractNamecoinFieldUpdate() throws HiveException {
  String updateScript = "5309642F70616E656C6B612D7B226970223A22382E382E382E38222C226D6170223A7B222A223A7B226970223A22382E382E382E38227D7D7D6D7576A9148D804B079AC79AD0CA108A4E5B679DB591FF069B88AC";
  byte[] updateScriptBytes = BitcoinUtil.convertHexStringToByteArray(updateScript);
  NamecoinExtractFieldUDF nefu = new NamecoinExtractFieldUDF();
  ObjectInspector[] arguments = new ObjectInspector[1];
  arguments[0] = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  nefu.initialize(arguments);
  GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
  doa[0] = new GenericUDF.DeferredJavaObject(new BytesWritable(updateScriptBytes));
  List<Text> resultList = (List<Text>) nefu.evaluate(doa);
  Text[] result = resultList.toArray(new Text[resultList.size()]);
  assertNotNull(result, "Valid result obtained");
  // test for domain name
  assertEquals("d/panelka", result[0].toString(), "Domain name of update detected correctly");
  // test for domain value
  assertEquals("{\"ip\":\"8.8.8.8\",\"map\":{\"*\":{\"ip\":\"8.8.8.8\"}}}", result[1].toString(), "Domain value of update detected correctly");
}
Example 6: testThreeArgument
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testThreeArgument() throws UDFArgumentException, IOException {
  GenericUDF udf = new KuromojiUDF();
  ObjectInspector[] argOIs = new ObjectInspector[3];
  // line
  argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  // mode
  PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
  stringType.setTypeName("string");
  argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      stringType, null);
  // stopWords
  argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  udf.initialize(argOIs);
  udf.close();
}
Example 7: testFourArgument
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testFourArgument() throws UDFArgumentException, IOException {
  GenericUDF udf = new KuromojiUDF();
  ObjectInspector[] argOIs = new ObjectInspector[4];
  // line
  argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  // mode
  PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
  stringType.setTypeName("string");
  argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      stringType, null);
  // stopWords
  argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  // stopTags
  argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  udf.initialize(argOIs);
  udf.close();
}
Example 8: testFiveArgumentArray
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testFiveArgumentArray() throws UDFArgumentException, IOException {
  GenericUDF udf = new KuromojiUDF();
  ObjectInspector[] argOIs = new ObjectInspector[5];
  // line
  argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  // mode
  PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
  stringType.setTypeName("string");
  argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      stringType, null);
  // stopWords
  argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  // stopTags
  argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  // userDictUrl
  argOIs[4] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  udf.initialize(argOIs);
  udf.close();
}
Example 9: testFiveArgumenString
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testFiveArgumenString() throws UDFArgumentException, IOException {
  GenericUDF udf = new KuromojiUDF();
  ObjectInspector[] argOIs = new ObjectInspector[5];
  // line
  argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  // mode
  PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
  stringType.setTypeName("string");
  argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      stringType, null);
  // stopWords
  argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  // stopTags
  argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
  // userDictUrl (here a constant string rather than an array, unlike Example 8)
  argOIs[4] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      stringType, null);
  udf.initialize(argOIs);
  udf.close();
}
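The five Kuromoji tests above only exercise initialize/close with different argument counts. As a hedged sketch of the full call path (assuming the tokenizer also accepts a single line argument, which its argument layout suggests), evaluation would look like the snippet below; the result is left as Object because the return type is not shown in these tests.

// Hypothetical: run inside a test method declaring the same checked exceptions.
GenericUDF udf = new KuromojiUDF();
udf.initialize(new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.javaStringObjectInspector });
Object tokens = udf.evaluate(new GenericUDF.DeferredObject[] {
    new GenericUDF.DeferredJavaObject("日本語のテキスト") });
udf.close();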
Example 10: getHiveBucket
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
    throws HiveException
{
  GenericUDFHash udf = new GenericUDFHash();
  ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
  GenericUDF.DeferredObject[] deferredObjects = new GenericUDF.DeferredObject[columnBindings.size()];
  int i = 0;
  for (Entry<ObjectInspector, Object> entry : columnBindings) {
    objectInspectors[i] = entry.getKey();
    deferredObjects[i] = new GenericUDF.DeferredJavaObject(entry.getValue());
    i++;
  }
  ObjectInspector udfInspector = udf.initialize(objectInspectors);
  IntObjectInspector inspector = checkType(udfInspector, IntObjectInspector.class, "udfInspector");
  Object result = udf.evaluate(deferredObjects);
  HiveKey hiveKey = new HiveKey();
  hiveKey.setHashCode(inspector.get(result));
  return new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
}
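A hypothetical call site for the helper above, hashing a single string column into 32 buckets; the binding value and bucket count are illustrative only.

// Assumes java.util.{AbstractMap, ArrayList, List} and java.util.Map.Entry are imported.
List<Entry<ObjectInspector, Object>> bindings = new ArrayList<>();
bindings.add(new AbstractMap.SimpleEntry<>(
    PrimitiveObjectInspectorFactory.javaStringObjectInspector, "user-42"));
int bucket = getHiveBucket(bindings, 32);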
Example 11: initializeArguments
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
/**
 * @see #initializeSimpleResult(Class)
 * @see #initializeComplexResult(Class)
 */
static
public ObjectInspector[] initializeArguments(Class<? extends GenericUDF> clazz, ObjectInspector[] inspectors) throws UDFArgumentException {
  Evaluator evaluator = getEvaluator(clazz);
  if (inspectors.length == 1) {
    ObjectInspector inspector = inspectors[0];
    ObjectInspector.Category category = inspector.getCategory();
    switch (category) {
      case STRUCT:
        // A single struct argument packs the whole input row.
        return initializeStruct(evaluator, inspector);
      default:
        return initializePrimitiveList(evaluator, inspectors);
    }
  }
  return initializePrimitiveList(evaluator, inspectors);
}
Example 12: loadArguments
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
static
private Map<FieldName, FieldValue> loadArguments(Evaluator evaluator, ObjectInspector[] inspectors, GenericUDF.DeferredObject[] objects) throws HiveException {
  if (inspectors.length == 1) {
    ObjectInspector inspector = inspectors[0];
    ObjectInspector.Category category = inspector.getCategory();
    switch (category) {
      case STRUCT:
        return loadStruct(evaluator, inspectors[0], objects[0]);
      default:
        return loadPrimitiveList(evaluator, inspectors, objects);
    }
  }
  return loadPrimitiveList(evaluator, inspectors, objects);
}
Example 13: test2
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void test2() throws Exception {
  IsoWeekOfWeekYear udf = new IsoWeekOfWeekYear();
  LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
  udf.initialize(new ObjectInspector[]{soi});
  // Both timestamps fall on 2013-12-29 (a Sunday), i.e. ISO week 52 of 2013.
  long l = 1388358783962L;
  int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(52, output);
  l = 1388359114590L;
  output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(52, output);
}
Example 14: testYearBoundary1
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testYearBoundary1() throws HiveException, IOException {
  UsWeekOfWeekYear udf = new UsWeekOfWeekYear();
  LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
  udf.initialize(new ObjectInspector[]{soi});
  DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
  // Sunday December 29, 2013 - Saturday January 4, 2014 are Week 1 of 2014
  DateTime dt = dtf.parseDateTime("20131229T000000.000Z");
  long l = dt.getMillis();
  int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(1, output);
  dt = dtf.parseDateTime("20140104T000000.000Z");
  l = dt.getMillis();
  output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(1, output);
}
Example 15: testYearBoundary2
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the required package/class
@Test
public void testYearBoundary2() throws HiveException, IOException {
  UsWeekOfWeekYear udf = new UsWeekOfWeekYear();
  LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
  udf.initialize(new ObjectInspector[]{soi});
  DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
  // Sunday December 28, 2014 - Saturday January 3, 2015 are Week 53 of 2014
  DateTime dt = dtf.parseDateTime("20141229T000000.000Z");
  long l = dt.getMillis();
  int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(53, output);
  dt = dtf.parseDateTime("20150103T000000.000Z");
  l = dt.getMillis();
  output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
  Assert.assertEquals(53, output);
}