

Java GenericUDF.DeferredObject Code Examples

This article collects typical usage examples of GenericUDF.DeferredObject from the Java class org.apache.hadoop.hive.ql.udf.generic.GenericUDF. If you are wondering what GenericUDF.DeferredObject is for and how to use it in practice, the examples selected here should help. Strictly speaking, DeferredObject is a nested interface of GenericUDF rather than a method: every argument handed to GenericUDF.evaluate() is wrapped in a DeferredObject so that its value can be fetched lazily via get(). You can also browse the other usage examples of org.apache.hadoop.hive.ql.udf.generic.GenericUDF for more context.


The sections below show twelve code examples that use GenericUDF.DeferredObject, ordered by popularity.
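Before the individual examples, here is a minimal, self-contained sketch of the call pattern that all of them share: initialize the UDF with ObjectInspectors describing the argument types, wrap each argument value in a GenericUDF.DeferredJavaObject, and hand the resulting DeferredObject array to evaluate(). The ToUpperUDF class and its to_upper display name are hypothetical and used only for illustration; they are not taken from any of the projects below.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical toy UDF that upper-cases a single string argument.
public class ToUpperUDF extends GenericUDF {

	@Override
	public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
		if (arguments.length != 1) {
			throw new UDFArgumentException("to_upper expects exactly one argument");
		}
		// Declare the return type: a plain Java String.
		return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
	}

	@Override
	public Object evaluate(DeferredObject[] arguments) throws HiveException {
		// DeferredObject.get() materializes the argument value lazily.
		Object value = arguments[0].get();
		return value == null ? null : value.toString().toUpperCase();
	}

	@Override
	public String getDisplayString(String[] children) {
		return "to_upper(" + children[0] + ")";
	}

	// Driving the UDF the same way the unit tests below do.
	public static void main(String[] args) throws HiveException {
		ToUpperUDF udf = new ToUpperUDF();
		udf.initialize(new ObjectInspector[] {
				PrimitiveObjectInspectorFactory.javaStringObjectInspector});

		GenericUDF.DeferredObject[] deferred = new GenericUDF.DeferredObject[] {
				new GenericUDF.DeferredJavaObject("hive")};

		System.out.println(udf.evaluate(deferred)); // expected output: HIVE
	}
}

In a real query Hive supplies the DeferredObjects itself; constructing DeferredJavaObject instances by hand, as above and in the tests below, is mainly useful for unit-testing a UDF outside of a running Hive session.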

Example 1: extractNamecoinFieldFirstUpdate

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
@Test
public void extractNamecoinFieldFirstUpdate() throws HiveException {
	String firstUpdateScript ="520A642F666C6173687570641460C7B068EDEA60281DAF424C38D8DAB87C96CF993D7B226970223A223134352E3234392E3130362E323238222C226D6170223A7B222A223A7B226970223A223134352E3234392E3130362E323238227D7D7D6D6D76A91451B4FC93AAB8CBDBD0AC9BC8EAF824643FC1E29B88AC";
	byte[] firstUpdateScriptBytes = BitcoinUtil.convertHexStringToByteArray(firstUpdateScript);
	NamecoinExtractFieldUDF nefu = new NamecoinExtractFieldUDF();
	ObjectInspector[] arguments = new ObjectInspector[1];
	arguments[0] = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
	nefu.initialize(arguments);	
	
	GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
	
	doa[0]=new GenericUDF.DeferredJavaObject(new BytesWritable(firstUpdateScriptBytes));
	List<Text> resultList = (List<Text>) nefu.evaluate(doa);
	
	Text[] result=resultList.toArray(new Text[resultList.size()]);
	assertNotNull( result,"Valid result obtained");
	// test for domain name
	assertEquals("d/flashupd",result[0].toString(),"Domain name of first update detected correctly");
	// test for domain value
	assertEquals("{\"ip\":\"145.249.106.228\",\"map\":{\"*\":{\"ip\":\"145.249.106.228\"}}}",result[1].toString(),"Domain value of first update detected correctly");
	
}
 
Developer: ZuInnoTe, Project: hadoopcryptoledger, Lines of code: 23, Source file: NamecoinUDFTest.java

Example 2: extractNamecoinFieldUpdate

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
@Test
public void extractNamecoinFieldUpdate() throws HiveException {
	String updateScript = "5309642F70616E656C6B612D7B226970223A22382E382E382E38222C226D6170223A7B222A223A7B226970223A22382E382E382E38227D7D7D6D7576A9148D804B079AC79AD0CA108A4E5B679DB591FF069B88AC";
	byte[] updateScriptBytes = BitcoinUtil.convertHexStringToByteArray(updateScript);
	NamecoinExtractFieldUDF nefu = new NamecoinExtractFieldUDF();
	ObjectInspector[] arguments = new ObjectInspector[1];
	arguments[0] = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
	nefu.initialize(arguments);	
	
	GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
	
	doa[0]=new GenericUDF.DeferredJavaObject(new BytesWritable(updateScriptBytes));
	List<Text> resultList = (List<Text>) nefu.evaluate(doa);
	Text[] result=resultList.toArray(new Text[resultList.size()]);
	assertNotNull( result,"Valid result obtained");
	// test for domain name
	assertEquals("d/panelka",result[0].toString(),"Domain name of first update detected correctly");
	// test for domain value
	assertEquals("{\"ip\":\"8.8.8.8\",\"map\":{\"*\":{\"ip\":\"8.8.8.8\"}}}",result[1].toString(),"Domain value of first update detected correctly");
	
}
 
Developer: ZuInnoTe, Project: hadoopcryptoledger, Lines of code: 22, Source file: NamecoinUDFTest.java

Example 3: getHiveBucket

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
        throws HiveException
{
    GenericUDFHash udf = new GenericUDFHash();
    ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
    GenericUDF.DeferredObject[] deferredObjects = new GenericUDF.DeferredObject[columnBindings.size()];

    int i = 0;
    for (Entry<ObjectInspector, Object> entry : columnBindings) {
        objectInspectors[i] = entry.getKey();
        deferredObjects[i] = new GenericUDF.DeferredJavaObject(entry.getValue());
        i++;
    }

    ObjectInspector udfInspector = udf.initialize(objectInspectors);
    IntObjectInspector inspector = checkType(udfInspector, IntObjectInspector.class, "udfInspector");

    Object result = udf.evaluate(deferredObjects);
    HiveKey hiveKey = new HiveKey();
    hiveKey.setHashCode(inspector.get(result));

    return new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
}
 
Developer: y-lan, Project: presto, Lines of code: 24, Source file: TestHiveBucketing.java

Example 4: loadArguments

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
static
private Map<FieldName, FieldValue> loadArguments(Evaluator evaluator, ObjectInspector[] inspectors, GenericUDF.DeferredObject[] objects) throws HiveException {

	if(inspectors.length == 1){
		ObjectInspector inspector = inspectors[0];

		ObjectInspector.Category category = inspector.getCategory();
		switch(category){
			case STRUCT:
				return loadStruct(evaluator, inspectors[0], objects[0]);
			default:
				return loadPrimitiveList(evaluator, inspectors, objects);
		}
	}

	return loadPrimitiveList(evaluator, inspectors, objects);
}
 
Developer: jpmml, Project: jpmml-hive, Lines of code: 18, Source file: PMMLUtil.java

Example 5: test

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
@Test
public void test() throws Exception {
    final SelectKBestUDF selectKBest = new SelectKBestUDF();
    final int k = 2;
    final double[] data = new double[] {250.29999999999998, 170.90000000000003, 73.2,
            12.199999999999996};
    final double[] importanceList = new double[] {292.1666753739119, 152.70000455081467,
            187.93333893418327, 59.93333511948589};

    final GenericUDF.DeferredObject[] dObjs = new GenericUDF.DeferredObject[] {
            new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(data)),
            new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(importanceList)),
            new GenericUDF.DeferredJavaObject(k)};

    selectKBest.initialize(new ObjectInspector[] {
            ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
            ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
            ObjectInspectorUtils.getConstantObjectInspector(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector, k)});
    final List<DoubleWritable> resultObj = selectKBest.evaluate(dObjs);

    Assert.assertEquals(resultObj.size(), k);

    final double[] result = new double[k];
    for (int i = 0; i < k; i++) {
        result[i] = resultObj.get(i).get();
    }

    final double[] answer = new double[] {250.29999999999998, 73.2};

    Assert.assertArrayEquals(answer, result, 0.d);
    selectKBest.close();
}
 
Developer: apache, Project: incubator-hivemall, Lines of code: 34, Source file: SelectKBestUDFTest.java

Example 6: evaluate

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
@Override
public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
  assert (arguments.length == 2);

  if (arguments[0].get() == null || arguments[1].get() == null) {
    return null;
  }

  String ip = (String) converters[0].convert(arguments[0].get());
  String filename = (String) converters[1].convert(arguments[1].get());

  return lookup(ip, filename);
}
 
Developer: Hanmourang, Project: hiped2, Lines of code: 14, Source file: Geoloc.java

Example 7: evaluateComplex

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
/**
 * @see #initializeArguments(Class, ObjectInspector[])
 * @see #initializeComplexResult(Class)
 */
static
public Object evaluateComplex(Class<? extends GenericUDF> clazz, ObjectInspector[] inspectors, GenericUDF.DeferredObject[] objects) throws HiveException {
	Evaluator evaluator = getEvaluator(clazz);

	Map<FieldName, FieldValue> arguments = loadArguments(evaluator, inspectors, objects);

	Map<FieldName, ?> result = evaluator.evaluate(arguments);

	return storeResult(evaluator, result);
}
 
Developer: jpmml, Project: jpmml-hive, Lines of code: 15, Source file: PMMLUtil.java

Example 8: evaluate

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
@Override
public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
  assert arguments.length == 4;

  String year = getArgument(0, arguments);
  String month = getArgument(1, arguments);
  String day = getArgument(2, arguments);
  String event_date = getArgument(3, arguments);
  List<Object> result = new ArrayList<Object>(4);
  try {
    OccurrenceParseResult<AtomizedLocalDate> parsed =
      TemporalInterpreter.interpretEventDate(year, month, day, event_date);
    OccurrenceParseResult<TemporalAccessor> parsed2 =
            TemporalInterpreter.interpretRecordedDate(year, month, day, event_date);
    if (parsed.isSuccessful() && parsed.getIssues().isEmpty()) {
      result.add(parsed.getPayload().getYear());
      result.add(parsed.getPayload().getMonth());
      result.add(parsed.getPayload().getDay());
      result.add(TemporalAccessorUtils.toDate(parsed2.getPayload(), true).getTime());
    }
    else{
      result.add(null);
      result.add(null);
      result.add(null);
      result.add(null);
    }
  } catch (Exception e) {
    // not much to do - indicates bad data
  }

  return result;
}
 
Developer: gbif, Project: occurrence, Lines of code: 33, Source file: DateParseUDF.java

Example 9: getArgument

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
private String getArgument(int index, GenericUDF.DeferredObject[] arguments) throws HiveException {
  DeferredObject deferredObject = arguments[index];
  if (deferredObject == null) {
    return null;
  }

  Object convertedObject = converters[index].convert(deferredObject.get());
  return convertedObject == null ? null : convertedObject.toString();
}
 
Developer: gbif, Project: occurrence, Lines of code: 10, Source file: DateParseUDF.java

Example 10: loadStruct

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
static
private Map<FieldName, FieldValue> loadStruct(Evaluator evaluator, ObjectInspector inspector, GenericUDF.DeferredObject object) throws HiveException {
	Map<FieldName, FieldValue> result = Maps.newLinkedHashMap();

	StructObjectInspector structInspector = (StructObjectInspector)inspector;

	Object structObject = object.get();

	List<FieldName> activeFields = evaluator.getActiveFields();
	for(FieldName activeField : activeFields){
		StructField structField = structInspector.getStructFieldRef(activeField.getValue());

		PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector)structField.getFieldObjectInspector();

		Object primitiveObject = structInspector.getStructFieldData(structObject, structField);

		FieldValue value = EvaluatorUtil.prepare(evaluator, activeField, primitiveObjectInspector.getPrimitiveJavaObject(primitiveObject));

		result.put(activeField, value);
	}

	return result;
}
 
Developer: jpmml, Project: jpmml-hive, Lines of code: 24, Source file: PMMLUtil.java

Example 11: loadPrimitiveList

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
static
private Map<FieldName, FieldValue> loadPrimitiveList(Evaluator evaluator, ObjectInspector[] inspectors, GenericUDF.DeferredObject[] objects) throws HiveException {
	Map<FieldName, FieldValue> result = Maps.newLinkedHashMap();

	int i = 0;

	List<FieldName> activeFields = evaluator.getActiveFields();
	for(FieldName activeField : activeFields){
		PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)inspectors[i];

		Object primitiveObject = objects[i].get();

		FieldValue value = EvaluatorUtil.prepare(evaluator, activeField, primitiveInspector.getPrimitiveJavaObject(primitiveObject));

		result.put(activeField, value);

		i++;
	}

	return result;
}
 
Developer: jpmml, Project: jpmml-hive, Lines of code: 22, Source file: PMMLUtil.java

Example 12: evaluateSimple

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; // import the package/class the method depends on
/**
 * @see #initializeArguments(Class, ObjectInspector[])
 * @see #initializeSimpleResult(Class)
 */
static
public Object evaluateSimple(Class<? extends GenericUDF> clazz, ObjectInspector[] inspectors, GenericUDF.DeferredObject[] objects) throws HiveException {
	Evaluator evaluator = getEvaluator(clazz);

	Map<FieldName, FieldValue> arguments = loadArguments(evaluator, inspectors, objects);

	Map<FieldName, ?> result = evaluator.evaluate(arguments);

	Object targetValue = result.get(evaluator.getTargetField());

	return EvaluatorUtil.decode(targetValue);
}
 
Developer: jpmml, Project: jpmml-hive, Lines of code: 17, Source file: PMMLUtil.java


Note: The org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and distribution or use should follow the corresponding project's license. Do not reproduce without permission.