本文整理汇总了Java中org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.reset方法的典型用法代码示例。如果您正苦于以下问题:Java GenericUDAFEvaluator.reset方法的具体用法?Java GenericUDAFEvaluator.reset怎么用?Java GenericUDAFEvaluator.reset使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator
的用法示例。
在下文中一共展示了GenericUDAFEvaluator.reset方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: test
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void test() throws Exception {
    // Feed the two matrices row by row; the UDAF accumulates transpose(left) x right.
    final double[][] left = new double[][] { {1, -2}, {-1, 3}};
    final double[][] right = new double[][] { {1, 2}, {3, 4}};

    // Both arguments are list<double> columns.
    final ObjectInspector listOfDoubleOI =
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    final ObjectInspector[] OIs = new ObjectInspector[] {listOfDoubleOI, listOfDoubleOI};

    final TransposeAndDotUDAF udaf = new TransposeAndDotUDAF();
    final GenericUDAFEvaluator evaluator =
        udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(OIs, false, false));
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, OIs);

    final TransposeAndDotUDAF.TransposeAndDotUDAFEvaluator.TransposeAndDotAggregationBuffer agg =
        (TransposeAndDotUDAF.TransposeAndDotUDAFEvaluator.TransposeAndDotAggregationBuffer) evaluator.getNewAggregationBuffer();
    evaluator.reset(agg);

    for (int row = 0; row < left.length; row++) {
        evaluator.iterate(agg, new Object[] {
            WritableUtils.toWritableList(left[row]), WritableUtils.toWritableList(right[row])});
    }

    // Expected product accumulated in the buffer's matrix.
    final double[][] expected = new double[][] { {-2.0, -2.0}, {7.0, 8.0}};
    for (int row = 0; row < expected.length; row++) {
        Assert.assertArrayEquals(expected[row], agg.aggMatrix[row], 0.d);
    }
}
示例2: completeModeIntKeysDefaultParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeIntKeysDefaultParams() throws Exception {
    // Single int argument, default lgK / target HLL type.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { intInspector };
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final State state = (State) eval.getNewAggregationBuffer();
    eval.iterate(state, new Object[] {new IntWritable(1)});
    eval.iterate(state, new Object[] {new IntWritable(2)});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch sketch = HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    // Two distinct keys -> estimate of 2 (within HLL error).
    Assert.assertEquals(sketch.getEstimate(), 2.0, 0.01);

    // After reset the buffer must be empty again.
    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例3: completeModeIntValuesDefaultParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeIntValuesDefaultParams() throws Exception {
    // Single int argument, all sketch parameters defaulted.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { intInspector };
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final UnionState state = (UnionState) eval.getNewAggregationBuffer();
    eval.iterate(state, new Object[] {new IntWritable(1)});
    eval.iterate(state, new Object[] {new IntWritable(2)});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final Sketch sketch = Sketches.wrapSketch(Memory.wrap(((BytesWritable) result).getBytes()));
    // Two distinct values were fed in.
    Assert.assertEquals(sketch.getEstimate(), 2.0);

    // Reset empties the buffer: terminate must yield null.
    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例4: testPartial2
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void testPartial2() throws HiveException {
    // PARTIAL2 mode: merge two partial funnel counts element-wise.
    final ObjectInspector[] argInspectors = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    final GenericUDAFEvaluator evaluator = new Merge().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    evaluator.init(Mode.PARTIAL2, argInspectors);

    // Two partial results of equal length.
    final List<Long> firstPartial = new ArrayList<>();
    firstPartial.add(1L);
    firstPartial.add(1L);
    firstPartial.add(0L);
    final List<Long> secondPartial = new ArrayList<>();
    secondPartial.add(1L);
    secondPartial.add(1L);
    secondPartial.add(1L);

    final MergeAggregateBuffer buffer = (MergeAggregateBuffer) evaluator.getNewAggregationBuffer();
    evaluator.reset(buffer);
    evaluator.merge(buffer, firstPartial);
    evaluator.merge(buffer, secondPartial);
    final Object result = evaluator.terminatePartial(buffer);

    // Element-wise sum of the two partials.
    final List<Long> expected = new ArrayList<>();
    expected.add(2L);
    expected.add(2L);
    expected.add(1L);
    Assert.assertEquals(expected, result);
}
示例5: testCompleteFunnelSizeMismatch
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test(expected = UDFArgumentTypeException.class)
public void testCompleteFunnelSizeMismatch() throws HiveException {
    final ObjectInspector[] argInspectors = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    final GenericUDAFEvaluator evaluator = new Merge().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    evaluator.init(Mode.COMPLETE, argInspectors);

    // Two funnels of different lengths: three steps vs. two.
    final List<Long> threeSteps = new ArrayList<>();
    threeSteps.add(1L);
    threeSteps.add(1L);
    threeSteps.add(0L);
    final List<Long> twoSteps = new ArrayList<>();
    twoSteps.add(1L);
    twoSteps.add(0L);

    final AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
    evaluator.reset(buffer);
    evaluator.iterate(buffer, new Object[]{threeSteps});
    // Feeding a funnel of a different size must raise UDFArgumentTypeException.
    evaluator.iterate(buffer, new Object[]{twoSteps});
}
示例6: testPartial2FunnelSizeMismatch
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test(expected = UDFArgumentTypeException.class)
public void testPartial2FunnelSizeMismatch() throws HiveException {
    final ObjectInspector[] argInspectors = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    final GenericUDAFEvaluator evaluator = new Merge().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    evaluator.init(Mode.PARTIAL2, argInspectors);

    // Partial results of mismatched lengths (2 vs. 3 entries).
    final List<Long> shortPartial = new ArrayList<>();
    shortPartial.add(1L);
    shortPartial.add(1L);
    final List<Long> longPartial = new ArrayList<>();
    longPartial.add(1L);
    longPartial.add(0L);
    longPartial.add(0L);

    final MergeAggregateBuffer buffer = (MergeAggregateBuffer) evaluator.getNewAggregationBuffer();
    evaluator.reset(buffer);
    evaluator.merge(buffer, shortPartial);
    // Merging a partial of a different size must throw.
    evaluator.merge(buffer, longPartial);
}
示例7: completeModeDefaultParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeDefaultParams() throws Exception {
    // Union of two serialized HLL sketches with default parameters.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToSketchUDAFTest.checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final State state = (State) eval.getNewAggregationBuffer();
    final HllSketch first = new HllSketch(SketchEvaluator.DEFAULT_LG_K);
    first.update(1);
    eval.iterate(state, new Object[] {new BytesWritable(first.toCompactByteArray())});
    final HllSketch second = new HllSketch(SketchEvaluator.DEFAULT_LG_K);
    second.update(2);
    eval.iterate(state, new Object[] {new BytesWritable(second.toCompactByteArray())});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch union = HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    // Two distinct items across both inputs.
    Assert.assertEquals(union.getEstimate(), 2.0, 0.01);

    // Reset empties the buffer.
    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例8: completeModeExplicitParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeExplicitParams() throws Exception {
    // Union with explicit lgK and target HLL type passed as constant arguments.
    final ObjectInspector[] argInspectors =
        new ObjectInspector[] { binaryInspector, intConstantInspector, stringConstantInspector };
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToSketchUDAFTest.checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final int lgK = 4;
    final TgtHllType hllType = TgtHllType.HLL_6;
    final State state = (State) eval.getNewAggregationBuffer();

    final HllSketch first = new HllSketch(lgK, hllType);
    first.update(1);
    eval.iterate(state, new Object[] {new BytesWritable(first.toCompactByteArray()),
        new IntWritable(lgK), new Text(hllType.toString())});
    final HllSketch second = new HllSketch(lgK, hllType);
    second.update(2);
    eval.iterate(state, new Object[] {new BytesWritable(second.toCompactByteArray()),
        new IntWritable(lgK), new Text(hllType.toString())});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch union = HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    // The explicit parameters must carry through to the result sketch.
    Assert.assertEquals(union.getLgConfigK(), lgK);
    Assert.assertEquals(union.getTgtHllType(), hllType);
    Assert.assertEquals(union.getEstimate(), 2.0, 0.01);

    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例9: completeModeDoubleKeysExplicitParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeDoubleKeysExplicitParams() throws Exception {
    // Double keys with explicit lgK and HLL type.
    final ObjectInspector[] argInspectors =
        new ObjectInspector[] { doubleInspector, intConstantInspector, stringConstantInspector };
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final int lgK = 4;
    final TgtHllType hllType = TgtHllType.HLL_6;
    final State state = (State) eval.getNewAggregationBuffer();
    eval.iterate(state, new Object[] {new DoubleWritable(1), new IntWritable(lgK), new Text(hllType.toString())});
    eval.iterate(state, new Object[] {new DoubleWritable(2), new IntWritable(lgK), new Text(hllType.toString())});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch sketch = HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    // Explicit parameters and the two distinct keys must be reflected.
    Assert.assertEquals(sketch.getLgConfigK(), lgK);
    Assert.assertEquals(sketch.getTgtHllType(), hllType);
    Assert.assertEquals(sketch.getEstimate(), 2.0, 0.01);

    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例10: completeModelDefaultK
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModelDefaultK() throws Exception {
    // Union of two quantiles (doubles) sketches with the default K.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFEvaluator eval = new UnionDoublesSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToDoublesSketchUDAFTest.checkResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final DoublesUnionState state = (DoublesUnionState) eval.getNewAggregationBuffer();
    final UpdateDoublesSketch first = DoublesSketch.builder().build();
    first.update(1.0);
    eval.iterate(state, new Object[] { new BytesWritable(first.toByteArray()) });
    final UpdateDoublesSketch second = DoublesSketch.builder().build();
    second.update(2.0);
    eval.iterate(state, new Object[] { new BytesWritable(second.toByteArray()) });

    final BytesWritable serialized = (BytesWritable) eval.terminatePartial(state);
    final DoublesSketch union = DoublesSketch.wrap(Memory.wrap(serialized.getBytes()));
    // Default K is 128; both items retained with min/max preserved.
    Assert.assertEquals(union.getK(), 128);
    Assert.assertEquals(union.getRetainedItems(), 2);
    Assert.assertEquals(union.getMinValue(), 1.0);
    Assert.assertEquals(union.getMaxValue(), 2.0);

    eval.reset(state);
    Assert.assertNull(eval.terminate(state));
    eval.close();
}
示例11: complete1ModeDefaultK
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void complete1ModeDefaultK() throws Exception {
    // Union of two string items-sketches with the default K.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFEvaluator eval = new UnionStringsSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToDoublesSketchUDAFTest.checkResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    @SuppressWarnings("unchecked")
    final ItemsUnionState<String> state = (ItemsUnionState<String>) eval.getNewAggregationBuffer();
    final ItemsSketch<String> first = ItemsSketch.getInstance(comparator);
    first.update("a");
    eval.iterate(state, new Object[] { new BytesWritable(first.toByteArray(serDe)) });
    final ItemsSketch<String> second = ItemsSketch.getInstance(comparator);
    second.update("b");
    eval.iterate(state, new Object[] { new BytesWritable(second.toByteArray(serDe)) });

    final BytesWritable serialized = (BytesWritable) eval.terminate(state);
    final ItemsSketch<String> union =
        ItemsSketch.getInstance(Memory.wrap(serialized.getBytes()), comparator, serDe);
    // Default K is 128; both items retained with min/max preserved.
    Assert.assertEquals(union.getK(), 128);
    Assert.assertEquals(union.getRetainedItems(), 2);
    Assert.assertEquals(union.getMinValue(), "a");
    Assert.assertEquals(union.getMaxValue(), "b");

    eval.reset(state);
    Assert.assertNull(eval.terminate(state));
    eval.close();
}
示例12: completeModeDefaultSeed
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeDefaultSeed() throws Exception {
    // Intersection of two theta sketches with the default seed.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFEvaluator eval = new IntersectSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToSketchUDAFTest.checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final IntersectSketchUDAF.IntersectSketchUDAFEvaluator.IntersectionState state =
        (IntersectSketchUDAF.IntersectSketchUDAFEvaluator.IntersectionState) eval.getNewAggregationBuffer();

    // {1,2,3} intersect {2,3,4} -> {2,3}, estimate 2.
    final UpdateSketch first = UpdateSketch.builder().build();
    first.update(1);
    first.update(2);
    first.update(3);
    eval.iterate(state, new Object[] {new BytesWritable(first.compact().toByteArray())});
    final UpdateSketch second = UpdateSketch.builder().build();
    second.update(2);
    second.update(3);
    second.update(4);
    eval.iterate(state, new Object[] {new BytesWritable(second.compact().toByteArray())});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final Sketch intersection = Sketches.wrapSketch(Memory.wrap(((BytesWritable) result).getBytes()));
    Assert.assertEquals(intersection.getEstimate(), 2.0);

    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例13: completeModeDefaultSizeAndSeed
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void completeModeDefaultSizeAndSeed() throws Exception {
    // Union of two theta sketches, default nominal size and seed.
    final ObjectInspector[] argInspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    DataToSketchUDAFTest.checkFinalResultInspector(eval.init(Mode.COMPLETE, argInspectors));

    final UnionState state = (UnionState) eval.getNewAggregationBuffer();
    final UpdateSketch first = UpdateSketch.builder().build();
    first.update(1);
    eval.iterate(state, new Object[] {new BytesWritable(first.compact().toByteArray())});
    final UpdateSketch second = UpdateSketch.builder().build();
    second.update(2);
    eval.iterate(state, new Object[] {new BytesWritable(second.compact().toByteArray())});

    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final Sketch union = Sketches.wrapSketch(Memory.wrap(((BytesWritable) result).getBytes()));
    // One distinct item per input sketch -> estimate 2.
    Assert.assertEquals(union.getEstimate(), 2.0);

    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例14: testComplete
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
// End-to-end (COMPLETE mode) run of the Funnel UDAF: raw rows in, final
// funnel counts out. Also exercises edge cases: empty funnel-step list,
// null inside a step, null action, and null timestamp.
@Test
public void testComplete() throws HiveException {
Funnel udaf = new Funnel();
ObjectInspector[] inputObjectInspectorList = new ObjectInspector[]{
PrimitiveObjectInspectorFactory.javaStringObjectInspector, // action_column
PrimitiveObjectInspectorFactory.javaLongObjectInspector, // timestamp_column
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector), // funnel_step_1
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector) // funnel_step_2 (original comment repeated funnel_step_1)
};
GenericUDAFParameterInfo paramInfo = new SimpleGenericUDAFParameterInfo(inputObjectInspectorList, false, false);
GenericUDAFEvaluator udafEvaluator = udaf.getEvaluator(paramInfo);
// outputObjectInspector is unused here; init() is still required to set the mode.
ObjectInspector outputObjectInspector = udafEvaluator.init(Mode.COMPLETE, inputObjectInspectorList);
// Order will be "alpha, beta, gamma, delta" when ordered on timestamp_column
// Funnel is "beta" -> "gamma" -> "epsilon"
// Should return [1, 1, 0] as we don't have an epsilon
// NOTE(review): parameters1 has 7 elements while the other rows have 5, and
// only 4 inspectors are declared above — presumably Funnel.iterate() accepts a
// variable number of funnel-step arguments (lists or bare strings) and
// tolerates the extras; verify against the Funnel evaluator implementation.
Object[] parameters1 = new Object[]{ "beta", 200L, new ArrayList<Object>(), Arrays.asList("beta", "BAD"), null, "gamma", Arrays.asList("epsilon")}; // Test empty list funnel step, and null in funnel step
Object[] parameters2 = new Object[]{"alpha", 100L, Arrays.asList("beta", "BAD"), "gamma", Arrays.asList("epsilon")};
Object[] parameters3 = new Object[]{"delta", 400L, Arrays.asList("beta", "BAD"), "gamma", Arrays.asList("epsilon")};
Object[] parameters4 = new Object[]{"gamma", 200L, Arrays.asList("beta", "BAD"), "gamma", Arrays.asList("epsilon")}; // gamma and beta happen at the same time, beta should come first (sorted on action after timestamp)
Object[] parameters5 = new Object[]{ null, 800L, Arrays.asList("beta", "BAD"), "gamma", Arrays.asList("epsilon")}; // Check null action_column
Object[] parameters6 = new Object[]{"omega", null, Arrays.asList("beta", "BAD"), "gamma", Arrays.asList("epsilon")}; // Check null timestamp
// Process the data
AggregationBuffer agg = udafEvaluator.getNewAggregationBuffer();
udafEvaluator.reset(agg);
udafEvaluator.iterate(agg, parameters1);
udafEvaluator.iterate(agg, parameters2);
udafEvaluator.iterate(agg, parameters3);
udafEvaluator.iterate(agg, parameters4);
udafEvaluator.iterate(agg, parameters5);
udafEvaluator.iterate(agg, parameters6);
Object result = udafEvaluator.terminate(agg);
// Expected: 1 user reached "beta", 1 reached "gamma", 0 reached "epsilon".
List<Long> expected = new ArrayList<>();
expected.add(1L);
expected.add(1L);
expected.add(0L);
Assert.assertEquals(expected, result);
}
示例15: testPartial1
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
@Test
public void testPartial1() throws HiveException {
    // PARTIAL1 mode: raw rows in, serialized partial aggregation out.
    final ObjectInspector[] argInspectors = new ObjectInspector[]{
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, // action_column
        PrimitiveObjectInspectorFactory.javaLongObjectInspector, // timestamp_column
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector), // funnel_step_1
        ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector) // funnel_step_2
    };
    final GenericUDAFEvaluator evaluator = new Funnel().getEvaluator(
        new SimpleGenericUDAFParameterInfo(argInspectors, false, false));
    evaluator.init(Mode.PARTIAL1, argInspectors);

    // Event order by timestamp: alpha(100), beta(200), gamma(300), delta(400).
    // Funnel definition: "beta" -> "gamma" -> "epsilon".
    final Object[] row1 = new Object[]{ "beta", 200L, Arrays.asList("beta"), "gamma", Arrays.asList("epsilon")};
    final Object[] row2 = new Object[]{"alpha", 100L, Arrays.asList("beta"), "gamma", Arrays.asList("epsilon")};
    final Object[] row3 = new Object[]{"delta", 400L, Arrays.asList("beta"), "gamma", Arrays.asList("epsilon")};
    final Object[] row4 = new Object[]{"gamma", 300L, Arrays.asList("beta"), "gamma", Arrays.asList("epsilon")};

    final AggregationBuffer buffer = evaluator.getNewAggregationBuffer();
    evaluator.reset(buffer);
    evaluator.iterate(buffer, row1);
    evaluator.iterate(buffer, row2);
    evaluator.iterate(buffer, row3);
    evaluator.iterate(buffer, row4);
    final Object result = evaluator.terminatePartial(buffer);

    // Partial output: matched actions, their timestamps, and the
    // null-delimited flattened funnel definition.
    final List<Object> expected = new ArrayList<>();
    expected.add(Arrays.asList("beta", "gamma"));
    expected.add(Arrays.asList(200L, 300L));
    expected.add(Arrays.asList("beta", null, "gamma", null, "epsilon", null));
    Assert.assertEquals(expected, result);
}