本文整理汇总了Java中org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.getNewAggregationBuffer方法的典型用法代码示例。如果您正苦于以下问题:Java GenericUDAFEvaluator.getNewAggregationBuffer方法的具体用法?Java GenericUDAFEvaluator.getNewAggregationBuffer怎么用?Java GenericUDAFEvaluator.getNewAggregationBuffer使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator 的用法示例。
在下文中一共展示了GenericUDAFEvaluator.getNewAggregationBuffer方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: test
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** Verifies transpose-and-dot aggregation over two 2x2 matrices in PARTIAL1 mode. */
@Test
public void test() throws Exception {
    final TransposeAndDotUDAF udaf = new TransposeAndDotUDAF();
    final double[][] left = new double[][] { {1, -2}, {-1, 3}};
    final double[][] right = new double[][] { {1, 2}, {3, 4}};
    // Both arguments are lists of writable doubles.
    final ObjectInspector[] OIs = new ObjectInspector[] {
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector)};
    final GenericUDAFEvaluator evaluator =
        udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(OIs, false, false));
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, OIs);
    final TransposeAndDotUDAF.TransposeAndDotUDAFEvaluator.TransposeAndDotAggregationBuffer agg =
        (TransposeAndDotUDAF.TransposeAndDotUDAFEvaluator.TransposeAndDotAggregationBuffer)
            evaluator.getNewAggregationBuffer();
    evaluator.reset(agg);
    // Feed corresponding rows of the two matrices.
    for (int row = 0; row < left.length; row++) {
        evaluator.iterate(agg, new Object[] {
            WritableUtils.toWritableList(left[row]), WritableUtils.toWritableList(right[row])});
    }
    final double[][] expected = new double[][] { {-2.0, -2.0}, {7.0, 8.0}};
    for (int row = 0; row < expected.length; row++) {
        Assert.assertArrayEquals(expected[row], agg.aggMatrix[row], 0.d);
    }
}
示例2: partial2Mode
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** PARTIAL2 mode: merges two serialized doubles sketches and checks the partial result. */
@Test
public void partial2Mode() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionDoublesSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.PARTIAL2, inspectors);
    DataToDoublesSketchUDAFTest.checkResultInspector(resultInspector);
    final DoublesUnionState state = (DoublesUnionState) eval.getNewAggregationBuffer();
    // Merge two single-item sketches built with k = 256.
    for (final double value : new double[] {1.0, 2.0}) {
        final UpdateDoublesSketch sketch = DoublesSketch.builder().setK(256).build();
        sketch.update(value);
        eval.merge(state, new BytesWritable(sketch.toByteArray()));
    }
    final BytesWritable bytes = (BytesWritable) eval.terminatePartial(state);
    final DoublesSketch result = DoublesSketch.wrap(Memory.wrap(bytes.getBytes()));
    Assert.assertEquals(result.getK(), 256);
    Assert.assertEquals(result.getRetainedItems(), 2);
    Assert.assertEquals(result.getMinValue(), 1.0);
    Assert.assertEquals(result.getMaxValue(), 2.0);
    eval.close();
}
示例3: partial1ModeGetStateBeforeInit
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/**
 * PARTIAL1 mode where the aggregation buffer is requested before init();
 * the evaluator must still produce a correct intermediate result afterwards.
 */
@Test
public void partial1ModeGetStateBeforeInit() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { intInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(info);
    // Intentionally obtain the buffer before initializing the evaluator.
    final State state = (State) eval.getNewAggregationBuffer();
    final ObjectInspector resultInspector = eval.init(Mode.PARTIAL1, inspectors);
    checkIntermediateResultInspector(resultInspector);
    eval.iterate(state, new Object[] {new IntWritable(1)});
    eval.iterate(state, new Object[] {new IntWritable(2)});
    final Object result = eval.terminatePartial(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof List);
    // Intermediate result is (lgK, HLL type, serialized sketch).
    final List<?> fields = (List<?>) result;
    Assert.assertEquals(fields.size(), 3);
    Assert.assertEquals(((IntWritable) fields.get(0)).get(), SketchEvaluator.DEFAULT_LG_K);
    Assert.assertEquals(((Text) fields.get(1)).toString(),
        SketchEvaluator.DEFAULT_HLL_TYPE.toString());
    final HllSketch resultSketch =
        HllSketch.wrap(Memory.wrap(((BytesWritable) fields.get(2)).getBytes()));
    Assert.assertEquals(resultSketch.getEstimate(), 2.0, 0.01);
    eval.close();
}
示例4: completeModeIntKeysDefaultParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** COMPLETE mode with int keys and default parameters; also checks reset(). */
@Test
public void completeModeIntKeysDefaultParams() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { intInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.COMPLETE, inspectors);
    checkFinalResultInspector(resultInspector);
    final State state = (State) eval.getNewAggregationBuffer();
    eval.iterate(state, new Object[] {new IntWritable(1)});
    eval.iterate(state, new Object[] {new IntWritable(2)});
    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch resultSketch =
        HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    Assert.assertEquals(resultSketch.getEstimate(), 2.0, 0.01);
    // After reset the buffer is empty, so terminate() yields null.
    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}
示例5: finalMode
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** FINAL mode: merges two serialized doubles sketches and checks the final sketch. */
@Test
public void finalMode() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionDoublesSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.FINAL, inspectors);
    DataToDoublesSketchUDAFTest.checkResultInspector(resultInspector);
    final DoublesUnionState state = (DoublesUnionState) eval.getNewAggregationBuffer();
    // Merge two single-item sketches built with k = 256.
    for (final double value : new double[] {1.0, 2.0}) {
        final UpdateDoublesSketch sketch = DoublesSketch.builder().setK(256).build();
        sketch.update(value);
        eval.merge(state, new BytesWritable(sketch.toByteArray()));
    }
    final BytesWritable bytes = (BytesWritable) eval.terminate(state);
    final DoublesSketch result = DoublesSketch.wrap(Memory.wrap(bytes.getBytes()));
    Assert.assertEquals(result.getK(), 256);
    Assert.assertEquals(result.getRetainedItems(), 2);
    Assert.assertEquals(result.getMinValue(), 1.0);
    Assert.assertEquals(result.getMaxValue(), 2.0);
    eval.close();
}
示例6: partial1ModeIntValuesDefaultParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** PARTIAL1 mode over int values with default theta-sketch parameters. */
@Test
public void partial1ModeIntValuesDefaultParams() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { intInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new DataToSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.PARTIAL1, inspectors);
    checkIntermediateResultInspector(resultInspector);
    final UnionState state = (UnionState) eval.getNewAggregationBuffer();
    eval.iterate(state, new Object[] {new IntWritable(1)});
    eval.iterate(state, new Object[] {new IntWritable(2)});
    final Object result = eval.terminatePartial(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof List);
    // Intermediate result is (nominal entries, seed, serialized sketch).
    final List<?> fields = (List<?>) result;
    Assert.assertEquals(fields.size(), 3);
    Assert.assertEquals(((IntWritable) fields.get(0)).get(), DEFAULT_NOMINAL_ENTRIES);
    Assert.assertEquals(((LongWritable) fields.get(1)).get(), DEFAULT_UPDATE_SEED);
    final Sketch resultSketch =
        Sketches.wrapSketch(Memory.wrap(((BytesWritable) fields.get(2)).getBytes()));
    Assert.assertFalse(resultSketch.isEstimationMode());
    Assert.assertEquals(resultSketch.getEstimate(), 2.0);
    eval.close();
}
示例7: completeModeDefaultK
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** COMPLETE mode over strings with the default k; checks the resulting items sketch. */
@Test
public void completeModeDefaultK() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { stringInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new DataToStringsSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.COMPLETE, inspectors);
    DataToDoublesSketchUDAFTest.checkResultInspector(resultInspector);
    @SuppressWarnings("unchecked")
    final ItemsUnionState<String> state =
        (ItemsUnionState<String>) eval.getNewAggregationBuffer();
    // Aggregate two string values.
    for (final String value : new String[] {"a", "b"}) {
        eval.iterate(state, new Object[] { new org.apache.hadoop.io.Text(value) });
    }
    final BytesWritable bytes = (BytesWritable) eval.terminate(state);
    final ItemsSketch<String> resultSketch =
        ItemsSketch.getInstance(Memory.wrap(bytes.getBytes()), comparator, serDe);
    Assert.assertEquals(resultSketch.getK(), 128);
    Assert.assertEquals(resultSketch.getRetainedItems(), 2);
    Assert.assertEquals(resultSketch.getMinValue(), "a");
    Assert.assertEquals(resultSketch.getMaxValue(), "b");
    eval.close();
}
示例8: testPartial2
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/**
 * PARTIAL2 mode: merging two equal-length partial funnel results
 * yields the element-wise sum.
 */
@Test
public void testPartial2() throws HiveException {
    Merge udaf = new Merge();
    ObjectInspector[] inputObjectInspectorList = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    GenericUDAFParameterInfo paramInfo =
        new SimpleGenericUDAFParameterInfo(inputObjectInspectorList, false, false);
    GenericUDAFEvaluator udafEvaluator = udaf.getEvaluator(paramInfo);
    // init() configures the evaluator; the returned inspector is not needed by this test
    // (the original stored it in an unused local).
    udafEvaluator.init(Mode.PARTIAL2, inputObjectInspectorList);
    // Setup the two partial results
    List<Long> partialResults1 = new ArrayList<>();
    partialResults1.add(1L);
    partialResults1.add(1L);
    partialResults1.add(0L);
    List<Long> partialResults2 = new ArrayList<>();
    partialResults2.add(1L);
    partialResults2.add(1L);
    partialResults2.add(1L);
    // Merge the partial results
    MergeAggregateBuffer agg = (MergeAggregateBuffer) udafEvaluator.getNewAggregationBuffer();
    udafEvaluator.reset(agg);
    udafEvaluator.merge(agg, partialResults1);
    udafEvaluator.merge(agg, partialResults2);
    Object result = udafEvaluator.terminatePartial(agg);
    // Expected: element-wise sum of the two partials.
    List<Long> expected = new ArrayList<>();
    expected.add(2L);
    expected.add(2L);
    expected.add(1L);
    Assert.assertEquals(expected, result);
}
示例9: testCompleteFunnelSizeMismatch
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/**
 * COMPLETE mode: iterating funnels of different sizes must raise
 * {@link UDFArgumentTypeException}.
 */
@Test(expected = UDFArgumentTypeException.class)
public void testCompleteFunnelSizeMismatch() throws HiveException {
    Merge udaf = new Merge();
    ObjectInspector[] inputObjectInspectorList = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    GenericUDAFParameterInfo paramInfo =
        new SimpleGenericUDAFParameterInfo(inputObjectInspectorList, false, false);
    GenericUDAFEvaluator udafEvaluator = udaf.getEvaluator(paramInfo);
    // init() configures the evaluator; the returned inspector is not needed by this test
    // (the original stored it in an unused local).
    udafEvaluator.init(Mode.COMPLETE, inputObjectInspectorList);
    // Setup two funnels, different sizes.
    List<Long> funnel1 = new ArrayList<>();
    funnel1.add(1L);
    funnel1.add(1L);
    funnel1.add(0L);
    List<Long> funnel2 = new ArrayList<>();
    funnel2.add(1L);
    funnel2.add(0L);
    Object[] parameters1 = new Object[]{funnel1};
    Object[] parameters2 = new Object[]{funnel2};
    // Should cause an error when merging funnels of different sizes
    AggregationBuffer agg = udafEvaluator.getNewAggregationBuffer();
    udafEvaluator.reset(agg);
    udafEvaluator.iterate(agg, parameters1);
    udafEvaluator.iterate(agg, parameters2);
}
示例10: testPartial2FunnelSizeMismatch
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/**
 * PARTIAL2 mode: merging partial results of different lengths must raise
 * {@link UDFArgumentTypeException}.
 */
@Test(expected = UDFArgumentTypeException.class)
public void testPartial2FunnelSizeMismatch() throws HiveException {
    Merge udaf = new Merge();
    ObjectInspector[] inputObjectInspectorList = new ObjectInspector[]{
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector)
    };
    GenericUDAFParameterInfo paramInfo =
        new SimpleGenericUDAFParameterInfo(inputObjectInspectorList, false, false);
    GenericUDAFEvaluator udafEvaluator = udaf.getEvaluator(paramInfo);
    // init() configures the evaluator; the returned inspector is not needed by this test
    // (the original stored it in an unused local).
    udafEvaluator.init(Mode.PARTIAL2, inputObjectInspectorList);
    // Setup the two partial results with mismatched lengths (2 vs 3).
    List<Long> partialResults1 = new ArrayList<>();
    partialResults1.add(1L);
    partialResults1.add(1L);
    List<Long> partialResults2 = new ArrayList<>();
    partialResults2.add(1L);
    partialResults2.add(0L);
    partialResults2.add(0L);
    // Merge the partial results, should throw error due to list size difference
    MergeAggregateBuffer agg = (MergeAggregateBuffer) udafEvaluator.getNewAggregationBuffer();
    udafEvaluator.reset(agg);
    udafEvaluator.merge(agg, partialResults1);
    udafEvaluator.merge(agg, partialResults2);
}
示例11: finalMode
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** FINAL mode: intersects two theta sketches; only the common keys remain. */
@Test
public void finalMode() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new IntersectSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector =
        eval.init(Mode.FINAL, new ObjectInspector[] {structInspector});
    DataToSketchUDAFTest.checkFinalResultInspector(resultInspector);
    final IntersectSketchUDAF.IntersectSketchUDAFEvaluator.IntersectionState state =
        (IntersectSketchUDAF.IntersectSketchUDAFEvaluator.IntersectionState)
            eval.getNewAggregationBuffer();
    // Merge {1,2,3} and {2,3,4}; the intersection is {2,3}.
    for (final int[] keys : new int[][] { {1, 2, 3}, {2, 3, 4} }) {
        final UpdateSketch sketch = UpdateSketch.builder().build();
        for (final int key : keys) {
            sketch.update(key);
        }
        eval.merge(state, Arrays.asList(
            new LongWritable(DEFAULT_UPDATE_SEED),
            new BytesWritable(sketch.compact().toByteArray())
        ));
    }
    final BytesWritable bytes = (BytesWritable) eval.terminate(state);
    final Sketch resultSketch = Sketches.wrapSketch(Memory.wrap(bytes.getBytes()));
    Assert.assertEquals(resultSketch.getRetainedEntries(true), 2);
    Assert.assertEquals(resultSketch.getEstimate(), 2.0);
    eval.close();
}
示例12: partial1ModeExplicitParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** PARTIAL1 mode with explicit lgK and HLL type arguments. */
@Test
public void partial1ModeExplicitParams() throws Exception {
    final ObjectInspector[] inspectors =
        new ObjectInspector[] { binaryInspector, intConstantInspector, stringConstantInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.PARTIAL1, inspectors);
    DataToSketchUDAFTest.checkIntermediateResultInspector(resultInspector);
    final int lgK = 10;
    final TgtHllType hllType = TgtHllType.HLL_6;
    final State state = (State) eval.getNewAggregationBuffer();
    // Feed two single-key sketches along with the explicit parameters.
    for (final int key : new int[] {1, 2}) {
        final HllSketch input = new HllSketch(lgK, hllType);
        input.update(key);
        eval.iterate(state, new Object[] {new BytesWritable(input.toCompactByteArray()),
            new IntWritable(lgK), new Text(hllType.toString())});
    }
    final Object result = eval.terminatePartial(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof List);
    // Intermediate result is (lgK, HLL type, serialized sketch).
    final List<?> fields = (List<?>) result;
    Assert.assertEquals(fields.size(), 3);
    Assert.assertEquals(((IntWritable) fields.get(0)).get(), lgK);
    Assert.assertEquals(((Text) fields.get(1)).toString(), hllType.toString());
    final HllSketch resultSketch =
        HllSketch.heapify(Memory.wrap(((BytesWritable) fields.get(2)).getBytes()));
    Assert.assertEquals(resultSketch.getLgConfigK(), lgK);
    Assert.assertEquals(resultSketch.getTgtHllType(), hllType);
    Assert.assertEquals(resultSketch.getEstimate(), 2.0, 0.01);
    eval.close();
}
示例13: completeModelDefaultK
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** COMPLETE mode with the default k; also verifies reset() empties the state. */
@Test
public void completeModelDefaultK() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionDoublesSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.COMPLETE, inspectors);
    DataToDoublesSketchUDAFTest.checkResultInspector(resultInspector);
    final DoublesUnionState state = (DoublesUnionState) eval.getNewAggregationBuffer();
    // Aggregate two single-item sketches built with the default k.
    for (final double value : new double[] {1.0, 2.0}) {
        final UpdateDoublesSketch sketch = DoublesSketch.builder().build();
        sketch.update(value);
        eval.iterate(state, new Object[] { new BytesWritable(sketch.toByteArray()) });
    }
    final BytesWritable bytes = (BytesWritable) eval.terminatePartial(state);
    final DoublesSketch resultSketch = DoublesSketch.wrap(Memory.wrap(bytes.getBytes()));
    Assert.assertEquals(resultSketch.getK(), 128);
    Assert.assertEquals(resultSketch.getRetainedItems(), 2);
    Assert.assertEquals(resultSketch.getMinValue(), 1.0);
    Assert.assertEquals(resultSketch.getMaxValue(), 2.0);
    // After reset the union is empty, so terminate() yields null.
    eval.reset(state);
    Assert.assertNull(eval.terminate(state));
    eval.close();
}
示例14: finalMode
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** FINAL mode: merges two intermediate HLL results built with default parameters. */
@Test
public void finalMode() throws Exception {
    final ObjectInspector[] inspectors = new ObjectInspector[] { binaryInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector =
        eval.init(Mode.FINAL, new ObjectInspector[] {structInspector});
    DataToSketchUDAFTest.checkFinalResultInspector(resultInspector);
    final State state = (State) eval.getNewAggregationBuffer();
    // Merge two intermediate (lgK, type, sketch) structs, one key each.
    for (final int key : new int[] {1, 2}) {
        final HllSketch sketch = new HllSketch(SketchEvaluator.DEFAULT_LG_K);
        sketch.update(key);
        eval.merge(state, Arrays.asList(
            new IntWritable(SketchEvaluator.DEFAULT_LG_K),
            new Text(SketchEvaluator.DEFAULT_HLL_TYPE.toString()),
            new BytesWritable(sketch.toCompactByteArray()))
        );
    }
    final Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch resultSketch =
        HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    Assert.assertEquals(resultSketch.getEstimate(), 2.0, 0.01);
    eval.close();
}
示例15: completeModeExplicitParams
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; //导入方法依赖的package包/类
/** COMPLETE mode with explicit lgK and HLL type; also verifies reset(). */
@Test
public void completeModeExplicitParams() throws Exception {
    final ObjectInspector[] inspectors =
        new ObjectInspector[] { binaryInspector, intConstantInspector, stringConstantInspector };
    final GenericUDAFParameterInfo info =
        new SimpleGenericUDAFParameterInfo(inspectors, false, false);
    final GenericUDAFEvaluator eval = new UnionSketchUDAF().getEvaluator(info);
    final ObjectInspector resultInspector = eval.init(Mode.COMPLETE, inspectors);
    DataToSketchUDAFTest.checkFinalResultInspector(resultInspector);
    final int lgK = 4;
    final TgtHllType hllType = TgtHllType.HLL_6;
    final State state = (State) eval.getNewAggregationBuffer();
    // Feed two single-key sketches along with the explicit parameters.
    for (final int key : new int[] {1, 2}) {
        final HllSketch input = new HllSketch(lgK, hllType);
        input.update(key);
        eval.iterate(state, new Object[] {new BytesWritable(input.toCompactByteArray()),
            new IntWritable(lgK), new Text(hllType.toString())});
    }
    Object result = eval.terminate(state);
    Assert.assertNotNull(result);
    Assert.assertTrue(result instanceof BytesWritable);
    final HllSketch resultSketch =
        HllSketch.heapify(Memory.wrap(((BytesWritable) result).getBytes()));
    Assert.assertEquals(resultSketch.getLgConfigK(), lgK);
    Assert.assertEquals(resultSketch.getTgtHllType(), hllType);
    Assert.assertEquals(resultSketch.getEstimate(), 2.0, 0.01);
    // After reset the union is empty, so terminate() yields null.
    eval.reset(state);
    result = eval.terminate(state);
    Assert.assertNull(result);
    eval.close();
}