本文整理匯總了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector方法的典型用法代碼示例。如果您正苦於以下問題:Java PrimitiveObjectInspectorFactory.javaDoubleObjectInspector方法的具體用法?Java PrimitiveObjectInspectorFactory.javaDoubleObjectInspector怎麽用?Java PrimitiveObjectInspectorFactory.javaDoubleObjectInspector使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
的用法示例。
在下文中一共展示了PrimitiveObjectInspectorFactory.javaDoubleObjectInspector方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: detectSST
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Runs Singular Spectrum Transformation over the bundled cf1d.csv.gz series
 * and counts the number of scores exceeding the given threshold.
 *
 * @param scoreFunc SST score function to configure the detector with
 * @param threshold change-point score threshold
 * @return number of detected change-points
 * @throws IOException if the test resource cannot be read
 * @throws HiveException if the detector fails
 */
private static int detectSST(@Nonnull final ScoreFunction scoreFunc,
        @Nonnull final double threshold) throws IOException, HiveException {
    Parameters params = new Parameters();
    params.set(scoreFunc);
    PrimitiveObjectInspector oi = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    SingularSpectrumTransform sst = new SingularSpectrumTransform(params, oi);
    double[] outScores = new double[1];
    int numChangepoints = 0;
    // try-with-resources: the original leaked the reader when an exception was thrown
    try (BufferedReader reader = readFile("cf1d.csv.gz")) {
        println("x change");
        String line;
        while ((line = reader.readLine()) != null) {
            double x = Double.parseDouble(line);
            sst.update(x, outScores);
            printf("%f %f%n", x, outScores[0]);
            if (outScores[0] > threshold) {
                numChangepoints++;
            }
        }
    }
    return numChangepoints;
}
示例2: testTailKWithKey
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testTailKWithKey() throws Exception {
    // "-k -2" requests the two entries with the SMALLEST keys (tail-k)
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k -2")};
    final String[] labels = new String[] {"banana", "apple", "candy"};
    final double[] sortKeys = new double[] {0.7, 0.5, 0.8};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    int i = 0;
    while (i < labels.length) {
        evaluator.iterate(agg, new Object[] {labels[i], sortKeys[i]});
        i++;
    }

    // apple (0.5) and banana (0.7) have the two smallest keys
    List<Object> result = evaluator.terminate(agg);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals("apple", result.get(0));
    Assert.assertEquals("banana", result.get(1));
}
示例3: testOneArgument
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/** Vectorizes a single numeric feature: value 0.1 for feature "a" -> "a:0.1". */
@Test
public void testOneArgument() throws HiveException, IOException {
    VectorizeFeaturesUDF udf = new VectorizeFeaturesUDF();
    ObjectInspector[] argOIs = new ObjectInspector[2];
    List<String> featureNames = Arrays.asList("a");
    argOIs[0] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, featureNames);
    argOIs[1] = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    udf.initialize(argOIs);
    DeferredObject[] arguments = new DeferredObject[2];
    // Double.valueOf: the Double(double) constructor is deprecated since Java 9
    arguments[1] = new DeferredJavaObject(Double.valueOf(0.1));
    List<Text> actuals = udf.evaluate(arguments);
    List<Text> expected = WritableUtils.val(new String[] {"a:0.1"});
    Assert.assertEquals(expected, actuals);
    udf.close();
}
示例4: testTwoArguments
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/** Vectorizes two numeric features; the string "1.1" is parsed as a number. */
@Test
public void testTwoArguments() throws HiveException, IOException {
    VectorizeFeaturesUDF udf = new VectorizeFeaturesUDF();
    ObjectInspector[] argOIs = new ObjectInspector[3];
    List<String> featureNames = Arrays.asList("a", "b");
    argOIs[0] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, featureNames);
    argOIs[1] = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    argOIs[2] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    udf.initialize(argOIs);
    DeferredObject[] arguments = new DeferredObject[3];
    // Double.valueOf: the Double(double) constructor is deprecated since Java 9
    arguments[1] = new DeferredJavaObject(Double.valueOf(0.1));
    arguments[2] = new DeferredJavaObject("1.1");
    List<Text> actuals = udf.evaluate(arguments);
    List<Text> expected = WritableUtils.val("a:0.1", "b:1.1");
    Assert.assertEquals(expected, actuals);
    udf.close();
}
示例5: testReverseTopKWithKey
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testReverseTopKWithKey() throws Exception {
    // "-k 2 -reverse" = reversed top-k, which is equivalent to tail-k
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k 2 -reverse")};
    final String[] labels = new String[] {"banana", "apple", "candy"};
    final double[] sortKeys = new double[] {0.7, 0.5, 0.8};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    int i = 0;
    while (i < labels.length) {
        evaluator.iterate(agg, new Object[] {labels[i], sortKeys[i]});
        i++;
    }

    // smallest two keys (0.5, 0.7) come back in ascending order
    List<Object> result = evaluator.terminate(agg);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals("apple", result.get(0));
    Assert.assertEquals("banana", result.get(1));
}
示例6: testNullMixed
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testNullMixed() throws Exception {
    // null inputs are dropped; remaining values come back in natural order
    final ObjectInspector[] inputOIs =
        new ObjectInspector[] {PrimitiveObjectInspectorFactory.javaDoubleObjectInspector};
    final String[] inputs = new String[] {"banana", "apple", null, "candy"};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (String input : inputs) {
        evaluator.iterate(agg, new Object[] {input});
    }

    List<Object> result = evaluator.terminate(agg);
    Assert.assertEquals(3, result.size());
    Assert.assertEquals("apple", result.get(0));
    Assert.assertEquals("banana", result.get(1));
    Assert.assertEquals("candy", result.get(2));
}
示例7: testTopKWithKey
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testTopKWithKey() throws Exception {
    // "-k 2" keeps the two entries with the LARGEST keys
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k 2")};
    final String[] labels = new String[] {"banana", "apple", "candy"};
    final double[] sortKeys = new double[] {0.7, 0.5, 0.8};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    int i = 0;
    while (i < labels.length) {
        evaluator.iterate(agg, new Object[] {labels[i], sortKeys[i]});
        i++;
    }

    // candy (0.8) and banana (0.7) have the two largest keys
    List<Object> result = evaluator.terminate(agg);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals("candy", result.get(0));
    Assert.assertEquals("banana", result.get(1));
}
示例8: testNaturalOrder
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testNaturalOrder() throws Exception {
    NaturalOrderedMapEvaluator evaluator = new NaturalOrderedMapEvaluator();
    NaturalOrderedMapEvaluator.MapAggregationBuffer agg =
            (NaturalOrderedMapEvaluator.MapAggregationBuffer) evaluator.getNewAggregationBuffer();
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector};
    final double[] mapKeys = new double[] {0.7, 0.5, 0.8};
    final String[] mapValues = new String[] {"banana", "apple", "candy"};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    int i = 0;
    while (i < mapKeys.length) {
        evaluator.iterate(agg, new Object[] {mapKeys[i], mapValues[i]});
        i++;
    }

    // natural (ascending) key order: 0.5 -> apple, 0.7 -> banana, 0.8 -> candy
    Map<Object, Object> result = evaluator.terminate(agg);
    Object[] sorted = result.values().toArray();
    Assert.assertEquals(3, sorted.length);
    Assert.assertEquals("apple", sorted[0]);
    Assert.assertEquals("banana", sorted[1]);
    Assert.assertEquals("candy", sorted[2]);
    evaluator.close();
}
示例9: testCf1d
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Runs ChangeFinder1D over the cf1d.csv.gz series and sanity-checks the
 * number of outliers/change-points against loose expected bounds.
 */
@Test
public void testCf1d() throws IOException, HiveException {
    Parameters params = new Parameters();
    params.set(LossFunction.logloss);
    PrimitiveObjectInspector oi = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    ChangeFinder1D cf = new ChangeFinder1D(params, oi);
    double[] outScores = new double[2]; // [0]=outlier score, [1]=change-point score
    int numOutliers = 0, numChangepoints = 0;
    // try-with-resources: the original leaked the reader when an exception was thrown
    try (BufferedReader reader = readFile("cf1d.csv.gz")) {
        println("x outlier change");
        String line;
        while ((line = reader.readLine()) != null) {
            double x = Double.parseDouble(line);
            cf.update(x, outScores);
            printf("%f %f %f%n", x, outScores[0], outScores[1]);
            if (outScores[0] > 10.d) {
                numOutliers++;
            }
            if (outScores[1] > 10.d) {
                numChangepoints++;
            }
        }
    }
    Assert.assertTrue("#outliers SHOULD be greater than 10: " + numOutliers, numOutliers > 10);
    Assert.assertTrue("#outliers SHOULD be less than 20: " + numOutliers, numOutliers < 20);
    Assert.assertTrue("#changepoints SHOULD be greater than 0: " + numChangepoints,
        numChangepoints > 0);
    Assert.assertTrue("#changepoints SHOULD be less than 5: " + numChangepoints,
        numChangepoints < 5);
}
示例10: testReverseOrder
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testReverseOrder() throws Exception {
    ReverseOrderedMapEvaluator evaluator = new ReverseOrderedMapEvaluator();
    ReverseOrderedMapEvaluator.MapAggregationBuffer agg =
            (ReverseOrderedMapEvaluator.MapAggregationBuffer) evaluator.getNewAggregationBuffer();
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector};
    final double[] mapKeys = new double[] {0.7, 0.5, 0.8};
    final String[] mapValues = new String[] {"banana", "apple", "candy"};

    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    int i = 0;
    while (i < mapKeys.length) {
        evaluator.iterate(agg, new Object[] {mapKeys[i], mapValues[i]});
        i++;
    }

    // reverse (descending) key order: 0.8 -> candy, 0.7 -> banana, 0.5 -> apple
    Map<Object, Object> result = evaluator.terminate(agg);
    Object[] sorted = result.values().toArray();
    Assert.assertEquals(3, sorted.length);
    Assert.assertEquals("candy", sorted[0]);
    Assert.assertEquals("banana", sorted[1]);
    Assert.assertEquals("apple", sorted[2]);
    evaluator.close();
}
示例11: testTwitterData
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Runs ChangeFinder1D over the twitter.csv.gz series with tuned parameters
 * and sanity-checks the outlier/change-point counts against loose bounds.
 */
@Test
public void testTwitterData() throws IOException, HiveException {
    Parameters params = new Parameters();
    params.set(LossFunction.logloss);
    params.r1 = 0.01d;
    params.k = 6;
    params.T1 = 10;
    params.T2 = 5;
    PrimitiveObjectInspector oi = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    ChangeFinder1D cf = new ChangeFinder1D(params, oi);
    double[] outScores = new double[2]; // [0]=outlier score, [1]=change-point score
    int i = 1, numOutliers = 0, numChangepoints = 0;
    // try-with-resources: the original leaked the reader when an exception was thrown
    try (BufferedReader reader = readFile("twitter.csv.gz")) {
        println("# time x outlier change");
        String line;
        while ((line = reader.readLine()) != null) {
            double x = Double.parseDouble(line);
            cf.update(x, outScores);
            printf("%d %f %f %f%n", i, x, outScores[0], outScores[1]);
            if (outScores[0] > 30.d) {
                numOutliers++;
            }
            if (outScores[1] > 8.d) {
                numChangepoints++;
            }
            i++;
        }
    }
    Assert.assertTrue("#outliers SHOULD be greater than 5: " + numOutliers, numOutliers > 5);
    Assert.assertTrue("#outliers SHOULD be less than 10: " + numOutliers, numOutliers < 10);
    Assert.assertTrue("#changepoints SHOULD be greater than 0: " + numChangepoints,
        numChangepoints > 0);
    Assert.assertTrue("#changepoints SHOULD be less than 5: " + numChangepoints,
        numChangepoints < 5);
}
示例12: testReverseOrderWithKey
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Orders by key ascending ("-reverse_order"); keys 0.7 are duplicated, so
 * either of banana/candy may come last — only their relative set is checked.
 */
@Test
public void testReverseOrderWithKey() throws Exception {
    ObjectInspector[] inputOIs = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-reverse_order")};
    final String[] values = new String[] {"banana", "apple", "candy"};
    final double[] keys = new double[] {0.7, 0.5, 0.7};
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 0; i < values.length; i++) {
        evaluator.iterate(agg, new Object[] {values[i], keys[i]});
    }
    List<Object> res = (List<Object>) evaluator.terminate(agg);
    Assert.assertEquals(3, res.size());
    // equals(), not ==: reference comparison on Strings only worked by accident
    // of literal interning and breaks if the evaluator ever copies values
    if ("banana".equals(res.get(0))) { // duplicated key (0.7)
        Assert.assertEquals("candy", res.get(1));
    } else {
        Assert.assertEquals("banana", res.get(1));
    }
    Assert.assertEquals("apple", res.get(2));
}
示例13: testCategoricalVariable
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
/**
 * Feature "b" is treated as categorical ("b#dayofweek") when its value is not
 * numeric, quantitative ("b:1.0") when it parses as a number, and dropped
 * entirely when the numeric value is 0.
 */
@Test
public void testCategoricalVariable() throws HiveException, IOException {
    VectorizeFeaturesUDF udf = new VectorizeFeaturesUDF();
    ObjectInspector[] argOIs = new ObjectInspector[3];
    List<String> featureNames = Arrays.asList("a", "b");
    argOIs[0] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, featureNames);
    argOIs[1] = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    argOIs[2] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    udf.initialize(argOIs);
    DeferredObject[] arguments = new DeferredObject[3];
    // Double.valueOf: the Double(double) constructor is deprecated since Java 9
    arguments[1] = new DeferredJavaObject(Double.valueOf(0.1));
    // non-numeric value -> categorical feature "b#dayofweek"
    arguments[2] = new DeferredJavaObject("dayofweek");
    List<Text> actuals = udf.evaluate(arguments);
    List<Text> expected = WritableUtils.val("a:0.1", "b#dayofweek");
    Assert.assertEquals(expected, actuals);
    // numeric string -> quantitative feature "b:1.0"
    arguments[2] = new DeferredJavaObject("1.0");
    actuals = udf.evaluate(arguments);
    expected = WritableUtils.val("a:0.1", "b:1.0");
    Assert.assertEquals(expected, actuals);
    // integer string is also parsed numerically
    arguments[2] = new DeferredJavaObject("1");
    actuals = udf.evaluate(arguments);
    expected = WritableUtils.val("a:0.1", "b:1.0");
    Assert.assertEquals(expected, actuals);
    // zero-valued feature is omitted from the output vector
    arguments[2] = new DeferredJavaObject("0");
    actuals = udf.evaluate(arguments);
    expected = WritableUtils.val(new String[] {"a:0.1"});
    Assert.assertEquals(expected, actuals);
    udf.close();
}
示例14: testLongDouble
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testLongDouble() throws Exception {
    // long feature id with a double weight -> "id:weight"
    final ObjectInspector idOI = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    final ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    udf.initialize(new ObjectInspector[] {idOI, valueOI});

    GenericUDF.DeferredObject[] args = new GenericUDF.DeferredObject[] {
        new DeferredJavaObject(1L), new DeferredJavaObject(2.5d)};
    Text result = udf.evaluate(args);
    Assert.assertEquals("1:2.5", result.toString());
}
示例15: testTextDouble
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //導入方法依賴的package包/類
@Test
public void testTextDouble() throws Exception {
    // Text feature name with a double weight -> "name:weight"
    final ObjectInspector nameOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    final ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    udf.initialize(new ObjectInspector[] {nameOI, valueOI});

    GenericUDF.DeferredObject[] args = new GenericUDF.DeferredObject[] {
        new DeferredJavaObject(new Text("f1")), new DeferredJavaObject(2.5d)};
    Text result = udf.evaluate(args);
    Assert.assertEquals("f1:2.5", result.toString());
}