本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector方法的典型用法代码示例。如果您正苦于以下问题:Java PrimitiveObjectInspectorFactory.javaFloatObjectInspector方法的具体用法?Java PrimitiveObjectInspectorFactory.javaFloatObjectInspector怎么用?Java PrimitiveObjectInspectorFactory.javaFloatObjectInspector使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
的用法示例。
在下文中一共展示了PrimitiveObjectInspectorFactory.javaFloatObjectInspector方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testNoOptions
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testNoOptions() throws Exception {
    // Train on two features with label 0 and verify the prediction round-trips.
    List<String> x = Arrays.asList("1:-2", "2:-1");
    float y = 0.f;

    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    // Fix: this inspector is a *float* OI for the label; it was misleadingly named "intOI".
    ObjectInspector labelOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    ListObjectInspector featureListOI =
            ObjectInspectorFactory.getStandardListObjectInspector(stringOI);

    udtf.initialize(new ObjectInspector[] {featureListOI, labelOI});
    udtf.process(new Object[] {x, y});
    udtf.finalizeTraining();

    float predicted = udtf.predict(udtf.parseFeatures(x));
    Assert.assertEquals(y, predicted, 1E-5);
}
示例2: testDefaultInit
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testDefaultInit() throws HiveException {
    // Fix: the banner previously printed "testGaussian()" — keep it in sync with the method name.
    println("--------------------------\n testDefaultInit()");
    OnlineMatrixFactorizationUDTF mf = new MatrixFactorizationSGDUDTF();
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    // Fix: "new String(literal)" is a redundant allocation; pass the literal directly.
    ObjectInspector param = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-factor 3");
    ObjectInspector[] argOIs = new ObjectInspector[] {intOI, intOI, floatOI, param};
    mf.initialize(argOIs);
    // With no explicit -rankinit option the UDTF must default to random initialization.
    Assert.assertEquals(RankInitScheme.random, mf.rankInit);

    float[][] rating = { {5, 3, 0, 1}, {4, 0, 0, 1}, {1, 1, 0, 5}, {1, 0, 0, 4}, {0, 1, 5, 4}};
    Object[] args = new Object[3];
    final int numIters = 100;
    // Feed every (row, col, rating) cell repeatedly to let SGD converge.
    for (int iter = 0; iter < numIters; iter++) {
        for (int row = 0; row < rating.length; row++) {
            for (int col = 0, size = rating[row].length; col < size; col++) {
                args[0] = row;
                args[1] = col;
                args[2] = rating[row][col];
                mf.process(args);
            }
        }
    }
    // Every observed rating should be reconstructed within a 0.2 tolerance.
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            double predicted = mf.predict(row, col);
            print(rating[row][col] + "[" + predicted + "]\t");
            Assert.assertEquals(rating[row][col], predicted, 0.2d);
        }
        println();
    }
}
示例3: testRandInit
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testRandInit() throws HiveException {
    println("--------------------------\n testRandInit()");
    OnlineMatrixFactorizationUDTF mf = new MatrixFactorizationSGDUDTF();
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    // Fix: "new String(literal)" is a redundant allocation; pass the literal directly.
    ObjectInspector param = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-factor 3 -rankinit random");
    ObjectInspector[] argOIs = new ObjectInspector[] {intOI, intOI, floatOI, param};
    mf.initialize(argOIs);
    // Explicitly requested random rank initialization.
    Assert.assertEquals(RankInitScheme.random, mf.rankInit);

    float[][] rating = { {5, 3, 0, 1}, {4, 0, 0, 1}, {1, 1, 0, 5}, {1, 0, 0, 4}, {0, 1, 5, 4}};
    Object[] args = new Object[3];
    final int numIters = 100;
    // Feed every (row, col, rating) cell repeatedly to let SGD converge.
    for (int iter = 0; iter < numIters; iter++) {
        for (int row = 0; row < rating.length; row++) {
            for (int col = 0, size = rating[row].length; col < size; col++) {
                args[0] = row;
                args[1] = col;
                args[2] = rating[row][col];
                mf.process(args);
            }
        }
    }
    // Every observed rating should be reconstructed within a 0.2 tolerance.
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            double predicted = mf.predict(row, col);
            print(rating[row][col] + "[" + predicted + "]\t");
            Assert.assertEquals(rating[row][col], predicted, 0.2d);
        }
        println();
    }
}
示例4: testGaussianInit
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testGaussianInit() throws HiveException {
    println("--------------------------\n testGaussianInit()");
    OnlineMatrixFactorizationUDTF mf = new MatrixFactorizationSGDUDTF();
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    // Fix: "new String(literal)" is a redundant allocation; pass the literal directly.
    ObjectInspector param = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-factor 3 -rankinit gaussian");
    ObjectInspector[] argOIs = new ObjectInspector[] {intOI, intOI, floatOI, param};
    mf.initialize(argOIs);
    // Explicitly requested gaussian rank initialization.
    Assert.assertEquals(RankInitScheme.gaussian, mf.rankInit);

    float[][] rating = { {5, 3, 0, 1}, {4, 0, 0, 1}, {1, 1, 0, 5}, {1, 0, 0, 4}, {0, 1, 5, 4}};
    Object[] args = new Object[3];
    final int numIters = 100;
    // Feed every (row, col, rating) cell repeatedly to let SGD converge.
    for (int iter = 0; iter < numIters; iter++) {
        for (int row = 0; row < rating.length; row++) {
            for (int col = 0, size = rating[row].length; col < size; col++) {
                args[0] = row;
                args[1] = col;
                args[2] = rating[row][col];
                mf.process(args);
            }
        }
    }
    // Every observed rating should be reconstructed within a 0.2 tolerance.
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            double predicted = mf.predict(row, col);
            print(rating[row][col] + "[" + predicted + "]\t");
            Assert.assertEquals(rating[row][col], predicted, 0.2d);
        }
        println();
    }
}
示例5: testIterationsWithoutFile
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testIterationsWithoutFile() throws HiveException {
    println("--------------------------\n testIterationsWithoutFile()");
    OnlineMatrixFactorizationUDTF mf = new MatrixFactorizationSGDUDTF();
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    final int iters = 100;
    // Fix: "new String(...)" was a redundant allocation; concatenate the literal directly.
    ObjectInspector param = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        "-factor 3 -iterations " + iters);
    ObjectInspector[] argOIs = new ObjectInspector[] {intOI, intOI, floatOI, param};
    // Simulate running inside a map task so iterative training is exercised in-memory
    // (i.e. without spilling training examples to a backing file).
    MapredContext mrContext = MapredContextAccessor.create(true, null);
    mf.configure(mrContext);
    mf.initialize(argOIs);
    Assert.assertEquals(RankInitScheme.random, mf.rankInit);

    float[][] rating = { {5, 3, 0, 1}, {4, 0, 0, 1}, {1, 1, 0, 5}, {1, 0, 0, 4}, {0, 1, 5, 4}};
    Object[] args = new Object[3];
    // Feed each (row, col, rating) cell once; iteration happens in runIterativeTraining().
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            args[0] = row;
            args[1] = col;
            args[2] = rating[row][col];
            mf.process(args);
        }
    }
    mf.runIterativeTraining(iters);
    // Every observed rating should be reconstructed within a 0.2 tolerance.
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            double predicted = mf.predict(row, col);
            print(rating[row][col] + "[" + predicted + "]\t");
            Assert.assertEquals(rating[row][col], predicted, 0.2d);
        }
        println();
    }
}
示例6: test
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void test() throws HiveException {
    println("--------------------------\n test()");
    // Same convergence scenario as the SGD tests, but with the AdaGrad optimizer.
    OnlineMatrixFactorizationUDTF mf = new MatrixFactorizationAdaGradUDTF();
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector floatOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    // Fix: "new String(literal)" is a redundant allocation; pass the literal directly.
    ObjectInspector param = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-factor 3");
    ObjectInspector[] argOIs = new ObjectInspector[] {intOI, intOI, floatOI, param};
    mf.initialize(argOIs);

    float[][] rating = { {5, 3, 0, 1}, {4, 0, 0, 1}, {1, 1, 0, 5}, {1, 0, 0, 4}, {0, 1, 5, 4}};
    Object[] args = new Object[3];
    final int numIters = 100;
    // Feed every (row, col, rating) cell repeatedly to let the optimizer converge.
    for (int iter = 0; iter < numIters; iter++) {
        for (int row = 0; row < rating.length; row++) {
            for (int col = 0, size = rating[row].length; col < size; col++) {
                args[0] = row;
                args[1] = col;
                args[2] = rating[row][col];
                mf.process(args);
            }
        }
    }
    // Every observed rating should be reconstructed within a 0.2 tolerance.
    for (int row = 0; row < rating.length; row++) {
        for (int col = 0, size = rating[row].length; col < size; col++) {
            double predicted = mf.predict(row, col);
            print(rating[row][col] + "[" + predicted + "]\t");
            Assert.assertEquals(rating[row][col], predicted, 0.2d);
        }
        println();
    }
}
示例7: testIntFloat
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testIntFloat() throws Exception {
    // An int feature paired with a float weight should serialize as "feature:weight".
    ObjectInspector[] argOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaFloatObjectInspector};
    udf.initialize(argOIs);

    GenericUDF.DeferredObject[] arguments = new GenericUDF.DeferredObject[] {
            new DeferredJavaObject(1), new DeferredJavaObject(2.5f)};
    Text result = udf.evaluate(arguments);
    Assert.assertEquals("1:2.5", result.toString());
}
示例8: testLongFloat
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testLongFloat() throws Exception {
    // A bigint feature paired with a float weight should serialize as "feature:weight".
    ObjectInspector[] argOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.javaLongObjectInspector,
            PrimitiveObjectInspectorFactory.javaFloatObjectInspector};
    udf.initialize(argOIs);

    GenericUDF.DeferredObject[] arguments = new GenericUDF.DeferredObject[] {
            new DeferredJavaObject(1L), new DeferredJavaObject(2.5f)};
    Text result = udf.evaluate(arguments);
    Assert.assertEquals("1:2.5", result.toString());
}
示例9: testStringFloat
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test
public void testStringFloat() throws Exception {
    // A string feature paired with a float weight should serialize as "feature:weight".
    ObjectInspector[] argOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaFloatObjectInspector};
    udf.initialize(argOIs);

    GenericUDF.DeferredObject[] arguments = new GenericUDF.DeferredObject[] {
            new DeferredJavaObject("f1"), new DeferredJavaObject(2.5f)};
    Text result = udf.evaluate(arguments);
    Assert.assertEquals("f1:2.5", result.toString());
}
示例10: testInitialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@SuppressWarnings("deprecation")
@Test
public void testInitialize() throws UDFArgumentException {
    // initialize() should report an output struct whose feature column mirrors
    // the element type of the incoming feature list.
    AdaGradUDTF udtf = new AdaGradUDTF();
    ObjectInspector labelOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;

    // int features -> struct<feature:int,weight:float>
    ListObjectInspector intListOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    StructObjectInspector intListSOI =
            udtf.initialize(new ObjectInspector[] {intListOI, labelOI});
    assertEquals("struct<feature:int,weight:float>", intListSOI.getTypeName());

    // string features -> struct<feature:string,weight:float>
    ListObjectInspector stringListOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    StructObjectInspector stringListSOI =
            udtf.initialize(new ObjectInspector[] {stringListOI, labelOI});
    assertEquals("struct<feature:string,weight:float>", stringListSOI.getTypeName());

    // bigint features -> struct<feature:bigint,weight:float>
    ListObjectInspector longListOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector);
    StructObjectInspector longListSOI =
            udtf.initialize(new ObjectInspector[] {longListOI, labelOI});
    assertEquals("struct<feature:bigint,weight:float>", longListSOI.getTypeName());
}
示例11: testUnsupportedOptimizer
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test(expected = UDFArgumentException.class)
public void testUnsupportedOptimizer() throws Exception {
    // initialize() must reject an optimizer name it does not recognize.
    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    ListObjectInspector featuresOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ObjectInspector optionsOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-opt UnsupportedOpt");
    udtf.initialize(new ObjectInspector[] {featuresOI, targetOI, optionsOI});
}
示例12: testUnsupportedLossFunction
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test(expected = UDFArgumentException.class)
public void testUnsupportedLossFunction() throws Exception {
    // initialize() must reject a loss-function name it does not recognize.
    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    ListObjectInspector featuresOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ObjectInspector optionsOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-loss UnsupportedLoss");
    udtf.initialize(new ObjectInspector[] {featuresOI, targetOI, optionsOI});
}
示例13: testInvalidLossFunction
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test(expected = UDFArgumentException.class)
public void testInvalidLossFunction() throws Exception {
    // HingeLoss is a classification loss, so the regressor must refuse it.
    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    ListObjectInspector featuresOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ObjectInspector optionsOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-loss HingeLoss");
    udtf.initialize(new ObjectInspector[] {featuresOI, targetOI, optionsOI});
}
示例14: testUnsupportedRegularization
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
@Test(expected = UDFArgumentException.class)
public void testUnsupportedRegularization() throws Exception {
    // initialize() must reject a regularization name it does not recognize.
    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    ListObjectInspector featuresOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ObjectInspector optionsOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-reg UnsupportedReg");
    udtf.initialize(new ObjectInspector[] {featuresOI, targetOI, optionsOI});
}
示例15: testFeature
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入方法依赖的package包/类
private <T> void testFeature(@Nonnull List<T> x, @Nonnull ObjectInspector featureOI,
        @Nonnull Class<T> featureClass, @Nonnull Class<?> modelFeatureClass) throws Exception {
    // Trains on a single example and checks that every feature object forwarded
    // by the UDTF has the expected runtime class.
    final float y = 0.f;

    GeneralRegressorUDTF udtf = new GeneralRegressorUDTF();
    ObjectInspector labelOI = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
    ListObjectInspector featureListOI =
            ObjectInspectorFactory.getStandardListObjectInspector(featureOI);
    udtf.initialize(new ObjectInspector[] {featureListOI, labelOI});

    // Capture only the first column (the feature) of each forwarded row.
    final List<Object> collected = new ArrayList<Object>();
    udtf.setCollector(new Collector() {
        @Override
        public void collect(Object input) throws HiveException {
            collected.add(((Object[]) input)[0]);
        }
    });

    udtf.process(new Object[] {x, y});
    udtf.close();

    Assert.assertFalse(collected.isEmpty());
    for (Object feature : collected) {
        Assert.assertEquals("All model features must have same type", modelFeatureClass,
            feature.getClass());
    }
}