本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector.getList方法的典型用法代码示例。如果您正苦于以下问题:Java ListObjectInspector.getList方法的具体用法?Java ListObjectInspector.getList怎么用?Java ListObjectInspector.getList使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector
的用法示例。
在下文中一共展示了ListObjectInspector.getList方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getByteBuffers
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
* This method currently supports BinarySet data type of DynamoDB
*/
/**
 * Converts a Hive LIST value into a list of binary items.
 * <p>
 * Only the DynamoDB BinarySet ("BS") type is currently supported; any other
 * {@code ddType} causes a RuntimeException on the first element.
 *
 * @param data the Hive list value to convert
 * @param objectInspector inspector for {@code data}; must be a ListObjectInspector
 * @param ddType the DynamoDB type tag, expected to be "BS"
 * @return the converted ByteBuffers, or {@code null} when the list value is null
 */
public List<ByteBuffer> getByteBuffers(Object data, ObjectInspector objectInspector, String
    ddType) {
    final ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
    final List<?> values = listInspector.getList(data);
    if (values == null) {
        return null;
    }
    final ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
    final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
    for (final Object value : values) {
        if (value == null) {
            throw new RuntimeException("Null element found in list: " + values);
        }
        if (ddType.equals("BS")) {
            buffers.add(getByteBuffer(value, elementInspector));
        } else {
            throw new RuntimeException("Expecting BinarySet type: " + ddType);
        }
    }
    return buffers;
}
示例2: asStringList
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Resolves the deferred argument as a Hive list and converts each element to
 * its string form.
 *
 * @param arg the deferred UDF argument holding a list value
 * @param listOI the inspector for the list argument
 * @return a fixed-size list of element strings (null elements stay null),
 *         an empty list for an empty input, or {@code null} when the
 *         argument resolves to no list at all
 * @throws HiveException if evaluating the deferred argument fails
 */
@Nullable
public static List<String> asStringList(@Nonnull final DeferredObject arg,
        @Nonnull final ListObjectInspector listOI) throws HiveException {
    final Object argObj = arg.get();
    if (argObj == null) {
        return null;
    }
    final List<?> data = listOI.getList(argObj);
    // Fix: guard the inspector result — getList may yield null for a null
    // list value, and the original dereferenced data.size() unconditionally.
    if (data == null) {
        return null;
    }
    final int size = data.size();
    if (size == 0) {
        return Collections.emptyList();
    }
    final String[] ary = new String[size];
    for (int i = 0; i < size; i++) {
        final Object o = data.get(i);
        if (o != null) {
            ary[i] = o.toString();
        }
    }
    return Arrays.asList(ary);
}
示例3: asStringArray
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Resolves the deferred argument as a Hive list and converts each element to
 * its string form, returning an array.
 *
 * @param arg the deferred UDF argument holding a list value
 * @param listOI the inspector for the list argument
 * @return an array of element strings (null elements stay null), or
 *         {@code null} when the argument resolves to no list at all
 * @throws HiveException if evaluating the deferred argument fails
 */
@Nullable
public static String[] asStringArray(@Nonnull final DeferredObject arg,
        @Nonnull final ListObjectInspector listOI) throws HiveException {
    final Object argObj = arg.get();
    if (argObj == null) {
        return null;
    }
    final List<?> data = listOI.getList(argObj);
    // Fix: guard the inspector result — getList may yield null for a null
    // list value, and the original dereferenced data.size() unconditionally.
    if (data == null) {
        return null;
    }
    final int size = data.size();
    final String[] arr = new String[size];
    for (int i = 0; i < size; i++) {
        final Object o = data.get(i);
        if (o != null) {
            arr[i] = o.toString();
        }
    }
    return arr;
}
示例4: testPA1TrainWithoutParameter
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
@Test
public void testPA1TrainWithoutParameter() throws UDFArgumentException {
    final PassiveAggressiveUDTF udtf = new PassiveAggressiveUDTF.PA1();
    final ObjectInspector featureOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    final ListObjectInspector featureListOI =
            ObjectInspectorFactory.getStandardListObjectInspector(featureOI);
    // initialize without an explicit aggressiveness parameter
    udtf.initialize(new ObjectInspector[] {featureListOI, featureOI});
    // train on a single positive example over features 1..3
    final List<?> features = (List<?>) featureListOI.getList(new Object[] {1, 2, 3});
    udtf.train(features, 1);
    // each of the three features is expected to carry the same weight
    assertEquals(0.3333333f, udtf.model.get(1).get(), 1e-5f);
    assertEquals(0.3333333f, udtf.model.get(2).get(), 1e-5f);
    assertEquals(0.3333333f, udtf.model.get(3).get(), 1e-5f);
}
示例5: testPA1TrainWithParameter
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
@Test
public void testPA1TrainWithParameter() throws UDFArgumentException {
    final PassiveAggressiveUDTF udtf = new PassiveAggressiveUDTF.PA1();
    final ObjectInspector featureOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    final ListObjectInspector featureListOI =
            ObjectInspectorFactory.getStandardListObjectInspector(featureOI);
    // constant string OI carrying the aggressiveness option "-c 0.1"
    final ObjectInspector paramOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-c 0.1");
    udtf.initialize(new ObjectInspector[] {featureListOI, featureOI, paramOI});
    // train on a single positive example over features 1..3
    final List<?> features = (List<?>) featureListOI.getList(new Object[] {1, 2, 3});
    udtf.train(features, 1);
    // with the parameter set, each feature is expected to end up at 0.1
    assertEquals(0.1000000f, udtf.model.get(1).get(), 1e-5f);
    assertEquals(0.1000000f, udtf.model.get(2).get(), 1e-5f);
    assertEquals(0.1000000f, udtf.model.get(3).get(), 1e-5f);
}
示例6: createArray
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Converts a Hive LIST value into a nested ArrayWritable.
 * <p>
 * Each element is converted via {@code createObject}; null conversions are
 * dropped. The surviving elements are wrapped in an inner ArrayWritable typed
 * by the first element's class, which is itself wrapped in a single-slot
 * Writable-typed ArrayWritable.
 *
 * @param obj the Hive list value (may be null)
 * @param inspector the inspector describing {@code obj}
 * @return the wrapped array, or {@code null} when no elements survive
 * @throws SerDeException propagated from element conversion
 */
private ArrayWritable createArray(final Object obj, final ListObjectInspector inspector)
        throws SerDeException {
    final List<?> values = inspector.getList(obj);
    final ObjectInspector elementInspector = inspector.getListElementObjectInspector();
    final List<Writable> converted = new ArrayList<Writable>();
    if (values != null) {
        for (final Object value : values) {
            final Writable writable = createObject(value, elementInspector);
            if (writable != null) {
                converted.add(writable);
            }
        }
    }
    if (converted.isEmpty()) {
        return null;
    }
    final ArrayWritable inner = new ArrayWritable(converted.get(0).getClass(),
        converted.toArray(new Writable[converted.size()]));
    return new ArrayWritable(Writable.class, new Writable[] {inner});
}
示例7: deparseObject
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Recursively converts a Hive value into a plain Java object.
 *
 * @param field the Hive value to deparse (may be null)
 * @param fieldOI the inspector describing {@code field}
 * @return the primitive Java value for PRIMITIVE inspectors, or a List of
 *         recursively deparsed elements for LIST inspectors ({@code null}
 *         when the list value itself is null)
 * @throws SerDeException if the inspector category is neither PRIMITIVE nor LIST
 */
public static Object deparseObject(Object field, ObjectInspector fieldOI) throws SerDeException {
    switch (fieldOI.getCategory()) {
        case PRIMITIVE: {
            PrimitiveObjectInspector oi = (PrimitiveObjectInspector) fieldOI;
            return oi.getPrimitiveJavaObject(field);
        }
        case LIST: {
            ListObjectInspector listOI = (ListObjectInspector) fieldOI;
            List<?> elements = listOI.getList(field);
            // Fix: a null Hive list deparses to null instead of throwing NPE
            // on elements.size() as the original did.
            if (elements == null) {
                return null;
            }
            List<Object> list = new ArrayList<Object>(elements.size());
            ObjectInspector elemOI = listOI.getListElementObjectInspector();
            for (Object elem : elements) {
                Object o = deparseObject(elem, elemOI);
                list.add(o);
            }
            return list;
        }
        default:
            throw new SerDeException("Unexpected fieldOI: " + fieldOI);
    }
}
示例8: getListAttribute
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Converts a Hive LIST value into a list of string items.
 * <p>
 * Currently supports the DynamoDB StringSet ("SS") and NumberSet ("NS")
 * types; any other {@code ddType} causes a RuntimeException on the first
 * element.
 *
 * @param data the Hive list value to convert
 * @param objectInspector inspector for {@code data}; must be a ListObjectInspector
 * @param ddType the DynamoDB type tag, "SS" or "NS"
 * @return the converted strings, or {@code null} when the list value is null
 */
public List<String> getListAttribute(Object data, ObjectInspector objectInspector, String
    ddType) {
    final ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
    final List<?> values = listInspector.getList(data);
    if (values == null) {
        return null;
    }
    final ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
    final List<String> items = new ArrayList<String>();
    for (final Object value : values) {
        if (value == null) {
            throw new RuntimeException("Null element found in list: " + values);
        }
        if (ddType.equals("SS")) {
            items.add(getString(value, elementInspector));
        } else if (ddType.equals("NS")) {
            items.add(getNumber(value, elementInspector));
        } else {
            throw new RuntimeException("Unsupported dynamodb type: " + ddType);
        }
    }
    return items;
}
示例9: testTrain
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
@Test
public void testTrain() throws HiveException {
    final PassiveAggressiveUDTF udtf = new PassiveAggressiveUDTF();
    final ObjectInspector featureOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    final ListObjectInspector featureListOI =
            ObjectInspectorFactory.getStandardListObjectInspector(featureOI);
    udtf.initialize(new ObjectInspector[] {featureListOI, featureOI});
    // first example supplied as a plain List<Integer>
    final List<Integer> firstExample = new ArrayList<Integer>();
    firstExample.add(1);
    firstExample.add(2);
    firstExample.add(3);
    udtf.train(firstExample, 1);
    // all three features are expected to share the update equally
    assertEquals(0.3333333f, udtf.model.get(1).get(), 1e-5f);
    assertEquals(0.3333333f, udtf.model.get(2).get(), 1e-5f);
    assertEquals(0.3333333f, udtf.model.get(3).get(), 1e-5f);
    // second example supplied as an Object[] routed through the list inspector
    final List<?> secondExample = (List<?>) featureListOI.getList(new Object[] {3, 4, 5});
    udtf.train(secondExample, 1);
    // features 1 and 2 are untouched; 3 grows; 4 and 5 are newly weighted
    assertEquals(0.3333333f, udtf.model.get(1).get(), 1e-5f);
    assertEquals(0.3333333f, udtf.model.get(2).get(), 1e-5f);
    assertEquals(0.5555555f, udtf.model.get(3).get(), 1e-5f);
    assertEquals(0.2222222f, udtf.model.get(4).get(), 1e-5f);
    assertEquals(0.2222222f, udtf.model.get(5).get(), 1e-5f);
}
示例10: serializeList
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
// Serializes a Hive LIST value into a block. When a parent builder is given,
// the list is appended to it as a nested entry and null is returned;
// otherwise a standalone Block is built and returned.
private static Block serializeList(Type type, BlockBuilder builder, Object object, ListObjectInspector inspector)
{
List<?> list = inspector.getList(object);
if (list == null) {
// a null list requires a parent builder to record the null entry on
requireNonNull(builder, "parent builder is null").appendNull();
return null;
}
List<Type> typeParameters = type.getTypeParameters();
checkArgument(typeParameters.size() == 1, "list must have exactly 1 type parameter");
Type elementType = typeParameters.get(0);
ObjectInspector elementInspector = inspector.getListElementObjectInspector();
BlockBuilder currentBuilder;
if (builder != null) {
// nested case: elements go into a sub-entry of the parent block
currentBuilder = builder.beginBlockEntry();
}
else {
// top-level case: build an independent block presized to the list length
currentBuilder = elementType.createBlockBuilder(new BlockBuilderStatus(), list.size());
}
for (Object element : list) {
serializeObject(elementType, currentBuilder, element, elementInspector);
}
if (builder != null) {
// close the entry opened by beginBlockEntry; result lives in the parent
builder.closeEntry();
return null;
}
else {
Block resultBlock = currentBuilder.build();
return resultBlock;
}
}
示例11: deparseList
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Deparses a list and its elements.
 *
 * @param obj - Hive object to deparse
 * @param listOI - ObjectInspector for the object
 * @return - A deparsed object
 */
private Object deparseList(Object obj, ListObjectInspector listOI) {
    List<?> items = listOI.getList(obj);
    ObjectInspector elementOI = listOI.getListElementObjectInspector();
    List<Object> result = new ArrayList<Object>();
    for (Object item : items) {
        result.add(deparseObject(item, elementOI));
    }
    return result;
}
示例12: deparseList
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
* Deparses a list and its elements.
*
* @param obj - Hive object to deparse
* @param listOI - ObjectInspector for the object
* @return - A deparsed object
*/
private Object deparseList(Object obj, ListObjectInspector listOI) {
List<Object> list = new ArrayList<Object>();
List<?> field = listOI.getList(obj);
ObjectInspector elemOI = listOI.getListElementObjectInspector();
for (Object elem : field) {
list.add(deparseObject(elem, elemOI));
}
return list;
}
示例13: deparseList
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/**
 * Deparses a list and its elements.
 *
 * @param obj
 * - Hive object to deparse
 * @param listOI
 * - ObjectInspector for the object
 * @return - A deparsed object
 */
private Object deparseList(final Object obj,
final ListObjectInspector listOI) {
final List<Object> list = new ArrayList<Object>();
// NOTE(review): assumes getList never returns null here — a null Hive list
// would make the for-each below throw NPE; confirm callers pre-check.
final List<?> field = listOI.getList(obj);
final ObjectInspector elemOI = listOI.getListElementObjectInspector();
for (final Object elem : field) {
// recursively deparse each element with the element inspector
list.add(deparseObject(elem, elemOI));
}
return list;
}
示例14: add
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
// Adds one column value to the BlurRecord, dispatching on the inspector type:
// primitives become string columns (with special handling for the row-id and
// record-id columns), structs are read as latitude/longitude pairs, and lists
// are flattened by recursing once per element under the same column name.
private void add(BlurRecord blurRecord, String columnName, ObjectInspector objectInspector, Object data)
throws SerDeException {
if (data == null) {
// null values are silently skipped
return;
}
if (objectInspector instanceof PrimitiveObjectInspector) {
PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) objectInspector;
String strValue = toString(columnName, data, primitiveObjectInspector);
if (columnName.equals(BlurObjectInspectorGenerator.ROWID)) {
blurRecord.setRowId(strValue);
} else if (columnName.equals(BlurObjectInspectorGenerator.RECORDID)) {
blurRecord.setRecordId(strValue);
} else {
blurRecord.addColumn(columnName, strValue);
}
} else if (objectInspector instanceof StructObjectInspector) {
// struct values are interpreted as geo points: extract the latitude and
// longitude fields and store them as a single lat/long column value
StructObjectInspector structObjectInspector = (StructObjectInspector) objectInspector;
Map<String, StructField> allStructFieldRefs = toMap(structObjectInspector.getAllStructFieldRefs());
String latitude = getFieldData(columnName, data, structObjectInspector, allStructFieldRefs,
BlurObjectInspectorGenerator.LATITUDE);
String longitude = getFieldData(columnName, data, structObjectInspector, allStructFieldRefs,
BlurObjectInspectorGenerator.LONGITUDE);
blurRecord.addColumn(columnName, toLatLong(latitude, longitude));
} else if (objectInspector instanceof ListObjectInspector) {
// recurse per element so each list entry becomes its own column value
ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
List<?> list = listObjectInspector.getList(data);
ObjectInspector listElementObjectInspector = listObjectInspector.getListElementObjectInspector();
for (Object obj : list) {
add(blurRecord, columnName, listElementObjectInspector, obj);
}
} else {
throw new SerDeException("ObjectInspector [" + objectInspector + "] of type ["
+ (objectInspector != null ? objectInspector.getClass() : null) + "] not supported.");
}
}
示例15: processNetwork
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; //导入方法依赖的package包/类
/** processes the single row of data passed as UDF arguments by loading the network,
 * setting the targets and evidence and updating the beliefs.
 *
 * @param net the network
 * @param arguments the UDF arguments: network filename, targets and node/outcome evidence pairs
 * @param loi a ListObjectInspector instance initialized at Hive compile time. If null, the targets argument is null or a string
 * @return the list of target identifiers
 */
public static List<?> processNetwork(Network net, Object[] arguments, ListObjectInspector loi) {
List<?> targets = new ArrayList<String>();
// argument 0 is the network file path
net.readFile(arguments[0].toString());
boolean targetsSpecified = false;
if (loi == null) {
// no list inspector: the targets argument is null or a single string
Object a1 = arguments[1];
ArrayList<String> t = new ArrayList<String>();
if (a1 == null) {
// no explicit targets: collect the nodes already flagged as targets in the file
for (String id: net.getAllNodeIds()) {
if (net.isTarget(id)) {
t.add(id);
}
}
if (t.isEmpty()) {
// the file declares no targets either: every node becomes a target
targets = Arrays.asList(net.getAllNodeIds());
} else {
targets = t;
}
} else {
// a single target node was named as a string
t.add(arguments[1].toString());
targets = t;
targetsSpecified = true;
}
} else {
// the targets argument is a Hive list of node identifiers
targets = loi.getList(arguments[1]);
targetsSpecified = true;
}
if (targetsSpecified) {
// replace the network's stored targets with the requested ones,
// normalizing each entry to its canonical node id
net.clearAllTargets();
ArrayList<String> t2 = new ArrayList<String>(targets.size());
for (Object o: targets) {
int handle = findNode(net, o.toString());
net.setTarget(handle, true);
t2.add(net.getNodeId(handle));
}
targets = t2;
}
// the remaining arguments are (node, outcome) evidence pairs
for (int i = 2; i < arguments.length; i += 2) {
Object node = arguments[i];
Object outcome = arguments[i + 1];
net.setEvidence(findNode(net, node.toString()), outcome.toString());
}
net.updateBeliefs();
return targets;
}