

Java ShortWritable Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.io.ShortWritable. If you are wondering what the ShortWritable class does, how to use it, or what real code that uses it looks like, the curated class examples below may help.


The ShortWritable class belongs to the org.apache.hadoop.hive.serde2.io package. Fifteen code examples of the class are shown below, sorted by popularity by default.
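
Before diving into the project code, here is a minimal, self-contained sketch of the class itself. It relies only on the standard Writable contract (constructors, get/set, write/readFields); the byte-array round trip is an illustrative assumption, not taken from any of the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ShortWritableDemo {
    public static void main(String[] args) throws IOException {
        // Wrap a primitive short in Hive's Writable box type.
        ShortWritable sw = new ShortWritable((short) 42);
        System.out.println(sw.get()); // 42

        // Round-trip the value through the Writable serialization contract.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        sw.write(new DataOutputStream(bytes));

        ShortWritable copy = new ShortWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get()); // 42
    }
}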

Example 1: getConstantIntValue

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
public static Integer getConstantIntValue(ObjectInspector[] arguments, int i)
        throws UDFArgumentTypeException {
    Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
    if (constValue == null) {
        return null;
    }
    int v;
    if (constValue instanceof IntWritable) {
        v = ((IntWritable) constValue).get();
    } else if (constValue instanceof ShortWritable) {
        v = ((ShortWritable) constValue).get();
    } else if (constValue instanceof ByteWritable) {
        v = ((ByteWritable) constValue).get();
    } else {
        throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
                + getArgOrder(i) + " argument, got " + constValue.getClass());
    }
    return v;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 20, Source: BackportUtils.java
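
A hedged sketch of how a GenericUDF might call this helper from initialize: the UDF itself (GenericUDFDemo) and its argument layout are hypothetical; only the BackportUtils.getConstantIntValue signature comes from the example above.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical UDF whose second argument must be a constant INT/SMALLINT/TINYINT.
public class GenericUDFDemo extends GenericUDF {

    private Integer length;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 2 || !(arguments[1] instanceof ConstantObjectInspector)) {
            throw new UDFArgumentTypeException(1, "_FUNC_ expects a constant second argument");
        }
        // Accepts IntWritable, ShortWritable or ByteWritable constants alike.
        length = BackportUtils.getConstantIntValue(arguments, 1);
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        return null; // evaluation is omitted in this sketch
    }

    @Override
    public String getDisplayString(String[] children) {
        return "demo(" + String.join(", ", children) + ")";
    }
}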

Example 2: write

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Override
public void write(Writable w) throws IOException {
	MapWritable map = (MapWritable) w;
	SolrInputDocument doc = new SolrInputDocument();
	for (final Map.Entry<Writable, Writable> entry : map.entrySet()) {
		String key = entry.getKey().toString();
		
		if (entry.getValue() instanceof TimestampWritable) {
			Timestamp t = ((TimestampWritable)entry.getValue()).getTimestamp();
			doc.setField(key, dateFormat.format( new Date(t.getTime()) ));
		} else if (entry.getValue() instanceof ShortWritable) {
		    doc.setField(key, ((ShortWritable)entry.getValue()).get());
		} else {
			doc.setField(key, entry.getValue().toString());
		}
		
	}
	log.debug("doc:"+doc.toString());
	table.save(doc);
}
 
Developer: vroyer, Project: hive-solr-search, Lines: 21, Source: SolrWriter.java

Example 3: writePrimitive

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
private void writePrimitive(final Writable value) {
  if (value == null) {
    return;
  }
  if (value instanceof DoubleWritable) {
    recordConsumer.addDouble(((DoubleWritable) value).get());
  } else if (value instanceof BooleanWritable) {
    recordConsumer.addBoolean(((BooleanWritable) value).get());
  } else if (value instanceof FloatWritable) {
    recordConsumer.addFloat(((FloatWritable) value).get());
  } else if (value instanceof IntWritable) {
    recordConsumer.addInteger(((IntWritable) value).get());
  } else if (value instanceof LongWritable) {
    recordConsumer.addLong(((LongWritable) value).get());
  } else if (value instanceof ShortWritable) {
    recordConsumer.addInteger(((ShortWritable) value).get());
  } else if (value instanceof ByteWritable) {
    recordConsumer.addInteger(((ByteWritable) value).get());
  } else if (value instanceof BigDecimalWritable) {
    throw new UnsupportedOperationException("BigDecimal writing not implemented");
  } else if (value instanceof BinaryWritable) {
    recordConsumer.addBinary(((BinaryWritable) value).getBinary());
  } else {
    throw new IllegalArgumentException("Unknown value type: " + value + " " + value.getClass());
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 27, Source: DataWritableWriter.java

Example 4: testHashMap

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Test
public void testHashMap() {
  final Map<Writable, Writable> map = new HashMap<Writable, Writable>();
  map.put(new IntWritable(0), new IntWritable(1));
  map.put(new IntWritable(2), new IntWritable(3));
  map.put(new IntWritable(4), new IntWritable(5));
  map.put(new IntWritable(6), new IntWritable(7));

  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
  assertEquals("Wrong result of inspection", new IntWritable(5), inspector.getMapValueElement(map, new IntWritable(4)));
  assertEquals("Wrong result of inspection", new IntWritable(7), inspector.getMapValueElement(map, new IntWritable(6)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 0)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 2)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 4)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 6)));
}
 
Developer: apache, Project: parquet-mr, Lines: 18, Source: TestStandardParquetHiveMapInspector.java

Example 5: testHashMap

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Test
public void testHashMap() {
  final Map<Writable, Writable> map = new HashMap<Writable, Writable>();
  map.put(new IntWritable(0), new IntWritable(1));
  map.put(new IntWritable(2), new IntWritable(3));
  map.put(new IntWritable(4), new IntWritable(5));
  map.put(new IntWritable(6), new IntWritable(7));


  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
  assertEquals("Wrong result of inspection", new IntWritable(5), inspector.getMapValueElement(map, new IntWritable(4)));
  assertEquals("Wrong result of inspection", new IntWritable(7), inspector.getMapValueElement(map, new IntWritable(6)));
  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new ShortWritable((short) 0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new ShortWritable((short) 2)));
  assertEquals("Wrong result of inspection", new IntWritable(5), inspector.getMapValueElement(map, new ShortWritable((short) 4)));
  assertEquals("Wrong result of inspection", new IntWritable(7), inspector.getMapValueElement(map, new ShortWritable((short) 6)));
}
 
Developer: apache, Project: parquet-mr, Lines: 19, Source: TestDeepParquetHiveMapInspector.java

Example 6: evaluate

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
public IntWritable evaluate(ShortWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >> b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftRight.java

Example 7: evaluate

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
public IntWritable evaluate(ShortWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() << b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftLeft.java

Example 8: evaluate

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
public IntWritable evaluate(ShortWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >>> b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftRightUnsigned.java
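
Examples 6-8 differ only in the shift operator they apply. One subtlety worth keeping in mind: a.get() returns a short, which Java promotes to int before shifting, so an unsigned right shift of a negative value operates on the full 32-bit representation. A small standalone sketch (not project code) shows the effect:

public class ShiftPromotionDemo {
    public static void main(String[] args) {
        short a = -8;
        int b = 1;
        // The short operand is promoted to int before the shift, as in the UDFs above.
        System.out.println(a >> b);   // -4          arithmetic shift keeps the sign
        System.out.println(a << b);   // -16
        System.out.println(a >>> b);  // 2147483644  unsigned shift on the promoted 32-bit value
    }
}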

Example 9: write

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Override
public Result write(Writable writable, Generator generator) {
    if (writable instanceof ByteWritable) {
        generator.writeNumber(((ByteWritable) writable).get());
    }
    else if (writable instanceof DoubleWritable) {
        generator.writeNumber(((DoubleWritable) writable).get());
    }
    else if (writable instanceof ShortWritable) {
        generator.writeNumber(((ShortWritable) writable).get());
    }
    // HiveDecimal - Hive 0.11+
    else if (writable != null && HiveConstants.DECIMAL_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // pass the UNIX epoch
    else if (writable instanceof TimestampWritable) {
        long ts = ((TimestampWritable) writable).getTimestamp().getTime();
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ts);
        generator.writeString(DatatypeConverter.printDateTime(cal));
    }
    // HiveDate - Hive 0.12+
    else if (writable != null && HiveConstants.DATE_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(DateWritableWriter.toES(writable));
    }
    // HiveVarcharWritable - Hive 0.12+
    else if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // HiveChar - Hive 0.13+
    else if (writable != null && HiveConstants.CHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(StringUtils.trim(writable.toString()));
    }
    else {
        return super.write(writable, generator);
    }

    return Result.SUCCESFUL();
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 41, Source: HiveWritableValueWriter.java
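
Example 9 checks some types by comparing class names as strings (HiveConstants.DECIMAL_WRITABLE and friends) rather than using instanceof; that way the writer still loads on Hive versions where those classes do not exist. A generic, assumed sketch of the same pattern:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class SoftTypeCheckDemo {
    // Class-name constant for a type that may be missing from the runtime classpath.
    private static final String OPTIONAL_WRITABLE =
            "org.apache.hadoop.hive.serde2.io.HiveDecimalWritable";

    static String describe(Writable w) {
        if (w instanceof IntWritable) {
            return "int: " + ((IntWritable) w).get();
        }
        // No instanceof here: the optional class is referenced only by name,
        // so this method links and runs even when that class is absent.
        if (w != null && OPTIONAL_WRITABLE.equals(w.getClass().getName())) {
            return "decimal: " + w;
        }
        return String.valueOf(w);
    }

    public static void main(String[] args) {
        System.out.println(describe(new IntWritable(7)));
    }
}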

Example 10: toComparable

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    return (DateWritable) literal;
  case CHAR:
    stringLiteral = (String) literal;
    return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
  case VARCHAR:
    stringLiteral = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 36, Source: EvaluatorFactory.java
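
Note how this factory expects integral literals boxed as Long: the SHORT branch narrows a Long down to a ShortWritable with shortValue(). A standalone sketch (assumed values, not project code) of that narrowing and its silent overflow:

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class NarrowingDemo {
    public static void main(String[] args) {
        // Literals reach the factory boxed as Long; the SHORT branch narrows them like this.
        Object literal = Long.valueOf(42L);
        ShortWritable sw = new ShortWritable(((Long) literal).shortValue());
        System.out.println(sw.get()); // 42

        // Values outside the 16-bit range wrap silently, so callers must guard against it.
        Object big = Long.valueOf(40000L);
        System.out.println(new ShortWritable(((Long) big).shortValue()).get()); // -25536
    }
}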

Example 11: getObjectFromWritable

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
private Object getObjectFromWritable(Writable w) {
	if (w instanceof IntWritable) {
		// int
		return ((IntWritable) w).get();
	} else if (w instanceof ShortWritable) {
		// short
		return ((ShortWritable) w).get();
	} else if (w instanceof ByteWritable) {
		// byte
		return ((ByteWritable) w).get();
	} else if (w instanceof BooleanWritable) {
		// boolean
		return ((BooleanWritable) w).get();
	} else if (w instanceof LongWritable) {
		// long
		return ((LongWritable) w).get();
	} else if (w instanceof FloatWritable) {
		// float
		return ((FloatWritable) w).get();
	} else if (w instanceof DoubleWritable) {
		// double
		return ((DoubleWritable) w).get();
	} else if (w instanceof NullWritable) {
		// null
		return null;
	} else {
		// treat as string
		return w.toString();
	}
}
 
Developer: vroyer, Project: hive-solr-search, Lines: 31, Source: SolrWriter.java

Example 12: get

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Override
public short get(Object o) {
  // Accept int writables and convert them.
  if (o instanceof IntWritable) {
    return (short) ((IntWritable) o).get();
  }
  return ((ShortWritable) o).get();
}
 
Developer: apache, Project: parquet-mr, Lines: 9, Source: ParquetShortInspector.java

Example 13: createPrimitive

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
  case BYTE:
    return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
  case DOUBLE:
    return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
  case FLOAT:
    return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
  case INT:
    return new IntWritable(((IntObjectInspector) inspector).get(obj));
  case LONG:
    return new LongWritable(((LongObjectInspector) inspector).get(obj));
  case SHORT:
    return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
  case STRING:
    return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
  default:
    throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 29, Source: ParquetHiveSerDe.java

Example 14: testRegularMap

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Test
public void testRegularMap() {
  final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
  final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};

  final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
    new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});

  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});

  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 0)));
  assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 2)));
}
 
Developer: apache, Project: parquet-mr, Lines: 16, Source: TestStandardParquetHiveMapInspector.java

Example 15: testRegularMap

import org.apache.hadoop.hive.serde2.io.ShortWritable; // import the dependency package/class
@Test
public void testRegularMap() {
  final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
  final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};

  final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
    new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});

  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});

  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
  assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new ShortWritable((short) 0)));
  assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new ShortWritable((short) 2)));
}
 
Developer: apache, Project: parquet-mr, Lines: 16, Source: TestDeepParquetHiveMapInspector.java


Note: the org.apache.hadoop.hive.serde2.io.ShortWritable examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors, and copyright of the source code remains with the original authors; please consult each project's License before redistributing or using the code. Do not reproduce this article without permission.