

Java TimestampWritable Class Code Examples

This article collects and summarizes typical usage examples of the Java class org.apache.hadoop.hive.serde2.io.TimestampWritable. If you have been wondering what the TimestampWritable class does, how to use it, or what real code that uses it looks like, the curated class examples below may help.


The TimestampWritable class belongs to the org.apache.hadoop.hive.serde2.io package. The sections below present 15 code examples of the class, sorted by popularity by default.
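Before the collected examples, here is a minimal, self-contained sketch (written for this article, not taken from any of the projects below; the class name TimestampWritableQuickStart is made up for the sketch) of the most common round trip: wrapping a java.sql.Timestamp in a TimestampWritable and reading it back via getTimestamp(), getSeconds(), getNanos(), and setTime(long), the same calls the examples rely on. It assumes a Hive 1.x/2.x classpath; in Hive 3+ this class is deprecated in favor of TimestampWritableV2.

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

// Hypothetical demo class, only for illustration.
public class TimestampWritableQuickStart {
    public static void main(String[] args) {
        // Wrap a java.sql.Timestamp (epoch milliseconds) in the Hive writable.
        Timestamp ts = new Timestamp(1500000000000L);
        TimestampWritable writable = new TimestampWritable(ts);

        // Read it back: the underlying Timestamp, plus the seconds/nanos split
        // that several of the converters below use.
        Timestamp roundTripped = writable.getTimestamp();
        long seconds = writable.getSeconds();
        int nanos = writable.getNanos();
        System.out.println(roundTripped + " = " + seconds + "s + " + nanos + "ns");

        // The value can also be replaced in place from an epoch-millis long.
        writable.setTime(System.currentTimeMillis());
        System.out.println(writable.getTimestamp());
    }
}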

Example 1: narrowToTimestamp

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
protected TimestampWritable narrowToTimestamp(JsonParser parser)
        throws IOException {
    switch (parser.getCurrentToken()) {
        case VALUE_STRING:
            try {
                return new TimestampWritable(new Timestamp(JsonHelper.parseTimestamp(parser.getValueAsString()).getTime()));
            } catch (Exception e) {
                // String wasn't an ISO8601 timestamp
                return null;
            }
        case VALUE_NUMBER_INT:
            return new TimestampWritable(new Timestamp(parser.getLongValue()));
        case VALUE_NUMBER_FLOAT:
            return new TimestampWritable(new Timestamp((long) parser.getFloatValue()));
        default:
            return null;
    }
}
 
Author: bazaarvoice, Project: emodb, Lines: 20, Source: AbstractEmoFieldUDF.java

Example 2: write

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public void write(Writable w) throws IOException {
    MapWritable map = (MapWritable) w;
    SolrInputDocument doc = new SolrInputDocument();
    for (final Map.Entry<Writable, Writable> entry : map.entrySet()) {
        String key = entry.getKey().toString();

        if (entry.getValue() instanceof TimestampWritable) {
            Timestamp t = ((TimestampWritable) entry.getValue()).getTimestamp();
            doc.setField(key, dateFormat.format(new Date(t.getTime())));
        } else if (entry.getValue() instanceof ShortWritable) {
            doc.setField(key, ((ShortWritable) entry.getValue()).get());
        } else {
            doc.setField(key, entry.getValue().toString());
        }
    }
    log.debug("doc:" + doc.toString());
    table.save(doc);
}
 
Author: vroyer, Project: hive-solr-search, Lines: 21, Source: SolrWriter.java

Example 3: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public TimestampWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableTimeStampHolder h = (NullableTimeStampHolder) o;
<#else>
  final TimeStampHolder h = (TimeStampHolder) o;
</#if>
  return new TimestampWritable(new java.sql.Timestamp(h.value));
}
 
Author: skhalifa, Project: QDrill, Lines: 13, Source: ObjectInspectors.java

Example 4: copyToNewInstance

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
  super.copyToNewInstance(newInstance);
  // Need to preserve currentTimestamp
  GenericUDFCurrentTimestamp other = (GenericUDFCurrentTimestamp) newInstance;
  if (this.currentTimestamp != null) {
    other.currentTimestamp = new TimestampWritable(this.currentTimestamp);
  }
}
 
Author: myui, Project: hive-udf-backports, Lines: 10, Source: GenericUDFCurrentTimestamp.java

Example 5: getTimestampValue

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
public static Timestamp getTimestampValue(DeferredObject[] arguments, int i,
        Converter[] converters) throws HiveException {
    Object obj;
    if ((obj = arguments[i].get()) == null) {
        return null;
    }
    Object writableValue = converters[i].convert(obj);
    // if string can not be parsed converter will return null
    if (writableValue == null) {
        return null;
    }
    Timestamp ts = ((TimestampWritable) writableValue).getTimestamp();
    return ts;
}
 
Author: myui, Project: hive-udf-backports, Lines: 15, Source: BackportUtils.java

Example 6: setSafeValue

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
  final TimestampWritable value = ((TimestampObjectInspector)oi).getPrimitiveWritableObject(hiveFieldValue);
  long seconds = value.getSeconds();
  long nanos = value.getNanos();
  long millis = seconds * 1000 + nanos/1000/1000;
  ((NullableTimeStampMilliVector) outputVV).getMutator().setSafe(outputIndex, millis);
}
 
Author: dremio, Project: dremio-oss, Lines: 9, Source: HiveFieldConverter.java

Example 7: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public TimestampWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableTimeStampMilliHolder h = (NullableTimeStampMilliHolder) o;
<#else>
  final TimeStampMilliHolder h = (TimeStampMilliHolder) o;
</#if>
  return new TimestampWritable(new java.sql.Timestamp(h.value));
}
 
Author: dremio, Project: dremio-oss, Lines: 13, Source: ObjectInspectors.java

Example 8: write

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public Result write(Writable writable, Generator generator) {
    if (writable instanceof ByteWritable) {
        generator.writeNumber(((ByteWritable) writable).get());
    }
    else if (writable instanceof DoubleWritable) {
        generator.writeNumber(((DoubleWritable) writable).get());
    }
    else if (writable instanceof ShortWritable) {
        generator.writeNumber(((ShortWritable) writable).get());
    }
    // HiveDecimal - Hive 0.11+
    else if (writable != null && HiveConstants.DECIMAL_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // pass the UNIX epoch
    else if (writable instanceof TimestampWritable) {
        long ts = ((TimestampWritable) writable).getTimestamp().getTime();
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ts);
        generator.writeString(DatatypeConverter.printDateTime(cal));
    }
    // HiveDate - Hive 0.12+
    else if (writable != null && HiveConstants.DATE_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(DateWritableWriter.toES(writable));
    }
    // HiveVarcharWritable - Hive 0.12+
    else if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // HiveChar - Hive 0.13+
    else if (writable != null && HiveConstants.CHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(StringUtils.trim(writable.toString()));
    }
    else {
        return super.write(writable, generator);
    }

    return Result.SUCCESFUL();
}
 
Author: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 41, Source: HiveWritableValueWriter.java

Example 9: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Override
public TimestampWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableTimeStampHolder h = (NullableTimeStampHolder) o;
<#else>
  final TimeStampHolder h = (TimeStampHolder) o;
</#if>
  org.joda.time.LocalDateTime dateTime = new org.joda.time.LocalDateTime(h.value, org.joda.time.DateTimeZone.UTC);
  // use "toDate()" to get java.util.Date object with exactly the same fields as this Joda date-time.
  // See more in Javadoc for "LocalDateTime#toDate()"
  return new TimestampWritable(new java.sql.Timestamp(dateTime.toDate().getTime()));
}
 
Author: axbaretto, Project: drill, Lines: 16, Source: ObjectInspectors.java

Example 10: getTimestamp

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
private static Timestamp getTimestamp(Object object, TimestampObjectInspector inspector)
{
    // handle broken ObjectInspectors
    if (object instanceof TimestampWritable) {
        return ((TimestampWritable) object).getTimestamp();
    }
    return inspector.getPrimitiveJavaObject(object);
}
 
Author: y-lan, Project: presto, Lines: 9, Source: SerDeUtils.java

Example 11: toComparable

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    return (DateWritable) literal;
  case CHAR:
    stringLiteral = (String) literal;
    return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
  case VARCHAR:
    stringLiteral = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
Author: HotelsDotCom, Project: corc, Lines: 36, Source: EvaluatorFactory.java

Example 12: convertDatum2Writable

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
public static Writable convertDatum2Writable(Datum value) {
  switch(value.kind()) {
    case INT1: return new ByteWritable(value.asByte());
    case INT2: return new ShortWritable(value.asInt2());
    case INT4: return new IntWritable(value.asInt4());
    case INT8: return new LongWritable(value.asInt8());

    case FLOAT4: return new FloatWritable(value.asFloat4());
    case FLOAT8: return new DoubleWritable(value.asFloat8());

    // NOTE: value should be DateDatum
    case DATE: return new DateWritable(value.asInt4() - DateTimeConstants.UNIX_EPOCH_JDATE);

    // NOTE: value should be TimestampDatum
    case TIMESTAMP:
      TimestampWritable result = new TimestampWritable();
      result.setTime(DateTimeUtil.julianTimeToJavaTime(value.asInt8()));
      return result;

    case CHAR: {
      String str = value.asChars();
      return new HiveCharWritable(new HiveChar(str, str.length()));
    }
    case TEXT: return new Text(value.asChars());
    case VARBINARY: return new BytesWritable(value.asByteArray());

    case NULL_TYPE: return null;
  }

  throw new TajoRuntimeException(new NotImplementedException(TypeStringEncoder.encode(value.type())));
}
 
Author: apache, Project: tajo, Lines: 32, Source: WritableTypeConverter.java

Example 13: convertWritable2Datum

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
public static Datum convertWritable2Datum(Writable value) throws UnsupportedDataTypeException {
  if (value == null) {
    return NullDatum.get();
  }

  DataType type = convertWritableToTajoType(value.getClass());

  switch(type.getType()) {
    case INT1: return new Int2Datum(((ByteWritable)value).get());
    case INT2: return new Int2Datum(((ShortWritable)value).get());
    case INT4: return new Int4Datum(((IntWritable)value).get());
    case INT8: return new Int8Datum(((LongWritable)value).get());

    case FLOAT4: return new Float4Datum(((FloatWritable)value).get());
    case FLOAT8: return new Float8Datum(((DoubleWritable)value).get());

    case DATE: return new DateDatum(((DateWritable)value).getDays() + DateTimeConstants.UNIX_EPOCH_JDATE);
    case TIMESTAMP: return new TimestampDatum(DateTimeUtil.javaTimeToJulianTime(
        ((TimestampWritable)value).getTimestamp().getTime()));

    case CHAR: return new CharDatum(value.toString());
    case TEXT: return new TextDatum(value.toString());
    case VARBINARY: return new BlobDatum(((BytesWritable)value).getBytes());
  }

  throw new TajoRuntimeException(new UnsupportedDataTypeException(value.getClass().getTypeName()));
}
 
Author: apache, Project: tajo, Lines: 28, Source: WritableTypeConverter.java

Example 14: TestTimeParse

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
@Test
public void TestTimeParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();

    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "timestamp");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd").parse("2020-02-20").getTime(),
        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-05-05 05:05\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2017-05-05 05:05").getTime(),
        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-08-09 10:11:12\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2017-08-09 10:11:12").getTime(),
        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-06-05 04:03:02.123456789\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse("2017-06-05 04:03:02.123").getTime(),
        ((TimestampWritable) fieldData).getTimestamp().getTime());  // nanoseconds are parsed but not checked
}
 
Author: Esri, Project: spatial-framework-for-hadoop, Lines: 39, Source: TestEsriJsonSerDe.java

Example 15: TestCopyTimestamp

import org.apache.hadoop.hive.serde2.io.TimestampWritable; // import the required package/class
/**
 * Tests that after copying a lazy timestamp object, calling materialize on the original and the
 * copy doesn't advance the tree reader twice
 * @throws Exception
 */
@Test
public void TestCopyTimestamp() throws Exception {
  ReaderWriterProfiler.setProfilerOptions(null);
  OrcLazyTimestamp lazyTimestamp = new OrcLazyTimestamp(new LazyTimestampTreeReader(0, 0) {
    int nextCalls = 0;

    @Override
    public Object next(Object previous) throws IOException {
      if (nextCalls == 0) {
        return new TimestampWritable(new Timestamp(1));
      }

      throw new IOException("next should only be called once");
    }

    @Override
    protected boolean seekToRow(long currentRow) throws IOException {
      return true;
    }
  });

  TimestampObjectInspector timestampOI = (TimestampObjectInspector)
      OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.timestampTypeInfo);

  OrcLazyTimestamp lazyTimestamp2 = (OrcLazyTimestamp) timestampOI.copyObject(lazyTimestamp);

  Assert.assertEquals(new Timestamp(1), ((TimestampWritable) lazyTimestamp.materialize()).getTimestamp());
  Assert.assertEquals(new Timestamp(1), ((TimestampWritable) lazyTimestamp2.materialize()).getTimestamp());
}
 
Author: facebookarchive, Project: hive-dwrf, Lines: 35, Source: TestObjectInspector.java


Note: The org.apache.hadoop.hive.serde2.io.TimestampWritable class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and the copyright of the source code remains with the original authors. For redistribution and use, please refer to each project's license. Do not republish without permission.