当前位置: 首页>>代码示例>>Java>>正文


Java DateWritable类代码示例

本文整理汇总了Java中org.apache.hadoop.hive.serde2.io.DateWritable的典型用法代码示例。如果您正苦于以下问题:Java DateWritable类的具体用法?Java DateWritable怎么用?Java DateWritable使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


DateWritable类属于org.apache.hadoop.hive.serde2.io包,在下文中一共展示了DateWritable类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: formatDateAsLong

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
/**
 * Converts a Hive date value to its day count since the Unix epoch.
 * Fast paths read the day count directly off the writable; otherwise the
 * inspector's java.sql.Date (midnight in the JVM zone) is shifted to
 * midnight UTC and reduced to whole days.
 */
private static long formatDateAsLong(Object object, DateObjectInspector inspector)
{
    // Lazily-deserialized dates already carry the epoch-day count.
    if (object instanceof LazyDate) {
        return ((LazyDate) object).getWritableObject().getDays();
    }
    if (object instanceof DateWritable) {
        return ((DateWritable) object).getDays();
    }

    // Hive hands back java.sql.Date pinned to midnight in the JVM time zone;
    // keep the local wall-clock fields but reinterpret them as UTC.
    long localMidnightMillis = inspector.getPrimitiveJavaObject(object).getTime();
    long utcMidnightMillis = DateTimeZone.getDefault().getMillisKeepLocal(DateTimeZone.UTC, localMidnightMillis);
    return TimeUnit.MILLISECONDS.toDays(utcMidnightMillis);
}
 
开发者ID:y-lan,项目名称:presto,代码行数:17,代码来源:SerDeUtils.java

示例2: testDateTypeConverting

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void testDateTypeConverting() throws UnsupportedDataTypeException {
  // Build 1977-06-28 as a Tajo date datum.
  TimeMeta tm = new TimeMeta();
  tm.years = 1977;
  tm.monthOfYear = 6;
  tm.dayOfMonth = 28;
  Datum original = new DateDatum(tm);

  // Datum -> Writable must yield the same calendar date.
  Writable writable = WritableTypeConverter.convertDatum2Writable(original);
  assertEquals("1977-06-28", ((DateWritable) writable).get().toString());

  // Converting back must be lossless.
  Datum roundTripped = WritableTypeConverter.convertWritable2Datum(writable);
  assertEquals(original, roundTripped);
}
 
开发者ID:apache,项目名称:tajo,代码行数:18,代码来源:TestWritableTypeConverter.java

示例3: TestDateParse

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void TestDateParse() throws Exception {
	// Configure a GeoJSON SerDe with a single DATE column named "when".
	Configuration config = new Configuration();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");

	AbstractSerDe jserde = new GeoJsonSerDe();
	jserde.initialize(config, proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField f0 = rowOI.getStructFieldRef("when");

	// An ISO date string in the JSON properties should parse to the same date.
	Text value = new Text();
	value.set("{\"properties\":{\"when\":\"2020-02-20\"}}");
	Object row = jserde.deserialize(value);
	Object fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals("2020-02-20", ((DateWritable) fieldData).get().toString());

	value.set("{\"properties\":{\"when\":\"2017-05-05\"}}");
	row = jserde.deserialize(value);
	fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals("2017-05-05", ((DateWritable) fieldData).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:25,代码来源:TestGeoJsonSerDe.java

示例4: TestEpochParse

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void TestEpochParse() throws Exception {
	// Configure a GeoJSON SerDe with a single DATE column named "when".
	Configuration config = new Configuration();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");

	AbstractSerDe jserde = new GeoJsonSerDe();
	jserde.initialize(config, proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField f0 = rowOI.getStructFieldRef("when");

	// A numeric epoch-millis value should parse to the date java.sql.Date
	// produces for the same instant in the local zone.
	Text value = new Text();
	value.set("{\"properties\":{\"when\":147147147147}}");
	Object row = jserde.deserialize(value);
	Object fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
						((DateWritable) fieldData).get().toString());

	value.set("{\"properties\":{\"when\":142857142857}}");
	row = jserde.deserialize(value);
	fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
						((DateWritable) fieldData).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:27,代码来源:TestGeoJsonSerDe.java

示例5: TestDateParse

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void TestDateParse() throws Exception {
	// Configure an Esri JSON SerDe with a single DATE column named "when".
	Configuration config = new Configuration();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");

	AbstractSerDe jserde = new EsriJsonSerDe();
	jserde.initialize(config, proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField f0 = rowOI.getStructFieldRef("when");

	// An ISO date string in the JSON attributes should parse to the same date.
	Text value = new Text();
	value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}");
	Object row = jserde.deserialize(value);
	Object fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals("2020-02-20", ((DateWritable) fieldData).get().toString());

	value.set("{\"attributes\":{\"when\":\"2017-05-05\"}}");
	row = jserde.deserialize(value);
	fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals("2017-05-05", ((DateWritable) fieldData).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:25,代码来源:TestEsriJsonSerDe.java

示例6: TestEpochParse

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void TestEpochParse() throws Exception {
	// Configure an Esri JSON SerDe with a single DATE column named "when".
	Configuration config = new Configuration();
	Properties proptab = new Properties();
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
	proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");

	AbstractSerDe jserde = new EsriJsonSerDe();
	jserde.initialize(config, proptab);
	StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();
	StructField f0 = rowOI.getStructFieldRef("when");

	// A numeric epoch-millis value should parse to the date java.sql.Date
	// produces for the same instant in the local zone.
	Text value = new Text();
	value.set("{\"attributes\":{\"when\":147147147147}}");
	Object row = jserde.deserialize(value);
	Object fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
						((DateWritable) fieldData).get().toString());

	value.set("{\"attributes\":{\"when\":142857142857}}");
	row = jserde.deserialize(value);
	fieldData = rowOI.getStructFieldData(row, f0);
	Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
						((DateWritable) fieldData).get().toString());
}
 
开发者ID:Esri,项目名称:spatial-framework-for-hadoop,代码行数:27,代码来源:TestEsriJsonSerDe.java

示例7: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
// Converts a date holder into Hive's DateWritable.
// NOTE(review): this is a FreeMarker template — the <#if> directive selects
// between the nullable and required holder variants at code-generation time.
@Override
public DateWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  // Nullable variant: a null object maps to SQL NULL.
  if (o == null) {
    return null;
  }
  final NullableDateHolder h = (NullableDateHolder) o;
<#else>
  final DateHolder h = (DateHolder) o;
</#if>
  // h.value is presumably epoch milliseconds — TODO confirm against the holder's docs.
  return new DateWritable(new java.sql.Date(h.value));
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:13,代码来源:ObjectInspectors.java

示例8: copyToNewInstance

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Override
public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
  super.copyToNewInstance(newInstance);
  // The evaluation-time constant must survive instance copies, so clone the
  // writable into the target UDF instead of sharing the mutable instance.
  GenericUDFCurrentDate target = (GenericUDFCurrentDate) newInstance;
  if (currentDate != null) {
    target.currentDate = new DateWritable(currentDate);
  }
}
 
开发者ID:myui,项目名称:hive-udf-backports,代码行数:10,代码来源:GenericUDFCurrentDate.java

示例9: getDateValue

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
/**
 * Extracts a {@link Date} from the i-th deferred UDF argument.
 *
 * String-group arguments are parsed with {@code getDateFormat()}; unparsable
 * text yields {@code null} (SQL NULL) rather than an error, matching Hive's
 * lenient date handling. Date-group arguments are unwrapped via their
 * converter. A NULL argument returns {@code null}.
 *
 * @param arguments  the deferred argument array
 * @param i          index of the argument to read
 * @param inputTypes primitive category of each argument
 * @param converters converter for each argument
 * @return the date value, or {@code null} if the argument is NULL or unparsable
 * @throws HiveException if the argument is neither STRING_GROUP nor DATE_GROUP
 */
public static Date getDateValue(DeferredObject[] arguments, int i,
        PrimitiveCategory[] inputTypes, Converter[] converters) throws HiveException {
    Object obj;
    if ((obj = arguments[i].get()) == null) {
        return null;
    }

    Date date;
    switch (inputTypes[i]) {
        case STRING:
        case VARCHAR:
        case CHAR:
            String dateStr = converters[i].convert(obj).toString();
            try {
                date = getDateFormat().parse(dateStr);
            } catch (ParseException e) {
                // Unparsable date text maps to SQL NULL, not an error.
                return null;
            }
            break;
        case TIMESTAMP:
        case DATE:
            //case TIMESTAMPTZ:
            Object writableValue = converters[i].convert(obj);
            date = ((DateWritable) writableValue).get();
            break;
        default:
            // Fix: report the offending argument position (i), not a hardcoded 0,
            // so multi-argument UDFs point at the right argument.
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes STRING_GROUP and DATE_GROUP types, got " + inputTypes[i]);
    }
    return date;
}
 
开发者ID:myui,项目名称:hive-udf-backports,代码行数:32,代码来源:BackportUtils.java

示例10: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
// Converts a date-milli holder into Hive's DateWritable.
// NOTE(review): this is a FreeMarker template — the <#if> directive selects
// between the nullable and required holder variants at code-generation time.
@Override
public DateWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  // Nullable variant: a null object maps to SQL NULL.
  if (o == null) {
    return null;
  }
  final NullableDateMilliHolder h = (NullableDateMilliHolder) o;
<#else>
  final DateMilliHolder h = (DateMilliHolder) o;
</#if>
  // Holder name suggests h.value is epoch milliseconds — TODO confirm.
  return new DateWritable(new java.sql.Date(h.value));
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:13,代码来源:ObjectInspectors.java

示例11: testDateTime

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void testDateTime() {
    // Today's date as both a java.time and a java.sql value.
    LocalDate today = LocalDate.now();
    Date sqlToday = Date.valueOf(today);

    // Epoch millis derived from the SQL date must map back to the same day.
    long epochMillis = DateTimeUtil.getEpochMillisecond(sqlToday);
    Assert.assertEquals(today, DateTimeUtil.getLocalDate(epochMillis));
    Assert.assertEquals(sqlToday, DateTimeUtil.getJavaSQLDate(epochMillis));

    // Round-tripping through Hive's DateWritable must preserve the millis.
    DateWritable writable = new DateWritable(DateTimeUtil.getJavaSQLDate(epochMillis));
    Assert.assertEquals(epochMillis, DateTimeUtil.getEpochMillisecond(writable.get()));
}
 
开发者ID:shunfei,项目名称:indexr,代码行数:12,代码来源:HiveDateTimeTest.java

示例12: getPrimitiveWritableObject

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
// Converts a Drill date holder into Hive's DateWritable.
// NOTE(review): this is a FreeMarker template — the <#if> directive selects
// between the nullable and required holder variants at code-generation time.
@Override
public DateWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  // Nullable variant: a null object maps to SQL NULL.
  if (o == null) {
    return null;
  }
  final NullableDateHolder h = (NullableDateHolder) o;
<#else>
  final DateHolder h = (DateHolder) o;
</#if>
  org.joda.time.LocalDate localDate = new org.joda.time.LocalDate(h.value, org.joda.time.DateTimeZone.UTC);
  // Use "toDate()" to get a java.util.Date object with exactly the same year, month and day as the Joda date.
  // See more in Javadoc for "LocalDate#toDate()"
  return new DateWritable(new java.sql.Date(localDate.toDate().getTime()));
}
 
开发者ID:axbaretto,项目名称:drill,代码行数:16,代码来源:ObjectInspectors.java

示例13: toComparable

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
/**
 * Wraps a literal value in the Hadoop/Hive {@code Comparable} writable that
 * matches the given primitive category. Integral literals arrive as Long and
 * floating-point literals as Double, hence the narrowing conversions.
 *
 * @throws IllegalArgumentException for categories with no writable mapping
 */
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    // Date literals are already writables; pass them through.
    return (DateWritable) literal;
  case CHAR: {
    String charValue = (String) literal;
    return new HiveCharWritable(new HiveChar(charValue, charValue.length()));
  }
  case VARCHAR: {
    String varcharValue = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(varcharValue, varcharValue.length()));
  }
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:36,代码来源:EvaluatorFactory.java

示例14: readDatePredicatePushdown

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
@Test
public void readDatePredicatePushdown() throws IOException {
  // Write two consecutive dates into an ORC file with a single DATE column.
  TypeInfo typeInfo = TypeInfoFactory.dateTypeInfo;
  Date firstDate = Date.valueOf("1970-01-01");
  Date secondDate = Date.valueOf("1970-01-02");
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow(firstDate);
    writer.addRow(secondDate);
  }

  // Push an equality predicate on the first date down into the ORC reader.
  StructTypeInfo structTypeInfo = new StructTypeInfoBuilder().add("a", TypeInfoFactory.dateTypeInfo).build();
  SearchArgument searchArgument = SearchArgumentFactory
      .newBuilder()
      .startAnd()
      .equals("a", new DateWritable(firstDate))
      .end()
      .build();
  OrcFile orcFile = OrcFile.source().columns(structTypeInfo).schemaFromFile().searchArgument(searchArgument).build();
  Tap<?, ?, ?> tap = new Hfs(orcFile, path);

  // Only the matching row should come back.
  List<Tuple> rows = Plunger.readDataFromTap(tap).asTupleList();
  assertThat(rows.size(), is(1));
  assertThat(((Date) rows.get(0).getObject(0)).getTime(), is(firstDate.getTime()));
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:30,代码来源:OrcFileTest.java

示例15: convertDatum2Writable

import org.apache.hadoop.hive.serde2.io.DateWritable; //导入依赖的package包/类
/**
 * Maps a Tajo {@link Datum} onto the equivalent Hadoop/Hive {@link Writable}.
 * NULL_TYPE maps to {@code null}; unsupported kinds raise a runtime exception.
 */
public static Writable convertDatum2Writable(Datum value) {
  switch (value.kind()) {
    case INT1:
      return new ByteWritable(value.asByte());
    case INT2:
      return new ShortWritable(value.asInt2());
    case INT4:
      return new IntWritable(value.asInt4());
    case INT8:
      return new LongWritable(value.asInt8());

    case FLOAT4:
      return new FloatWritable(value.asFloat4());
    case FLOAT8:
      return new DoubleWritable(value.asFloat8());

    case DATE:
      // NOTE: value should be DateDatum; rebase its Julian day count onto the
      // Unix epoch that DateWritable expects.
      return new DateWritable(value.asInt4() - DateTimeConstants.UNIX_EPOCH_JDATE);

    case TIMESTAMP: {
      // NOTE: value should be TimestampDatum.
      TimestampWritable timestamp = new TimestampWritable();
      timestamp.setTime(DateTimeUtil.julianTimeToJavaTime(value.asInt8()));
      return timestamp;
    }

    case CHAR: {
      String chars = value.asChars();
      return new HiveCharWritable(new HiveChar(chars, chars.length()));
    }
    case TEXT:
      return new Text(value.asChars());
    case VARBINARY:
      return new BytesWritable(value.asByteArray());

    case NULL_TYPE:
      return null;
  }

  throw new TajoRuntimeException(new NotImplementedException(TypeStringEncoder.encode(value.type())));
}
 
开发者ID:apache,项目名称:tajo,代码行数:32,代码来源:WritableTypeConverter.java


注:本文中的org.apache.hadoop.hive.serde2.io.DateWritable类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。