

Java Timestamp Class Code Examples

This article collects typical usage examples of the Java class org.apache.kafka.connect.data.Timestamp. If you have been wondering what the Timestamp class is for, how to use it, or what real code that uses it looks like, the curated examples below should help.


The Timestamp class belongs to the org.apache.kafka.connect.data package. Fifteen code examples are shown below, sorted by popularity.
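
As a quick orientation before the examples, the sketch below shows the core surface of the Timestamp logical type: it is an INT64 schema annotated with a name and version, its logical representation in Java is java.util.Date, and fromLogical/toLogical convert between the two. This is a minimal illustration, not taken from any of the projects cited below; the class name TimestampSketch is only a scaffold.

import java.util.Date;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Timestamp;

public class TimestampSketch {
    public static void main(String[] args) {
        // Timestamp is a logical type: the schema name/version mark an INT64 field
        // as "milliseconds since the Unix epoch", surfaced in Java as java.util.Date.
        Schema schema = Timestamp.SCHEMA;
        System.out.println(schema.name()); // org.apache.kafka.connect.data.Timestamp
        System.out.println(schema.type()); // INT64

        // Convert between the logical value (Date) and the serialized form (long millis).
        Date now = new Date();
        long epochMillis = Timestamp.fromLogical(schema, now);
        Date roundTripped = Timestamp.toLogical(schema, epochMillis);
        System.out.println(now.equals(roundTripped)); // true

        // builder() allows an optional variant, as several examples below use.
        Schema record = SchemaBuilder.struct()
            .field("created", Timestamp.SCHEMA)
            .field("updated", Timestamp.builder().optional().build())
            .build();
        System.out.println(record.field("updated").schema().isOptional()); // true
    }
}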

Example 1: timestampToJson

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void timestampToJson() throws IOException {
    GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    calendar.add(Calendar.MILLISECOND, 2000000000);
    calendar.add(Calendar.MILLISECOND, 2000000000);
    java.util.Date date = calendar.getTime();

    JsonNode converted = parse(converter.fromConnectData(TOPIC, Timestamp.SCHEMA, date));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"int64\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1 }"),
            converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    JsonNode payload = converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertTrue(payload.isLong());
    assertEquals(4000000000L, payload.longValue());
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 17, Source: JsonConverterTest.java
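
A side note on the arithmetic above: adding 2,000,000,000 ms twice to the epoch yields 4,000,000,000 ms, which is exactly the payload the test asserts. The following standalone sketch reproduces the same envelope outside the test harness; it assumes the stock org.apache.kafka.connect.json.JsonConverter with its default schemas.enable=true and is not part of the original test class.

import java.util.Collections;
import java.util.Date;

import org.apache.kafka.connect.data.Timestamp;
import org.apache.kafka.connect.json.JsonConverter;

public class TimestampJsonSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // schemas.enable=true (the default) wraps the payload in a schema envelope.
        converter.configure(Collections.singletonMap("schemas.enable", "true"), false);

        // 2,000,000,000 ms added twice to the epoch, i.e. 4,000,000,000 ms.
        Date date = new Date(4_000_000_000L);
        byte[] json = converter.fromConnectData("my-topic", Timestamp.SCHEMA, date);
        System.out.println(new String(json));
        // Expected shape (field order may vary):
        // {"schema":{"type":"int64","optional":false,"name":"org.apache.kafka.connect.data.Timestamp","version":1},"payload":4000000000}
    }
}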

Example 2: alterAddColumns

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void alterAddColumns() {
  final Schema keySchema = SchemaBuilder.struct()
      .field("username", Schema.STRING_SCHEMA)
      .field("companyID", Schema.INT64_SCHEMA)
      .build();
  final Schema valueSchema = SchemaBuilder.struct()
      .field("username", Schema.STRING_SCHEMA)
      .field("companyID", Schema.INT64_SCHEMA)
      .field("firstName", Schema.STRING_SCHEMA)
      .field("lastName", Schema.STRING_SCHEMA)
      .field("created", Timestamp.SCHEMA)
      .field("updated", Timestamp.SCHEMA)
      .build();

  CassandraTableMetadata tableMetadata = mock(CassandraTableMetadata.class);
  when(tableMetadata.columnMetadata("username")).thenReturn(mock(CassandraColumnMetadata.class));
  when(tableMetadata.columnMetadata("companyID")).thenReturn(mock(CassandraColumnMetadata.class));
  when(tableMetadata.columnMetadata("firstName")).thenReturn(mock(CassandraColumnMetadata.class));
  when(tableMetadata.columnMetadata("lastName")).thenReturn(mock(CassandraColumnMetadata.class));
  when(this.builder.session.tableMetadata("foo")).thenReturn(tableMetadata);
  this.builder.build("foo", keySchema, valueSchema);
  verify(this.session, times(1)).executeStatement(any(Alter.Options.class));
}
 
Developer: jcustenborder, Project: kafka-connect-cassandra, Lines: 25, Source: ConnectSchemaBuilderTest.java

Example 3: createComplexPrimaryKey

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void createComplexPrimaryKey() {
  final Schema keySchema = SchemaBuilder.struct()
      .field("username", Schema.STRING_SCHEMA)
      .field("companyID", Schema.INT64_SCHEMA)
      .build();
  final Schema valueSchema = SchemaBuilder.struct()
      .field("username", Schema.STRING_SCHEMA)
      .field("companyID", Schema.INT64_SCHEMA)
      .field("firstName", Schema.STRING_SCHEMA)
      .field("lastName", Schema.STRING_SCHEMA)
      .field("created", Timestamp.SCHEMA)
      .field("updated", Timestamp.SCHEMA)
      .build();

  this.builder.build("foo", keySchema, valueSchema);
  verify(this.session, times(1)).executeStatement(any(Create.class));
}
 
Developer: jcustenborder, Project: kafka-connect-cassandra, Lines: 19, Source: ConnectSchemaBuilderTest.java

Example 4: timestampTypeFromSchema

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
/**
 * Determine the type/format of the timestamp based on the schema
 */
private String timestampTypeFromSchema(Schema schema) {
    if (Timestamp.LOGICAL_NAME.equals(schema.name())) {
        return TYPE_TIMESTAMP;
    } else if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(schema.name())) {
        return TYPE_DATE;
    } else if (Time.LOGICAL_NAME.equals(schema.name())) {
        return TYPE_TIME;
    } else if (schema.type().equals(Schema.Type.STRING)) {
        // If not otherwise specified, string == user-specified string format for timestamps
        return TYPE_STRING;
    } else if (schema.type().equals(Schema.Type.INT64)) {
        // If not otherwise specified, long == unix time
        return TYPE_UNIX;
    }
    throw new ConnectException("Schema " + schema + " does not correspond to a known timestamp type format");
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 20, Source: TimestampConverter.java
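
The same logical-name dispatch shows up in user code that inspects record schemas. The sketch below is a standalone variant under the assumption that only a descriptive label is needed; the method name describeTemporalSchema and the label strings are illustrative and are not part of TimestampConverter (whose TYPE_* constants are private).

import org.apache.kafka.connect.data.Date;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Time;
import org.apache.kafka.connect.data.Timestamp;

public class TemporalSchemaInspector {
    // Classify a schema by its Connect logical name and return a descriptive label.
    static String describeTemporalSchema(Schema schema) {
        String name = schema.name();
        if (Timestamp.LOGICAL_NAME.equals(name)) {
            return "timestamp: INT64 milliseconds since the epoch, surfaced as java.util.Date";
        } else if (Date.LOGICAL_NAME.equals(name)) {
            return "date: INT32 days since the epoch, surfaced as java.util.Date";
        } else if (Time.LOGICAL_NAME.equals(name)) {
            return "time: INT32 milliseconds past midnight, surfaced as java.util.Date";
        }
        return "not a Connect temporal logical type";
    }

    public static void main(String[] args) {
        System.out.println(describeTemporalSchema(Timestamp.SCHEMA));
        System.out.println(describeTemporalSchema(Schema.INT64_SCHEMA)); // plain INT64, no logical name
    }
}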

Example 5: makeUpdatedSchema

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
private Schema makeUpdatedSchema(Schema schema) {
    final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());

    for (Field field : schema.fields()) {
        builder.field(field.name(), field.schema());
    }

    if (topicField != null) {
        builder.field(topicField.name, topicField.optional ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA);
    }
    if (partitionField != null) {
        builder.field(partitionField.name, partitionField.optional ? Schema.OPTIONAL_INT32_SCHEMA : Schema.INT32_SCHEMA);
    }
    if (offsetField != null) {
        builder.field(offsetField.name, offsetField.optional ? Schema.OPTIONAL_INT64_SCHEMA : Schema.INT64_SCHEMA);
    }
    if (timestampField != null) {
        builder.field(timestampField.name, timestampField.optional ? OPTIONAL_TIMESTAMP_SCHEMA : Timestamp.SCHEMA);
    }
    if (staticField != null) {
        builder.field(staticField.name, staticField.optional ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA);
    }

    return builder.build();
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 26, Source: InsertField.java

Example 6: Parser

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
public Parser() {
  this.typeParsers = new HashMap<>();
  registerTypeParser(Schema.BOOLEAN_SCHEMA, new BooleanParser());
  registerTypeParser(Schema.BOOLEAN_SCHEMA, new BooleanParser());
  registerTypeParser(Schema.FLOAT32_SCHEMA, new Float32TypeParser());
  registerTypeParser(Schema.FLOAT64_SCHEMA, new Float64TypeParser());
  registerTypeParser(Schema.INT8_SCHEMA, new Int8TypeParser());
  registerTypeParser(Schema.INT16_SCHEMA, new Int16TypeParser());
  registerTypeParser(Schema.INT32_SCHEMA, new Int32TypeParser());
  registerTypeParser(Schema.INT64_SCHEMA, new Int64TypeParser());
  registerTypeParser(Schema.STRING_SCHEMA, new StringTypeParser());
  registerTypeParser(Decimal.schema(1), new DecimalTypeParser());
  registerTypeParser(Date.SCHEMA, new DateTypeParser());
  registerTypeParser(Time.SCHEMA, new TimeTypeParser());
  registerTypeParser(Timestamp.SCHEMA, new TimestampTypeParser());
}
 
Developer: jcustenborder, Project: connect-utils, Lines: 17, Source: Parser.java
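
For a sense of how this Parser is consumed, the sketch below is a hedged guess based only on the parseString calls visible in Examples 7 and 8; the import path com.github.jcustenborder.kafka.connect.utils.data.Parser is an assumption about the connect-utils package layout, and the return values in the comments are expectations rather than guarantees.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Timestamp;

import com.github.jcustenborder.kafka.connect.utils.data.Parser; // assumed package path

public class ParserUsageSketch {
    public static void main(String[] args) {
        Parser parser = new Parser();
        Object port = parser.parseString(Schema.INT32_SCHEMA, "8080");   // expected: Integer 8080
        Object flag = parser.parseString(Schema.BOOLEAN_SCHEMA, "true"); // expected: Boolean.TRUE
        System.out.println(port + " " + flag);

        // Strings destined for Timestamp.SCHEMA go through the registered
        // TimestampTypeParser; the accepted date formats depend on how that parser
        // was constructed (Example 8 registers one with an explicit time zone and
        // format list). A null input on an optional schema stays null (see Example 7).
        Object ts = parser.parseString(Timestamp.builder().optional().build(), null);
        System.out.println(ts); // expected: null
    }
}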

Example 7: nullableTests

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void nullableTests() {
  final Schema[] schemas = new Schema[]{
      Schema.OPTIONAL_BOOLEAN_SCHEMA,
      Schema.OPTIONAL_FLOAT32_SCHEMA,
      Schema.OPTIONAL_FLOAT64_SCHEMA,
      Schema.OPTIONAL_INT8_SCHEMA,
      Schema.OPTIONAL_INT16_SCHEMA,
      Schema.OPTIONAL_INT32_SCHEMA,
      Schema.OPTIONAL_INT64_SCHEMA,
      Schema.OPTIONAL_STRING_SCHEMA,
      Decimal.builder(1).optional().build(),
      Timestamp.builder().optional().build(),
      Date.builder().optional().build(),
      Time.builder().optional().build(),
  };

  for (Schema schema : schemas) {
    Object actual = this.parser.parseString(schema, null);
    assertNull(actual);
  }

}
 
Developer: jcustenborder, Project: connect-utils, Lines: 24, Source: StringParserTest.java

Example 8: start

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Override
public void start(Map<String, String> settings) {
  this.config = config(settings);

  checkDirectory(SpoolDirSourceConnectorConfig.INPUT_PATH_CONFIG, this.config.inputPath);
  checkDirectory(SpoolDirSourceConnectorConfig.FINISHED_PATH_CONFIG, this.config.finishedPath);
  checkDirectory(SpoolDirSourceConnectorConfig.ERROR_PATH_CONFIG, this.config.errorPath);

  this.parser = new Parser();
  Map<Schema, TypeParser> dateTypeParsers = ImmutableMap.of(
      Timestamp.SCHEMA, new TimestampTypeParser(this.config.parserTimestampTimezone, this.config.parserTimestampDateFormats),
      Date.SCHEMA, new DateTypeParser(this.config.parserTimestampTimezone, this.config.parserTimestampDateFormats),
      Time.SCHEMA, new TimeTypeParser(this.config.parserTimestampTimezone, this.config.parserTimestampDateFormats)
  );

  for (Map.Entry<Schema, TypeParser> kvp : dateTypeParsers.entrySet()) {
    this.parser.registerTypeParser(kvp.getKey(), kvp.getValue());
  }
}
 
Developer: jcustenborder, Project: kafka-connect-spooldir, Lines: 20, Source: SpoolDirSourceTask.java

Example 9: inferLogicalMapping

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
private static JsonNode inferLogicalMapping(Schema schema) {
  String schemaName = schema.name();
  Object defaultValue = schema.defaultValue();
  if (schemaName == null) {
    return null;
  }

  switch (schemaName) {
    case Date.LOGICAL_NAME:
    case Time.LOGICAL_NAME:
    case Timestamp.LOGICAL_NAME:
      return inferPrimitive(ElasticsearchSinkConnectorConstants.DATE_TYPE, defaultValue);
    case Decimal.LOGICAL_NAME:
      return inferPrimitive(ElasticsearchSinkConnectorConstants.DOUBLE_TYPE, defaultValue);
    default:
      // User-defined type or unknown built-in
      return null;
  }
}
 
Developer: confluentinc, Project: kafka-connect-elasticsearch, Lines: 20, Source: Mapping.java

Example 10: createSchema

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
protected Schema createSchema() {
  Schema structSchema = createInnerSchema();
  return SchemaBuilder.struct().name("record")
      .field("boolean", Schema.BOOLEAN_SCHEMA)
      .field("bytes", Schema.BYTES_SCHEMA)
      .field("int8", Schema.INT8_SCHEMA)
      .field("int16", Schema.INT16_SCHEMA)
      .field("int32", Schema.INT32_SCHEMA)
      .field("int64", Schema.INT64_SCHEMA)
      .field("float32", Schema.FLOAT32_SCHEMA)
      .field("float64", Schema.FLOAT64_SCHEMA)
      .field("string", Schema.STRING_SCHEMA)
      .field("array", SchemaBuilder.array(Schema.STRING_SCHEMA).build())
      .field("map", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA).build())
      .field("struct", structSchema)
      .field("decimal", Decimal.schema(2))
      .field("date", Date.SCHEMA)
      .field("time", Time.SCHEMA)
      .field("timestamp", Timestamp.SCHEMA)
      .build();
}
 
Developer: confluentinc, Project: kafka-connect-elasticsearch, Lines: 22, Source: MappingTest.java

Example 11: createInnerSchema

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
private Schema createInnerSchema() {
  return SchemaBuilder.struct().name("inner")
      .field("boolean", Schema.BOOLEAN_SCHEMA)
      .field("bytes", Schema.BYTES_SCHEMA)
      .field("int8", Schema.INT8_SCHEMA)
      .field("int16", Schema.INT16_SCHEMA)
      .field("int32", Schema.INT32_SCHEMA)
      .field("int64", Schema.INT64_SCHEMA)
      .field("float32", Schema.FLOAT32_SCHEMA)
      .field("float64", Schema.FLOAT64_SCHEMA)
      .field("string", Schema.STRING_SCHEMA)
      .field("array", SchemaBuilder.array(Schema.STRING_SCHEMA).build())
      .field("map", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA).build())
      .field("decimal", Decimal.schema(2))
      .field("date", Date.SCHEMA)
      .field("time", Time.SCHEMA)
      .field("timestamp", Timestamp.SCHEMA)
      .build();
}
 
Developer: confluentinc, Project: kafka-connect-elasticsearch, Lines: 20, Source: MappingTest.java
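
Once a struct schema like the two above exists, records are built with org.apache.kafka.connect.data.Struct, and Timestamp fields are populated with plain java.util.Date values. The sketch below is a trimmed-down illustration in that spirit; the two-field schema is made up for brevity and is not the schema used by MappingTest.

import java.util.Date;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.data.Timestamp;

public class StructWithTimestampSketch {
    public static void main(String[] args) {
        // A reduced schema in the same style as the examples above.
        Schema schema = SchemaBuilder.struct().name("record")
            .field("string", Schema.STRING_SCHEMA)
            .field("timestamp", Timestamp.SCHEMA)
            .build();

        // Logical Timestamp fields take java.util.Date values;
        // validate() checks the struct against the schema, including logical types.
        Struct value = new Struct(schema)
            .put("string", "hello")
            .put("timestamp", new Date());
        value.validate();
        System.out.println(value);
    }
}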

Example 12: testTimestamp

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void testTimestamp() {
  final String fieldName = "Timestamp";

  com.google.cloud.bigquery.Schema bigQueryExpectedSchema =
      com.google.cloud.bigquery.Schema.of(
          com.google.cloud.bigquery.Field.newBuilder(
              fieldName,
              com.google.cloud.bigquery.Field.Type.timestamp()
          ).setMode(
              com.google.cloud.bigquery.Field.Mode.REQUIRED
          ).build()
      );

  Schema kafkaConnectTestSchema = SchemaBuilder
      .struct()
      .field(fieldName, Timestamp.SCHEMA)
      .build();

  com.google.cloud.bigquery.Schema bigQueryTestSchema =
      new BigQuerySchemaConverter().convertSchema(kafkaConnectTestSchema);
  assertEquals(bigQueryExpectedSchema, bigQueryTestSchema);
}
 
Developer: wepay, Project: kafka-connect-bigquery, Lines: 24, Source: BigQuerySchemaConverterTest.java

Example 13: testWithSchemaIdentity

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void testWithSchemaIdentity() {
    TimestampConverter<SourceRecord> xform = new TimestampConverter.Value<>();
    xform.configure(Collections.singletonMap(TimestampConverter.TARGET_TYPE_CONFIG, "Timestamp"));
    SourceRecord transformed = xform.apply(new SourceRecord(null, null, "topic", 0, Timestamp.SCHEMA, DATE_PLUS_TIME.getTime()));

    assertEquals(Timestamp.SCHEMA, transformed.valueSchema());
    assertEquals(DATE_PLUS_TIME.getTime(), transformed.value());
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 10, Source: TimestampConverterTest.java

Example 14: testWithSchemaTimestampToDate

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void testWithSchemaTimestampToDate() {
    TimestampConverter<SourceRecord> xform = new TimestampConverter.Value<>();
    xform.configure(Collections.singletonMap(TimestampConverter.TARGET_TYPE_CONFIG, "Date"));
    SourceRecord transformed = xform.apply(new SourceRecord(null, null, "topic", 0, Timestamp.SCHEMA, DATE_PLUS_TIME.getTime()));

    assertEquals(Date.SCHEMA, transformed.valueSchema());
    assertEquals(DATE.getTime(), transformed.value());
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 10, Source: TimestampConverterTest.java

Example 15: testWithSchemaTimestampToTime

import org.apache.kafka.connect.data.Timestamp; // import the required package/class
@Test
public void testWithSchemaTimestampToTime() {
    TimestampConverter<SourceRecord> xform = new TimestampConverter.Value<>();
    xform.configure(Collections.singletonMap(TimestampConverter.TARGET_TYPE_CONFIG, "Time"));
    SourceRecord transformed = xform.apply(new SourceRecord(null, null, "topic", 0, Timestamp.SCHEMA, DATE_PLUS_TIME.getTime()));

    assertEquals(Time.SCHEMA, transformed.valueSchema());
    assertEquals(TIME.getTime(), transformed.value());
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 10, Source: TimestampConverterTest.java
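
Examples 13-15 all drive Kafka's TimestampConverter transform with a schema-bearing Timestamp value. As a complementary sketch, the code below configures the same transform with a string target type and a format pattern; the "target.type" and "format" keys match the transform's documented configuration, while the topic name, record value, and expected output in the comments are illustrative assumptions.

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.data.Timestamp;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.TimestampConverter;

public class TimestampConverterSketch {
    public static void main(String[] args) {
        TimestampConverter<SourceRecord> xform = new TimestampConverter.Value<>();

        Map<String, String> config = new HashMap<>();
        config.put("target.type", "string");              // convert to a formatted string
        config.put("format", "yyyy-MM-dd'T'HH:mm:ss'Z'"); // SimpleDateFormat pattern
        xform.configure(config);

        // Convert the whole record value (no "field" config), as the tests above do.
        SourceRecord record = new SourceRecord(null, null, "topic", 0,
                Timestamp.SCHEMA, new Date(4_000_000_000L));
        SourceRecord transformed = xform.apply(record);

        System.out.println(transformed.valueSchema().type()); // STRING
        System.out.println(transformed.value());              // e.g. 1970-02-16T07:06:40Z (UTC assumed)
    }
}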


Note: The org.apache.kafka.connect.data.Timestamp class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their authors; copyright of the source code remains with the original authors, and any redistribution or use should follow the corresponding project's license. Do not republish without permission.