

Java JsonCodec.jsonCodec Method Code Examples

This article collects typical usage examples of the jsonCodec method of io.airlift.json.JsonCodec in Java. If you are unsure how JsonCodec.jsonCodec works or how to call it, the selected examples below should help. You can also explore further usage examples of io.airlift.json.JsonCodec itself.


The following presents 11 code examples of the JsonCodec.jsonCodec method, ordered by popularity by default.
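Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: build a codec with JsonCodec.jsonCodec, serialize with toJson, and parse back with fromJson. The Person class is a hypothetical value type introduced only for this illustration; airlift's JsonCodec is backed by Jackson, so the usual @JsonCreator/@JsonProperty annotations are assumed here.

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.airlift.json.JsonCodec;

public class JsonCodecRoundTripExample
{
    // Hypothetical value type used only for this illustration.
    public static class Person
    {
        private final String name;
        private final int age;

        @JsonCreator
        public Person(@JsonProperty("name") String name, @JsonProperty("age") int age)
        {
            this.name = name;
            this.age = age;
        }

        @JsonProperty
        public String getName()
        {
            return name;
        }

        @JsonProperty
        public int getAge()
        {
            return age;
        }
    }

    public static void main(String[] args)
    {
        // Build a codec bound to the Person type.
        JsonCodec<Person> codec = JsonCodec.jsonCodec(Person.class);

        // Serialize to a JSON string, then parse it back into an equivalent object.
        String json = codec.toJson(new Person("alice", 30));
        Person copy = codec.fromJson(json);

        // Print the JSON form and the round-tripped fields.
        System.out.println(json);
        System.out.println(copy.getName() + " " + copy.getAge());
    }
}

Each of the examples below follows this same round-trip pattern for a different domain type.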

Example 1: assertJsonRoundTrip

import io.airlift.json.JsonCodec; // import the package/class the method depends on
private void assertJsonRoundTrip(DataSize dataSize)
        throws IOException
{
    JsonCodec<DataSize> dataSizeCodec = JsonCodec.jsonCodec(DataSize.class);
    String json = dataSizeCodec.toJson(dataSize);
    DataSize dataSizeCopy = dataSizeCodec.fromJson(json);
    double delta = dataSize.toBytes() * 0.01;
    Assert.assertEquals(dataSize.toBytes(), dataSizeCopy.toBytes(), delta);
}
 
Developer: airlift, Project: units, Lines: 10, Source: TestDataSize.java

Example 2: assertJsonRoundTrip

import io.airlift.json.JsonCodec; // import the package/class the method depends on
private void assertJsonRoundTrip(Duration duration)
        throws IOException
{
    JsonCodec<Duration> durationCodec = JsonCodec.jsonCodec(Duration.class);
    String json = durationCodec.toJson(duration);
    Duration durationCopy = durationCodec.fromJson(json);
    double delta = duration.getValue(MILLISECONDS) * 0.01;
    assertEquals(duration.getValue(MILLISECONDS), durationCopy.getValue(MILLISECONDS), delta);
}
 
Developer: airlift, Project: units, Lines: 10, Source: TestDuration.java

Example 3: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<PipelineStats> codec = JsonCodec.jsonCodec(PipelineStats.class);

    String json = codec.toJson(EXPECTED);
    PipelineStats actual = codec.fromJson(json);

    assertExpectedPipelineStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestPipelineStats.java

Example 4: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<OperatorStats> codec = JsonCodec.jsonCodec(OperatorStats.class);

    String json = codec.toJson(EXPECTED);
    OperatorStats actual = codec.fromJson(json);

    assertExpectedOperatorStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestOperatorStats.java

Example 5: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<TaskStats> codec = JsonCodec.jsonCodec(TaskStats.class);

    String json = codec.toJson(EXPECTED);
    TaskStats actual = codec.fromJson(json);

    assertExpectedTaskStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestTaskStats.java

Example 6: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<DriverStats> codec = JsonCodec.jsonCodec(DriverStats.class);

    String json = codec.toJson(EXPECTED);
    DriverStats actual = codec.fromJson(json);

    assertExpectedDriverStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestDriverStats.java

Example 7: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<StageStats> codec = JsonCodec.jsonCodec(StageStats.class);

    String json = codec.toJson(EXPECTED);
    StageStats actual = codec.fromJson(json);

    assertExpectedStageStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestStageStats.java

Example 8: testJson

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Test
public void testJson()
{
    JsonCodec<QueryStats> codec = JsonCodec.jsonCodec(QueryStats.class);

    String json = codec.toJson(EXPECTED);
    QueryStats actual = codec.fromJson(json);

    assertExpectedQueryStats(actual);
}
 
Developer: y-lan, Project: presto, Lines: 11, Source: TestQueryStats.java

Example 9: PrestoEventStream

import io.airlift.json.JsonCodec; // import the package/class the method depends on
@Inject
public PrestoEventStream(@ForStreamer HttpClient httpClient, AWSKinesisModule.PrestoStreamConfig config, PrestoConfig prestoConfig) {
    this.httpClient = httpClient;
    this.streamingPort = config.getPort();
    this.prestoAddress = prestoConfig.getAddress();
    this.queryCodec = JsonCodec.jsonCodec(StreamQuery.class);
}
 
Developer: rakam-io, Project: rakam, Lines: 8, Source: PrestoEventStream.java

Example 10: setup

import io.airlift.json.JsonCodec; // import the package/class the method depends on
protected void setup(String host, int port, String databaseName, String awsAccessKey, String awsSecretKey, String writableBucket)
{
    this.writableBucket = writableBucket;

    setupHive(databaseName);

    HiveClientConfig hiveClientConfig = new HiveClientConfig()
            .setS3AwsAccessKey(awsAccessKey)
            .setS3AwsSecretKey(awsSecretKey);

    String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
    if (proxy != null) {
        hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
    }

    HiveConnectorId connectorId = new HiveConnectorId("hive-test");
    HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
    ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-s3-%s"));
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig));
    HivePartitionManager hivePartitionManager = new HivePartitionManager(connectorId, hiveClientConfig);

    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveClientConfig);
    metastoreClient = new TestingHiveMetastore(hiveCluster, executor, hiveClientConfig, writableBucket, hdfsEnvironment);
    locationService = new HiveLocationService(metastoreClient, hdfsEnvironment);
    TypeRegistry typeManager = new TypeRegistry();
    JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
    metadata = new HiveMetadata(
            connectorId,
            hiveClientConfig,
            metastoreClient,
            hdfsEnvironment,
            hivePartitionManager,
            newDirectExecutorService(),
            typeManager,
            locationService,
            partitionUpdateCodec);
    splitManager = new HiveSplitManager(
            connectorId,
            hiveClientConfig,
            metastoreClient,
            new NamenodeStats(),
            hdfsEnvironment,
            new HadoopDirectoryLister(),
            executor);
    pageSinkProvider = new HivePageSinkProvider(hdfsEnvironment, metastoreClient, new GroupByHashPageIndexerFactory(), typeManager, new HiveClientConfig(), locationService, partitionUpdateCodec);
    pageSourceProvider = new HivePageSourceProvider(hiveClientConfig, hdfsEnvironment, DEFAULT_HIVE_RECORD_CURSOR_PROVIDER, DEFAULT_HIVE_DATA_STREAM_FACTORIES, TYPE_MANAGER);
}
 
Developer: y-lan, Project: presto, Lines: 48, Source: AbstractTestHiveClientS3.java

Example 11: setup

import io.airlift.json.JsonCodec; // import the package/class the method depends on
protected final void setup(String host, int port, String databaseName, String timeZoneId, String connectorName, int maxOutstandingSplits, int maxThreads)
{
    setupHive(connectorName, databaseName, timeZoneId);

    HiveClientConfig hiveClientConfig = new HiveClientConfig();
    hiveClientConfig.setTimeZone(timeZoneId);
    String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
    if (proxy != null) {
        hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
    }

    HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
    metastoreClient = new CachingHiveMetastore(hiveCluster, executor, Duration.valueOf("1m"), Duration.valueOf("15s"));
    HiveConnectorId connectorId = new HiveConnectorId(connectorName);
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig));

    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveClientConfig);
    locationService = new HiveLocationService(metastoreClient, hdfsEnvironment);
    TypeManager typeManager = new TypeRegistry();
    JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
    metadata = new HiveMetadata(
            connectorId,
            metastoreClient,
            hdfsEnvironment,
            new HivePartitionManager(connectorId, hiveClientConfig),
            timeZone,
            10,
            true,
            true,
            true,
            true,
            true,
            typeManager,
            locationService,
            partitionUpdateCodec,
            newFixedThreadPool(2));
    splitManager = new HiveSplitManager(
            connectorId,
            metastoreClient,
            new NamenodeStats(),
            hdfsEnvironment,
            new HadoopDirectoryLister(),
            newDirectExecutorService(),
            maxOutstandingSplits,
            hiveClientConfig.getMinPartitionBatchSize(),
            hiveClientConfig.getMaxPartitionBatchSize(),
            hiveClientConfig.getMaxSplitSize(),
            hiveClientConfig.getMaxInitialSplitSize(),
            hiveClientConfig.getMaxInitialSplits(),
            false
    );
    pageSinkProvider = new HivePageSinkProvider(hdfsEnvironment, metastoreClient, new GroupByHashPageIndexerFactory(), typeManager, new HiveClientConfig(), locationService, partitionUpdateCodec);
    pageSourceProvider = new HivePageSourceProvider(hiveClientConfig, hdfsEnvironment, DEFAULT_HIVE_RECORD_CURSOR_PROVIDER, DEFAULT_HIVE_DATA_STREAM_FACTORIES, TYPE_MANAGER);
}
 
Developer: y-lan, Project: presto, Lines: 55, Source: AbstractTestHiveClient.java


Note: The io.airlift.json.JsonCodec.jsonCodec examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and distribution or use should follow each project's license. Do not reproduce without permission.