

Java TFileImpl Class Code Examples

This article collects typical usage examples of the Java class com.datatorrent.contrib.hdht.tfile.TFileImpl. If you are wondering what the TFileImpl class is for, how to use it, or what real-world usage looks like, the curated code examples below should help.


The TFileImpl class belongs to the com.datatorrent.contrib.hdht.tfile package. Fifteen code examples using the class are shown below, ordered by popularity by default.
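Before going through the examples, here is a minimal, hedged sketch of the setup pattern that recurs in all of them: construct one of the two concrete implementations nested in TFileImpl, point it at a base path, and hand it to an HDHT store via setFileStore(). Only methods that appear in the examples below are used; the wrapper class name, the path, and the codec value are illustrative assumptions.

import com.datatorrent.contrib.hdht.tfile.TFileImpl;

public class TFileImplSetupSketch
{
  public static TFileImpl createFileImpl(String basePath, boolean useDTFile, String compression)
  {
    // Choose between the two nested implementations (both appear in the examples below).
    TFileImpl hdsFile = useDTFile ? new TFileImpl.DTFileImpl() : new TFileImpl.DefaultTFileImpl();
    hdsFile.setBasePath(basePath);            // directory where the HDHT data files are written
    if (compression != null) {
      hdsFile.setCompressName(compression);   // TFile codec name, e.g. "gz" (see examples 5 and 6)
    }
    // The configured instance is then passed to an HDHT store, e.g. store.setFileStore(hdsFile).
    return hdsFile;
  }
}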

Example 1: setupStore

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
public TestStoreHDHT setupStore(TestInfo testMeta)
{
  String eventSchemaString = SchemaUtils.jarResourceFileToString(configureFile);

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  store = new TestStoreHDHT();

  store.setCacheWindowDuration(2);
  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  return store;
}
 
Developer: DataTorrent, Project: Megh, Lines: 21, Source: CompositeDimensionComputationTester.java

Example 2: serializationTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void serializationTest() throws Exception
{
  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");
  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
  KryoCloneUtils.cloneObject(new Kryo(), store);

  store.beginWindow(0L);
  store.endWindow();
  store.teardown();
}
 
Developer: DataTorrent, Project: Megh, Lines: 23, Source: AppDataSingleSchemaDimensionStoreHDHTTest.java

Example 3: createStore

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
protected AppDataSingleSchemaDimensionStoreHDHT createStore(DAG dag, Configuration conf,  String eventSchema)
{
  AppDataSingleSchemaDimensionStoreHDHT store = dag.addOperator("Store", ProcessTimeAwareStore.class);
  store.setUpdateEnumValues(true);
  String basePath = Preconditions.checkNotNull(conf.get(PROP_STORE_PATH),
        "base path should be specified in the properties.xml");
  TFileImpl hdsFile = new TFileImpl.DTFileImpl();
  basePath += System.currentTimeMillis();
  hdsFile.setBasePath(basePath);

  store.setFileStore(hdsFile);
  dag.setAttribute(store, Context.OperatorContext.COUNTERS_AGGREGATOR,
      new BasicCounters.LongAggregator<MutableLong>());
  store.setConfigurationSchemaJSON(eventSchema);
  store.setPartitionCount(storePartitionCount);
  if(storePartitionCount > 1)
  {
    store.setPartitionCount(storePartitionCount);
    store.setQueryResultUnifier(new DimensionStoreHDHTNonEmptyQueryResultUnifier());
  }
  return store;
}
 
Developer: yahoo, Project: streaming-benchmarks, Lines: 23, Source: ApplicationDimensionComputation.java
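For context, here is a hedged sketch of how a helper like createStore() might be invoked from an application's populateDAG() method; the schema resource name and the commented-out upstream operator are assumptions and are not part of the original project.

public void populateDAG(DAG dag, Configuration conf)
{
  // Assumption: the configuration schema JSON is bundled as a jar resource, as in the examples above.
  String eventSchema = SchemaUtils.jarResourceFileToString("eventSchema.json");
  AppDataSingleSchemaDimensionStoreHDHT store = createStore(dag, conf, eventSchema);
  // A hypothetical upstream dimensions-computation operator would then be wired to the
  // store's input port, for example:
  // dag.addStream("DimensionalData", dimensionsComputation.output, store.input);
}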

Example 4: storeFormatTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void storeFormatTest() throws Exception
{
  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  long windowId = 0L;
  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
  store.beginWindow(windowId);
  store.endWindow();
  store.checkpointed(windowId);
  store.committed(windowId);

  windowId++;
  store.beginWindow(windowId);

  byte[] storeFormat = store.load(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID,
                                  DimensionsStoreHDHT.STORE_FORMAT_KEY);
  Assert.assertEquals(DimensionsStoreHDHT.STORE_FORMAT_VERSION, GPOUtils.deserializeInt(storeFormat));
  store.endWindow();
  store.teardown();
}
 
Developer: DataTorrent, Project: Megh, Lines: 33, Source: AppDataSingleSchemaDimensionStoreHDHTTest.java

Example 5: testTFile

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
private void testTFile(String compression) throws IOException
{

  TFileImpl timpl = new TFileImpl.DefaultTFileImpl();
  timpl.setCompressName(compression);
  writeFile(0, timpl, "TFileUnit" + compression);
  testSeqRead(0, timpl, "TFileUnit" + compression);
  testRandomRead(0, timpl, "TFileUnit" + compression);

}
 
Developer: DataTorrent, Project: Megh, Lines: 11, Source: HDHTFileAccessTest.java

Example 6: testDTFile

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
private void testDTFile(String compression) throws IOException
{

  TFileImpl timpl = new TFileImpl.DTFileImpl();
  timpl.setCompressName(compression);
  writeFile(0, timpl, "TFileUnit" + compression);
  testSeqRead(0, timpl, "TFileUnit" + compression);
  testRandomRead(0, timpl, "TFileUnit" + compression);

}
 
Developer: DataTorrent, Project: Megh, Lines: 11, Source: HDHTFileAccessTest.java
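As a usage note, the compression name passed to these two helpers is the Hadoop TFile codec name. Below is a hedged sketch of a test that exercises both variants, assuming the "none" and "gz" codecs are available on the classpath ("lzo" would additionally require the native LZO libraries); the test name is an assumption, not from the original project.

@Test
public void testCompressionVariants() throws IOException
{
  // Plain TFile and DTFile readers, without compression and with gzip compression.
  testTFile("none");
  testTFile("gz");
  testDTFile("none");
  testDTFile("gz");
}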

Example 7: testDefaultTFileHDSFileAccess

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void testDefaultTFileHDSFileAccess() throws Exception
{
  //Create DefaultTFileImpl
  TFileImpl timpl = new TFileImpl.DefaultTFileImpl();
  testHDSFileAccess(timpl);
}
 
Developer: DataTorrent, Project: Megh, Lines: 8, Source: HDHTWriterTest.java

Example 8: testDTFileHDSFileAccess

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void testDTFileHDSFileAccess() throws Exception
{
  //Create DTFileImpl
  TFileImpl timpl = new TFileImpl.DTFileImpl();
  testHDSFileAccess(timpl);
}
 
Developer: DataTorrent, Project: Megh, Lines: 8, Source: HDHTWriterTest.java

Example 9: simpleQueueManagerTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void simpleQueueManagerTest() throws Exception
{
  final int numQueries = 3;

  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setCacheWindowDuration(2);
  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  DimensionalConfigurationSchema eventSchema = store.configurationSchema;
  DimensionsQueueManager dqm = new DimensionsQueueManager(store, store.schemaRegistry);

  Map<String, Set<String>> fieldToAggregator = Maps.newHashMap();
  fieldToAggregator.put("impressions", Sets.newHashSet("SUM"));
  fieldToAggregator.put("cost", Sets.newHashSet("SUM"));

  FieldsAggregatable fieldsAggregatable = new FieldsAggregatable(fieldToAggregator);

  GPOMutable key = AppDataSingleSchemaDimensionStoreHDHTTest.createQueryKey(eventSchema,
      "google",
      "safeway");

  DataQueryDimensional dqd = new DataQueryDimensional("1",
      DataQueryDimensional.TYPE,
      numQueries,
      TimeBucket.MINUTE,
      key,
      fieldsAggregatable,
      true);

  LOG.debug("{}", dqd.getDimensionsDescriptor());
  LOG.debug("{}", ((DimensionalSchema)store.schemaRegistry.getSchema(dqd.getSchemaKeys()))
      .getDimensionalConfigurationSchema().getDimensionsDescriptorToID());

  dqm.enqueue(dqd, null, null);

  Assert.assertEquals(numQueries, store.getQueries().size());
}
 
Developer: DataTorrent, Project: Megh, Lines: 51, Source: DimensionsQueueManagerTest.java

Example 10: simpleQueueManagerTestCustomTimeBucket

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void simpleQueueManagerTestCustomTimeBucket() throws Exception
{
  final int numQueries = 3;

  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchemaCustomTimeBucket.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setCacheWindowDuration(2);
  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);
  store.setUseSystemTimeForLatestTimeBuckets(false);
  store.setMinTimestamp(600000L);
  store.setMaxTimestamp(1000000L);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  DimensionalConfigurationSchema eventSchema = store.configurationSchema;
  DimensionsQueueManager dqm = new DimensionsQueueManager(store, store.schemaRegistry);

  Map<String, Set<String>> fieldToAggregator = Maps.newHashMap();
  fieldToAggregator.put("impressions", Sets.newHashSet("SUM"));
  fieldToAggregator.put("cost", Sets.newHashSet("SUM"));

  FieldsAggregatable fieldsAggregatable = new FieldsAggregatable(fieldToAggregator);

  GPOMutable key = AppDataSingleSchemaDimensionStoreHDHTTest.createQueryKey(eventSchema,
      "google",
      "safeway");

  DataQueryDimensional dqd = new DataQueryDimensional("1",
      DataQueryDimensional.TYPE,
      numQueries,
      TimeBucket.MINUTE,
      key,
      fieldsAggregatable,
      true);

  LOG.debug("{}", dqd.getDimensionsDescriptor());
  LOG.debug("{}", ((DimensionalSchema)store.schemaRegistry.getSchema(dqd.getSchemaKeys()))
      .getDimensionalConfigurationSchema().getDimensionsDescriptorToID());

  dqm.enqueue(dqd, null, null);

  QueryBundle<DataQueryDimensional, QueryMeta, MutableLong> qb = dqm.dequeue();

  for (Map<String, EventKey> eventKeys : qb.getMetaQuery().getEventKeys()) {
    Assert.assertEquals(0, eventKeys.get("SUM").getDimensionDescriptorID());
  }

  Assert.assertEquals(numQueries, store.getQueries().size());
}
 
Developer: DataTorrent, Project: Megh, Lines: 60, Source: DimensionsQueueManagerTest.java

Example 11: simpleRollingQueueManagerTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void simpleRollingQueueManagerTest() throws Exception
{
  final int numQueries = 3;
  final int rollingCount = 5;
  final int hdhtQueryCount = 7;

  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setCacheWindowDuration(2);
  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  DimensionalConfigurationSchema eventSchema = store.configurationSchema;
  DimensionsQueueManager dqm = new DimensionsQueueManager(store, store.schemaRegistry);

  Map<String, Set<String>> fieldToAggregator = Maps.newHashMap();
  fieldToAggregator.put("impressions", Sets.newHashSet("SUM"));
  fieldToAggregator.put("cost", Sets.newHashSet("SUM"));

  FieldsAggregatable fieldsAggregatable = new FieldsAggregatable(fieldToAggregator);

  GPOMutable key = AppDataSingleSchemaDimensionStoreHDHTTest.createQueryKey(eventSchema,
      "google",
      "safeway");

  DataQueryDimensional dqd = new DataQueryDimensional("1",
      DataQueryDimensional.TYPE,
      numQueries,
      TimeBucket.MINUTE,
      key,
      fieldsAggregatable,
      true);
  dqd.setSlidingAggregateSize(rollingCount);

  dqm.enqueue(dqd, null, null);

  Assert.assertEquals(hdhtQueryCount, store.getQueries().size());
}
 
Developer: DataTorrent, Project: Megh, Lines: 50, Source: DimensionsQueueManagerTest.java

Example 12: queryStarQueueManagerTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void queryStarQueueManagerTest() throws Exception
{
  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setCacheWindowDuration(2);
  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  DimensionalConfigurationSchema eventSchema = store.configurationSchema;
  @SuppressWarnings("unchecked")
  DimensionsQueueManager dqm = new DimensionsQueueManager(store, store.schemaRegistry,
      new SimpleDataQueryDimensionalExpander((Map)store.seenEnumValues));

  Map<String, Set<String>> fieldToAggregator = Maps.newHashMap();
  fieldToAggregator.put("impressions", Sets.newHashSet("SUM"));
  fieldToAggregator.put("cost", Sets.newHashSet("SUM"));

  FieldsAggregatable fieldsAggregatable = new FieldsAggregatable(fieldToAggregator);

  GPOMutable key = AppDataSingleSchemaDimensionStoreHDHTTest.createQueryKey(eventSchema,
      "google",
      "safeway");
  Map<String, Set<Object>> keyToValues = Maps.newHashMap();
  keyToValues.put("publisher", Sets.newHashSet());
  keyToValues.put("advertiser", Sets.newHashSet());

  DataQueryDimensional dqd = new DataQueryDimensional("1",
      DataQueryDimensional.TYPE,
      1,
      new CustomTimeBucket(TimeBucket.MINUTE),
      key.getFieldDescriptor(),
      keyToValues,
      fieldsAggregatable,
      true);

  dqm.enqueue(dqd, null, null);

  Assert.assertEquals(9, store.getQueries().size());
}
 
Developer: DataTorrent, Project: Megh, Lines: 51, Source: DimensionsQueueManagerTest.java

Example 13: storeWindowIDTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void storeWindowIDTest()
{
  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  long windowId = 0L;
  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
  store.beginWindow(windowId);
  byte[] windowIDBytes = store.load(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID,
                                    DimensionsStoreHDHT.WINDOW_ID_KEY);
  Assert.assertArrayEquals(null, windowIDBytes);
  store.endWindow();
  store.checkpointed(windowId);
  store.committed(windowId);

  for (int windowCounter = 0;
      windowCounter < 2;
      windowCounter++) {
    windowId++;
    store.beginWindow(windowId);
    windowIDBytes = store.load(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID,
                               DimensionsStoreHDHT.WINDOW_ID_KEY);
    Assert.assertEquals(windowId - 1L, GPOUtils.deserializeLong(windowIDBytes));
    store.endWindow();
    store.checkpointed(windowId);
    store.committed(windowId);
  }

  store.teardown();
}
 
Developer: DataTorrent, Project: Megh, Lines: 42, Source: AppDataSingleSchemaDimensionStoreHDHTTest.java

Example 14: dataSerializationTest

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
@Test
public void dataSerializationTest()
{
  final String publisher = "google";
  final String advertiser = "safeway";

  final long impressions = 10L;
  final double cost = 1.0;

  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);

  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

  //Aggregate Event
  DimensionalConfigurationSchema eventSchema = store.configurationSchema;
  Aggregate ae = createEvent(eventSchema,
      publisher,
      advertiser,
      60000L,
      TimeBucket.MINUTE,
      impressions,
      cost);

  //Key bytes
  byte[] keyBytes = store.getKeyBytesGAE(ae);
  //Value bytes
  byte[] valueBytes = store.getValueBytesGAE(ae);

  Aggregate deserializedAE = store.fromKeyValueGAE(new Slice(keyBytes), valueBytes);
  deserializedAE.getEventKey().getKey().setFieldDescriptor(ae.getEventKey().getKey().getFieldDescriptor());
  deserializedAE.getAggregates().setFieldDescriptor(ae.getAggregates().getFieldDescriptor());

  Assert.assertEquals("Test aggregates", ae.getAggregates(), deserializedAE.getAggregates());
  Assert.assertEquals("event keys must be equal", ae.getEventKey(), deserializedAE.getEventKey());

  store.beginWindow(0L);
  store.endWindow();
  store.teardown();
}
 
Developer: DataTorrent, Project: Megh, Lines: 49, Source: AppDataSingleSchemaDimensionStoreHDHTTest.java

Example 15: readTestHelper

import com.datatorrent.contrib.hdht.tfile.TFileImpl; // import the required package/class
private void readTestHelper(boolean useHDHTPut) throws Exception
{
  final String publisher = "google";
  final String advertiser = "safeway";

  final long impressions = 10L;
  final double cost = 1.0;

  String eventSchemaString = SchemaUtils.jarResourceFileToString("dimensionsTestSchema.json");

  String basePath = testMeta.getDir();
  TFileImpl hdsFile = new TFileImpl.DefaultTFileImpl();
  hdsFile.setBasePath(basePath);

  AppDataSingleSchemaDimensionStoreHDHT store = new AppDataSingleSchemaDimensionStoreHDHT();

  store.setConfigurationSchemaJSON(eventSchemaString);
  store.setFileStore(hdsFile);
  store.setFlushIntervalCount(1);
  store.setFlushSize(0);

  long windowId = 0;
  store.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
  //STARTING WINDOW 0
  store.beginWindow(windowId);

  DimensionalConfigurationSchema eventSchema = store.configurationSchema;

  //Aggregate Event
  Aggregate ae = createEvent(eventSchema,
      publisher,
      advertiser,
      60000L,
      TimeBucket.MINUTE,
      impressions,
      cost);

  if (!useHDHTPut) {
    store.input.put(ae);
    Assert.assertEquals("The item must be in the cache.", ae, store.cache.get(ae.getEventKey()));
  } else {
    store.put(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID,
        new Slice(store.getKeyBytesGAE(ae)),
        store.getValueBytesGAE(ae));
    Assert.assertEquals("The item must be in the cache.", ae, store.load(ae.getEventKey()));
  }

  store.endWindow();
  store.checkpointed(windowId);
  store.committed(windowId);
  //STARTING WINDOW 1
  windowId++;
  store.beginWindow(windowId);
  store.endWindow();
  store.checkpointed(windowId);
  store.committed(windowId);
  //STARTING WINDOW 2
  windowId++;
  store.beginWindow(windowId);

  byte[] keyBytes = store.getKeyBytesGAE(ae);
  byte[] valueBytes =
      store.getUncommitted(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID, new Slice(keyBytes));

  if (valueBytes == null) {
    valueBytes = store.get(AppDataSingleSchemaDimensionStoreHDHT.DEFAULT_BUCKET_ID, new Slice(keyBytes));
  }

  LOG.debug("value bytes size {}", valueBytes.length);

  Aggregate aeDeserialized = store.fromKeyValueGAE(new Slice(keyBytes), valueBytes);

  aeDeserialized.getKeys().setFieldDescriptor(ae.getKeys().getFieldDescriptor());
  aeDeserialized.getAggregates().setFieldDescriptor(ae.getAggregates().getFieldDescriptor());
  Assert.assertEquals("The values must be equal", ae, aeDeserialized);

  store.endWindow();
  store.teardown();
}
 
Developer: DataTorrent, Project: Megh, Lines: 80, Source: AppDataSingleSchemaDimensionStoreHDHTTest.java


Note: the com.datatorrent.contrib.hdht.tfile.TFileImpl examples in this article were collected by 纯净天空 (VimSky) from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors, and the copyright of the source code remains with the original authors; distribution and use should follow the corresponding project's license. Do not republish without permission.