

Java BoundedReader.close Method Code Examples

This article collects typical usage examples of the close method of org.apache.beam.sdk.io.BoundedSource.BoundedReader in Java. If you are wondering what BoundedReader.close does, how to use it, or what it looks like in practice, the selected code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.beam.sdk.io.BoundedSource.BoundedReader.


The following presents 4 code examples of the BoundedReader.close method, ordered by popularity by default.
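Before the examples, a minimal sketch of the read-and-close pattern that every test below follows may be helpful: pair start()/advance()/getCurrent() with a close() call in a finally block, as the BoundedReader contract expects. The helper method readAll and its source/options parameters are hypothetical illustrations, not part of the Beam API.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.BoundedSource.BoundedReader;
import org.apache.beam.sdk.options.PipelineOptions;

public class ReadAllFromBoundedSource {
  // Hypothetical helper: drains a BoundedSource and always closes the reader.
  static <T> List<T> readAll(BoundedSource<T> source, PipelineOptions options)
      throws IOException {
    List<T> results = new ArrayList<>();
    BoundedReader<T> reader = source.createReader(options);
    try {
      // start() positions the reader at the first record (if any); advance() moves to the next.
      for (boolean more = reader.start(); more; more = reader.advance()) {
        results.add(reader.getCurrent());
      }
    } finally {
      // Release any resources held by the reader, even if reading failed.
      reader.close();
    }
    return results;
  }
}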

Example 1: testReadersStartWhenZeroRecords

import org.apache.beam.sdk.io.BoundedSource.BoundedReader; // Import the package/class this method depends on
/**
 * This test validates the behavior of the
 * {@link HadoopInputFormatBoundedSource.HadoopInputFormatReader#start() start()} method when the
 * InputFormat's {@link InputFormat#getSplits() getSplits()} returns an InputSplit list containing
 * zero records.
 */
@Test
public void testReadersStartWhenZeroRecords() throws Exception {

  InputFormat mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
  Mockito.when(
      mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
          Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
  Mockito.when(mockReader.nextKeyValue()).thenReturn(false);
  InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
  HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit(mockInputSplit));
  boundedSource.setInputFormatObj(mockInputFormat);
  BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
  assertEquals(false, reader.start());
  assertEquals(Double.valueOf(1), reader.getFractionConsumed());
  reader.close();
}
 
Developer ID: apache, Project: beam, Lines of code: 31, Source file: HadoopInputFormatIOTest.java

Example 2: testGetFractionConsumedForBadProgressValue

import org.apache.beam.sdk.io.BoundedSource.BoundedReader; // Import the package/class this method depends on
/**
 * This test validates the getFractionConsumed() method when the InputFormat returns an invalid
 * progress value.
 */
@Test
public void testGetFractionConsumedForBadProgressValue() throws Exception {
  InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
  Mockito.when(
      mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
          Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
  Mockito.when(mockReader.nextKeyValue()).thenReturn(true);
  // Set to a bad value, outside the valid range of 0 to 1.
  Mockito.when(mockReader.getProgress()).thenReturn(2.0F);
  InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
  HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit(mockInputSplit));
  boundedSource.setInputFormatObj(mockInputFormat);
  BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
  assertEquals(Double.valueOf(0), reader.getFractionConsumed());
  boolean start = reader.start();
  assertEquals(true, start);
  if (start) {
    boolean advance = reader.advance();
    assertEquals(null, reader.getFractionConsumed());
    assertEquals(true, advance);
    if (advance) {
      advance = reader.advance();
      assertEquals(null, reader.getFractionConsumed());
    }
  }
  // Validate that getFractionConsumed() returns null after a few reads, because getProgress()
  // returns the invalid value '2', which is outside the range 0 to 1.
  assertEquals(null, reader.getFractionConsumed());
  reader.close();
}
 
Developer ID: apache, Project: beam, Lines of code: 43, Source file: HadoopInputFormatIOTest.java

Example 3: testGetSplitPointsConsumed

import org.apache.beam.sdk.io.BoundedSource.BoundedReader; // Import the package/class this method depends on
@Test
public void testGetSplitPointsConsumed() throws Exception {
  final String table = "TEST-TABLE";
  final int numRows = 100;
  int splitPointsConsumed = 0;

  makeTableData(table, numRows);

  BigtableSource source =
      new BigtableSource(serviceFactory, table, null, Arrays.asList(ByteKeyRange.ALL_KEYS), null);

  BoundedReader<Row> reader = source.createReader(TestPipeline.testingPipelineOptions());

  reader.start();
  // Started, 0 split points consumed
  assertEquals("splitPointsConsumed starting",
      splitPointsConsumed, reader.getSplitPointsConsumed());

  // Split points consumed increases for each row read
  while (reader.advance()) {
    assertEquals("splitPointsConsumed advancing",
        ++splitPointsConsumed, reader.getSplitPointsConsumed());
  }

  // Reader marked as done, 100 split points consumed
  assertEquals("splitPointsConsumed done", numRows, reader.getSplitPointsConsumed());

  reader.close();
}
 
Developer ID: apache, Project: beam, Lines of code: 30, Source file: BigtableIOTest.java

Example 4: testReadersGetFractionConsumed

import org.apache.beam.sdk.io.BoundedSource.BoundedReader; // Import the package/class this method depends on
/**
 * This test validates the getFractionConsumed() method, which reports the progress of the read as
 * a fraction in the range 0 to 1.
 */
@Test
public void testReadersGetFractionConsumed() throws Exception {
  List<KV<Text, Employee>> referenceRecords = TestEmployeeDataSet.getEmployeeData();
  HadoopInputFormatBoundedSource<Text, Employee> hifSource = getTestHIFSource(
      EmployeeInputFormat.class,
      Text.class,
      Employee.class,
      WritableCoder.of(Text.class),
      AvroCoder.of(Employee.class));
  long estimatedSize = hifSource.getEstimatedSizeBytes(p.getOptions());
  // Validate that the estimated size equals the number of records.
  assertEquals(referenceRecords.size(), estimatedSize);
  List<BoundedSource<KV<Text, Employee>>> boundedSourceList =
      hifSource.split(0, p.getOptions());
  // Validate if split() has split correctly.
  assertEquals(TestEmployeeDataSet.NUMBER_OF_SPLITS, boundedSourceList.size());
  List<KV<Text, Employee>> bundleRecords = new ArrayList<>();
  for (BoundedSource<KV<Text, Employee>> source : boundedSourceList) {
    List<KV<Text, Employee>> elements = new ArrayList<KV<Text, Employee>>();
    BoundedReader<KV<Text, Employee>> reader = source.createReader(p.getOptions());
    float recordsRead = 0;
    // When start is not called, getFractionConsumed() should return 0.
    assertEquals(Double.valueOf(0), reader.getFractionConsumed());
    boolean start = reader.start();
    assertEquals(true, start);
    if (start) {
      elements.add(reader.getCurrent());
      boolean advance = reader.advance();
      // Validate if getFractionConsumed() returns the correct fraction based on
      // the number of records read in the split.
      assertEquals(
          Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
          reader.getFractionConsumed());
      assertEquals(true, advance);
      while (advance) {
        elements.add(reader.getCurrent());
        advance = reader.advance();
        assertEquals(
            Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
            reader.getFractionConsumed());
      }
      bundleRecords.addAll(elements);
    }
    // Validate if getFractionConsumed() returns 1 after reading is complete.
    assertEquals(Double.valueOf(1), reader.getFractionConsumed());
    reader.close();
  }
  assertThat(bundleRecords, containsInAnyOrder(referenceRecords.toArray()));
}
 
Developer ID: apache, Project: beam, Lines of code: 54, Source file: HadoopInputFormatIOTest.java


Note: The org.apache.beam.sdk.io.BoundedSource.BoundedReader.close method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright of the source code. Please follow each project's license when distributing or using the code, and do not reproduce this article without permission.