当前位置: 首页>>代码示例>>Java>>正文


Java DataflowPipelineOptions.setRegion方法代码示例

本文整理汇总了Java中org.apache.beam.runners.dataflow.options.DataflowPipelineOptions.setRegion方法的典型用法代码示例。如果您正苦于以下问题:Java DataflowPipelineOptions.setRegion方法的具体用法?Java DataflowPipelineOptions.setRegion怎么用?Java DataflowPipelineOptions.setRegion使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.beam.runners.dataflow.options.DataflowPipelineOptions的用法示例。


在下文中一共展示了DataflowPipelineOptions.setRegion方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: buildPipelineOptions

import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; //导入方法依赖的package包/类
/**
 * Builds {@link DataflowPipelineOptions} preconfigured for translator tests.
 *
 * <p>The returned options carry a mocked {@link GcsUtil} that echoes back any
 * expanded path and reports every bucket as accessible, fixed job/project/region
 * identifiers, an empty files-to-stage list (to skip the default classpath scan),
 * and a mock Dataflow client that validates create requests.
 *
 * @return fully wired options suitable for pipeline translation tests
 * @throws IOException declared for parity with the mocked GCS interactions
 */
private static DataflowPipelineOptions buildPipelineOptions() throws IOException {
  GcsUtil gcsUtil = mock(GcsUtil.class);
  // expand(path) -> a singleton list containing the very same path; no real GCS call.
  when(gcsUtil.expand(any(GcsPath.class))).then(new Answer<List<GcsPath>>() {
    @Override
    public List<GcsPath> answer(InvocationOnMock invocation) throws Throwable {
      GcsPath requested = (GcsPath) invocation.getArguments()[0];
      return ImmutableList.of(requested);
    }
  });
  // Pretend every bucket exists and is readable.
  when(gcsUtil.bucketAccessible(any(GcsPath.class))).thenReturn(true);

  DataflowPipelineOptions pipelineOptions =
      PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  pipelineOptions.setRunner(DataflowRunner.class);
  pipelineOptions.setGcpCredential(new TestCredential());
  pipelineOptions.setJobName("some-job-name");
  pipelineOptions.setProject("some-project");
  pipelineOptions.setRegion("some-region");
  pipelineOptions.setTempLocation(GcsPath.fromComponents("somebucket", "some/path").toString());
  // Empty staging list prevents a default value computed from the classpath.
  pipelineOptions.setFilesToStage(new LinkedList<String>());
  pipelineOptions.setDataflowClient(buildMockDataflow(new IsValidCreateRequest()));
  pipelineOptions.setGcsUtil(gcsUtil);
  return pipelineOptions;
}
 
开发者ID:apache,项目名称:beam,代码行数:23,代码来源:DataflowPipelineTranslatorTest.java

示例2: buildPipelineOptions

import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; //导入方法依赖的package包/类
/**
 * Builds {@link DataflowPipelineOptions} from the given command-line arguments,
 * then overlays test fixtures: a mocked {@link GcsUtil} that echoes expanded
 * paths and reports every bucket as accessible, fixed job/project/region
 * identifiers, and an empty files-to-stage list.
 *
 * @param args command-line style arguments forwarded to
 *     {@link PipelineOptionsFactory#fromArgs}
 * @return fully wired options suitable for override tests
 * @throws IOException declared for parity with the mocked GCS interactions
 */
private static DataflowPipelineOptions buildPipelineOptions(String ... args) throws IOException {
  GcsUtil gcsUtil = mock(GcsUtil.class);
  // expand(path) -> a singleton list containing the very same path; no real GCS call.
  when(gcsUtil.expand(any(GcsPath.class))).then(new Answer<List<GcsPath>>() {
    @Override
    public List<GcsPath> answer(InvocationOnMock invocation) throws Throwable {
      GcsPath requested = (GcsPath) invocation.getArguments()[0];
      return ImmutableList.of(requested);
    }
  });
  // Pretend every bucket exists and is readable.
  when(gcsUtil.bucketAccessible(any(GcsPath.class))).thenReturn(true);

  DataflowPipelineOptions pipelineOptions =
      PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
  pipelineOptions.setRunner(DataflowRunner.class);
  pipelineOptions.setGcpCredential(new TestCredential());
  pipelineOptions.setJobName("some-job-name");
  pipelineOptions.setProject("some-project");
  pipelineOptions.setRegion("some-region");
  pipelineOptions.setTempLocation(GcsPath.fromComponents("somebucket", "some/path").toString());
  // Empty staging list prevents a default value computed from the classpath.
  pipelineOptions.setFilesToStage(new LinkedList<String>());
  pipelineOptions.setGcsUtil(gcsUtil);
  return pipelineOptions;
}
 
开发者ID:apache,项目名称:beam,代码行数:23,代码来源:BatchStatefulParDoOverridesTest.java

示例3: buildPipelineOptions

import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; //导入方法依赖的package包/类
/**
 * Builds {@link DataflowPipelineOptions} wired against this test's shared
 * fixtures (the {@code mockGcsUtil} field and the {@code PROJECT_ID},
 * {@code VALID_TEMP_BUCKET}, {@code REGION_ID} constants) and registers them
 * as the default options for {@link FileSystems}.
 *
 * @return options ready for runner tests
 * @throws IOException declared for parity with the mocked client construction
 */
private DataflowPipelineOptions buildPipelineOptions() throws IOException {
  DataflowPipelineOptions pipelineOptions =
      PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  pipelineOptions.setRunner(DataflowRunner.class);
  pipelineOptions.setProject(PROJECT_ID);
  pipelineOptions.setTempLocation(VALID_TEMP_BUCKET);
  pipelineOptions.setRegion(REGION_ID);
  // Empty staging list prevents a default value computed from the classpath.
  pipelineOptions.setFilesToStage(new LinkedList<String>());
  pipelineOptions.setDataflowClient(buildMockDataflow());
  pipelineOptions.setGcsUtil(mockGcsUtil);
  pipelineOptions.setGcpCredential(new TestCredential());

  // Make the FileSystem registrar resolve paths against these options.
  FileSystems.setDefaultPipelineOptions(pipelineOptions);

  return pipelineOptions;
}
 
开发者ID:apache,项目名称:beam,代码行数:18,代码来源:DataflowRunnerTest.java

示例4: testRunWithFiles

import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; //导入方法依赖的package包/类
@Test
public void testRunWithFiles() throws IOException {
  // Test that the function DataflowRunner.stageFiles works as expected:
  // local files listed in filesToStage should surface as worker-pool packages
  // in the Job submitted to the (mocked) Dataflow service.
  final String cloudDataflowDataset = "somedataset";

  // Create some temporary files to act as stageable artifacts.
  File temp1 = File.createTempFile("DataflowRunnerTest", "txt");
  temp1.deleteOnExit();
  File temp2 = File.createTempFile("DataflowRunnerTest2", "txt");
  temp2.deleteOnExit();

  // The second file is staged under an explicit alias via "name=path" syntax.
  String overridePackageName = "alias.txt";

  // Report the staged objects as absent on GCS so the runner uploads them.
  when(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))
      .thenReturn(ImmutableList.of(GcsUtil.StorageObjectOrIOException.create(
          new FileNotFoundException("some/path"))));

  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setFilesToStage(ImmutableList.of(
      temp1.getAbsolutePath(),
      overridePackageName + "=" + temp2.getAbsolutePath()));
  options.setStagingLocation(VALID_STAGING_BUCKET);
  options.setTempLocation(VALID_TEMP_BUCKET);
  options.setTempDatasetId(cloudDataflowDataset);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setJobName("job");
  options.setDataflowClient(buildMockDataflow());
  options.setGcsUtil(mockGcsUtil);
  options.setGcpCredential(new TestCredential());

  // Each "upload" writes into a self-deleting temp file channel, so the
  // staging step completes without touching real GCS.
  when(mockGcsUtil.create(any(GcsPath.class), anyString(), anyInt()))
      .then(
          new Answer<SeekableByteChannel>() {
            @Override
            public SeekableByteChannel answer(InvocationOnMock invocation) throws Throwable {
              return FileChannel.open(
                  Files.createTempFile("channel-", ".tmp"),
                  StandardOpenOption.CREATE,
                  StandardOpenOption.WRITE,
                  StandardOpenOption.DELETE_ON_CLOSE);
            }
          });

  Pipeline p = buildDataflowPipeline(options);

  // Running the pipeline triggers staging and job submission.
  DataflowPipelineJob job = (DataflowPipelineJob) p.run();
  assertEquals("newid", job.getJobId());

  // Capture the Job request sent to the mocked service for inspection.
  ArgumentCaptor<Job> jobCaptor = ArgumentCaptor.forClass(Job.class);
  Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());
  Job workflowJob = jobCaptor.getValue();
  assertValidJob(workflowJob);

  // Both files must appear as packages on the first worker pool.
  assertEquals(
      2,
      workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().size());
  DataflowPackage workflowPackage1 =
      workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(0);
  assertThat(workflowPackage1.getName(), startsWith(temp1.getName()));
  DataflowPackage workflowPackage2 =
      workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(1);
  // The aliased file keeps its override name exactly.
  assertEquals(overridePackageName, workflowPackage2.getName());

  // Environment should reflect the configured temp bucket, dataset, and
  // runner-identifying user agent.
  assertEquals(
      GcsPath.fromUri(VALID_TEMP_BUCKET).toResourceName(),
      workflowJob.getEnvironment().getTempStoragePrefix());
  assertEquals(
      cloudDataflowDataset,
      workflowJob.getEnvironment().getDataset());
  assertEquals(
      DataflowRunnerInfo.getDataflowRunnerInfo().getName(),
      workflowJob.getEnvironment().getUserAgent().get("name"));
  assertEquals(
      DataflowRunnerInfo.getDataflowRunnerInfo().getVersion(),
      workflowJob.getEnvironment().getUserAgent().get("version"));
}
 
开发者ID:apache,项目名称:beam,代码行数:78,代码来源:DataflowRunnerTest.java


注:本文中的org.apache.beam.runners.dataflow.options.DataflowPipelineOptions.setRegion方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。