Python dataproc_operator.DataprocClusterCreateOperator Class Code Examples

This article collects typical usage examples of the Python class airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator. If you have been wondering what the DataprocClusterCreateOperator class does, how to use it, or where to find concrete examples, the curated class code examples below may help.


A total of 12 code examples of the DataprocClusterCreateOperator class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
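Before turning to the test examples, here is a minimal sketch of how DataprocClusterCreateOperator might be wired into a DAG. It only uses constructor arguments that the tests below exercise (cluster_name, project_id, num_workers, zone, machine types, auto_delete_ttl); the project ID, zone, machine types, and schedule are placeholder values chosen for illustration, not values taken from any of the examples.

# Minimal illustrative DAG; project ID and zone below are placeholders, not from the examples.
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.dataproc_operator import DataprocClusterCreateOperator

with DAG(dag_id='example_dataproc_create_cluster',
         start_date=datetime(2017, 6, 1),
         schedule_interval=None) as dag:

    create_cluster = DataprocClusterCreateOperator(
        task_id='create_dataproc_cluster',
        cluster_name='smoke-cluster-{{ ts_nodash }}',  # templated; rendered per run (see Example 10)
        project_id='my-gcp-project',                   # placeholder project ID
        num_workers=2,
        zone='us-central1-a',                          # placeholder zone
        master_machine_type='n1-standard-2',
        worker_machine_type='n1-standard-2',
        auto_delete_ttl=3600,                          # cluster auto-deletes after one hour
    )

In practice the create task is usually followed by job-submission and cluster-deletion tasks; auto_delete_ttl acts as a safety net, and Examples 3, 4, and 8 below show how it and auto_delete_time map to the cluster's lifecycleConfig.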

Example 1: create_cluster_with_invalid_internal_ip_only_setup

        def create_cluster_with_invalid_internal_ip_only_setup():
            # Given
            create_cluster = DataprocClusterCreateOperator(
                task_id=TASK_ID,
                cluster_name=CLUSTER_NAME,
                project_id=GCP_PROJECT_ID,
                num_workers=NUM_WORKERS,
                zone=GCE_ZONE,
                dag=self.dag,
                internal_ip_only=True)

            # When
            create_cluster._build_cluster_data()
Developer: Fokko | Project: incubator-airflow | Lines of code: 13 | Source: test_dataproc_operator.py

Example 2: test_build_cluster_data_with_auto_zone

    def test_build_cluster_data_with_auto_zone(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT_ID,
            num_workers=NUM_WORKERS,
            master_machine_type=MASTER_MACHINE_TYPE,
            worker_machine_type=WORKER_MACHINE_TYPE
        )
        cluster_data = dataproc_operator._build_cluster_data()
        self.assertNotIn('zoneUri', cluster_data['config']['gceClusterConfig'])
        self.assertEqual(cluster_data['config']['masterConfig']['machineTypeUri'], MASTER_MACHINE_TYPE)
        self.assertEqual(cluster_data['config']['workerConfig']['machineTypeUri'], WORKER_MACHINE_TYPE)
Developer: apache | Project: incubator-airflow | Lines of code: 13 | Source: test_dataproc_operator.py

Example 3: test_build_cluster_data_with_autoDeleteTtl

    def test_build_cluster_data_with_autoDeleteTtl(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=GCE_ZONE,
            dag=self.dag,
            auto_delete_ttl=AUTO_DELETE_TTL,
        )
        cluster_data = dataproc_operator._build_cluster_data()
        self.assertEqual(cluster_data['config']['lifecycleConfig']['autoDeleteTtl'],
                         "654s")
Developer: Fokko | Project: incubator-airflow | Lines of code: 13 | Source: test_dataproc_operator.py

Example 4: test_build_cluster_data_with_autoDeleteTime

    def test_build_cluster_data_with_autoDeleteTime(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=ZONE,
            dag=self.dag,
            auto_delete_time=AUTO_DELETE_TIME,
        )
        cluster_data = dataproc_operator._build_cluster_data()
        self.assertEqual(cluster_data['config']['lifecycleConfig']['autoDeleteTime'],
                         "2017-06-07T00:00:00.000000Z")
Developer: AdamUnger | Project: incubator-airflow | Lines of code: 13 | Source: test_dataproc_operator.py

Example 5: test_build_single_node_cluster

    def test_build_single_node_cluster(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT_ID,
            num_workers=0,
            num_preemptible_workers=0,
            zone=GCE_ZONE,
            dag=self.dag
        )
        cluster_data = dataproc_operator._build_cluster_data()
        self.assertEqual(
            cluster_data['config']['softwareConfig']['properties']
            ['dataproc:dataproc.allow.zero.workers'], "true")
Developer: Fokko | Project: incubator-airflow | Lines of code: 14 | Source: test_dataproc_operator.py

Example 6: test_cluster_name_log_no_sub

    def test_cluster_name_log_no_sub(self):
        with patch('airflow.contrib.operators.dataproc_operator.DataProcHook') as mock_hook:
            mock_hook.return_value.get_conn = self.mock_conn
            dataproc_task = DataprocClusterCreateOperator(
                task_id=TASK_ID,
                cluster_name=CLUSTER_NAME,
                project_id=GCP_PROJECT_ID,
                num_workers=NUM_WORKERS,
                zone=GCE_ZONE,
                dag=self.dag
            )
            with patch.object(dataproc_task.log, 'info') as mock_info:
                with self.assertRaises(TypeError):
                    dataproc_task.execute(None)
                mock_info.assert_called_with('Creating cluster: %s', CLUSTER_NAME)
Developer: Fokko | Project: incubator-airflow | Lines of code: 15 | Source: test_dataproc_operator.py

Example 7: test_cluster_name_log_no_sub

    def test_cluster_name_log_no_sub(self):
        with patch('airflow.contrib.operators.dataproc_operator.DataProcHook') \
                as mock_hook, patch('logging.info') as mock_logging:
            dataproc_task = DataprocClusterCreateOperator(
                task_id=TASK_ID,
                cluster_name=CLUSTER_NAME,
                project_id=PROJECT_ID,
                num_workers=NUM_WORKERS,
                zone=ZONE,
                dag=self.dag
            )

            with self.assertRaises(TypeError):
                dataproc_task.execute(None)
            mock_logging.assert_called_with('Creating cluster: ' + CLUSTER_NAME)
Developer: lauralorenz | Project: airflow | Lines of code: 15 | Source: test_dataproc_operator.py

Example 8: test_build_cluster_data_with_autoDeleteTime_and_autoDeleteTtl

    def test_build_cluster_data_with_autoDeleteTime_and_autoDeleteTtl(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=GCE_ZONE,
            dag=self.dag,
            auto_delete_time=AUTO_DELETE_TIME,
            auto_delete_ttl=AUTO_DELETE_TTL,
        )
        cluster_data = dataproc_operator._build_cluster_data()
        if 'autoDeleteTtl' in cluster_data['config']['lifecycleConfig']:
            self.fail("If 'auto_delete_time' and 'auto_delete_ttl' is set, " +
                      "only `auto_delete_time` is used")
        self.assertEqual(cluster_data['config']['lifecycleConfig']['autoDeleteTime'],
                         "2017-06-07T00:00:00.000000Z")
Developer: Fokko | Project: incubator-airflow | Lines of code: 17 | Source: test_dataproc_operator.py

Example 9: test_init_with_custom_image

    def test_init_with_custom_image(self):
        dataproc_operator = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=GCE_ZONE,
            dag=self.dag,
            custom_image=CUSTOM_IMAGE
        )

        cluster_data = dataproc_operator._build_cluster_data()
        expected_custom_image_url = \
            'https://www.googleapis.com/compute/beta/projects/' \
            '{}/global/images/{}'.format(GCP_PROJECT_ID, CUSTOM_IMAGE)
        self.assertEqual(cluster_data['config']['masterConfig']['imageUri'],
                         expected_custom_image_url)
        self.assertEqual(cluster_data['config']['workerConfig']['imageUri'],
                         expected_custom_image_url)
Developer: Fokko | Project: incubator-airflow | Lines of code: 19 | Source: test_dataproc_operator.py

Example 10: test_cluster_name_log_sub

    def test_cluster_name_log_sub(self):
        with patch('airflow.contrib.operators.dataproc_operator.DataProcHook') as mock_hook:
            mock_hook.return_value.get_conn = self.mock_conn
            dataproc_task = DataprocClusterCreateOperator(
                task_id=TASK_ID,
                cluster_name='smoke-cluster-{{ ts_nodash }}',
                project_id=PROJECT_ID,
                num_workers=NUM_WORKERS,
                zone=ZONE,
                dag=self.dag
            )
            with patch.object(dataproc_task.log, 'info') as mock_info:
                context = {'ts_nodash': 'testnodash'}

                rendered = dataproc_task.render_template('cluster_name', getattr(dataproc_task, 'cluster_name'), context)
                setattr(dataproc_task, 'cluster_name', rendered)
                with self.assertRaises(TypeError):
                    dataproc_task.execute(None)
                mock_info.assert_called_with('Creating cluster: %s', u'smoke-cluster-testnodash')
Developer: 7digital | Project: incubator-airflow | Lines of code: 19 | Source: test_dataproc_operator.py

Example 11: setUp

    def setUp(self):
        self.dataproc = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=ZONE,
            storage_bucket=STORAGE_BUCKET,
            image_version=IMAGE_VERSION,
            master_machine_type=MASTER_MACHINE_TYPE,
            master_disk_size=MASTER_DISK_SIZE,
            worker_machine_type=WORKER_MACHINE_TYPE,
            worker_disk_size=WORKER_DISK_SIZE,
            num_preemptible_workers=NUM_PREEMPTIBLE_WORKERS)
Developer: bioteam | Project: incubator-airflow | Lines of code: 14 | Source: test_dataproc_operator.py

Example 12: DataprocClusterCreateOperatorTest

class DataprocClusterCreateOperatorTest(unittest.TestCase):

    def setUp(self):
        self.dataproc = DataprocClusterCreateOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=PROJECT_ID,
            num_workers=NUM_WORKERS,
            zone=ZONE,
            storage_bucket=STORAGE_BUCKET,
            image_version=IMAGE_VERSION,
            master_machine_type=MASTER_MACHINE_TYPE,
            master_disk_size=MASTER_DISK_SIZE,
            worker_machine_type=WORKER_MACHINE_TYPE,
            worker_disk_size=WORKER_DISK_SIZE,
            num_preemptible_workers=NUM_PREEMPTIBLE_WORKERS)

    def test_init(self):
        """Test DataFlowPythonOperator instance is properly initialized."""
        self.assertEqual(self.dataproc.cluster_name, CLUSTER_NAME)
        self.assertEqual(self.dataproc.project_id, PROJECT_ID)
        self.assertEqual(self.dataproc.num_workers, NUM_WORKERS)
        self.assertEqual(self.dataproc.zone, ZONE)
        self.assertEqual(self.dataproc.storage_bucket, STORAGE_BUCKET)
        self.assertEqual(self.dataproc.image_version, IMAGE_VERSION)
        self.assertEqual(self.dataproc.master_machine_type, MASTER_MACHINE_TYPE)
        self.assertEqual(self.dataproc.master_disk_size, MASTER_DISK_SIZE)
        self.assertEqual(self.dataproc.worker_machine_type, WORKER_MACHINE_TYPE)
        self.assertEqual(self.dataproc.worker_disk_size, WORKER_DISK_SIZE)
        self.assertEqual(self.dataproc.num_preemptible_workers, NUM_PREEMPTIBLE_WORKERS)

    def test_build_cluster_data(self):
        cluster_data = self.dataproc._build_cluster_data()
        self.assertEqual(cluster_data['clusterName'], CLUSTER_NAME)
        self.assertEqual(cluster_data['projectId'], PROJECT_ID)
        self.assertEqual(cluster_data['config']['softwareConfig'], {'imageVersion': IMAGE_VERSION})
        self.assertEqual(cluster_data['config']['configBucket'], STORAGE_BUCKET)
        self.assertEqual(cluster_data['config']['workerConfig']['numInstances'], NUM_WORKERS)
        self.assertEqual(cluster_data['config']['secondaryWorkerConfig']['numInstances'],
                         NUM_PREEMPTIBLE_WORKERS)
Developer: bioteam | Project: incubator-airflow | Lines of code: 40 | Source: test_dataproc_operator.py


Note: The airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright in the source code remains with the original authors. Please refer to each project's License before distributing or using the code; do not reproduce this article without permission.