

Python ssh_hook.SSHHook Class Code Examples

This article collects typical usage examples of the Python class airflow.contrib.hooks.ssh_hook.SSHHook. If you are unsure what SSHHook is for or how to use it, the selected class code examples below should help.


The following presents 15 code examples of the SSHHook class, ordered by popularity.
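Before the examples, here is a minimal sketch of how the class is typically driven: the hook is built from an Airflow connection id, and get_conn() returns a paramiko.SSHClient on which remote commands can be executed. The connection id 'ssh_default' is an assumption; substitute a connection defined in your own Airflow installation.

    from airflow.contrib.hooks.ssh_hook import SSHHook

    # 'ssh_default' is a placeholder connection id, assumed to exist in the Airflow metadata DB
    hook = SSHHook(ssh_conn_id='ssh_default')
    ssh_client = hook.get_conn()  # a paramiko.SSHClient

    # exec_command comes from paramiko; stdout/stderr are channel file objects
    stdin, stdout, stderr = ssh_client.exec_command('uname -a')
    print(stdout.read().decode('utf-8'))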

Example 1: setUp

    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        from airflow.hooks.S3_hook import S3Hook

        hook = SSHHook(ssh_conn_id='ssh_default')
        s3_hook = S3Hook('aws_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'

        self.hook = hook
        self.s3_hook = s3_hook

        self.ssh_client = self.hook.get_conn()
        self.sftp_client = self.ssh_client.open_sftp()

        self.dag = dag
        self.s3_bucket = BUCKET
        self.sftp_path = SFTP_PATH
        self.s3_key = S3_KEY
Author: Fokko, Project: incubator-airflow, Lines: 26, Source: test_s3_to_sftp_operator.py

Example 2: setUp

 def setUp(self):
     configuration.load_test_config()
     from airflow.contrib.hooks.ssh_hook import SSHHook
     hook = SSHHook(ssh_conn_id='ssh_default')
     hook.no_host_key_check = True
     args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True
     }
     dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
     dag.schedule_interval = '@once'
     self.hook = hook
     self.dag = dag
     self.test_dir = "/tmp"
     self.test_local_dir = "/tmp/tmp2"
     self.test_remote_dir = "/tmp/tmp1"
     self.test_local_filename = 'test_local_file'
     self.test_remote_filename = 'test_remote_file'
     self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
                                                 self.test_local_filename)
     # Local Filepath with Intermediate Directory
     self.test_local_filepath_int_dir = '{0}/{1}'.format(self.test_local_dir,
                                                         self.test_local_filename)
     self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
                                                  self.test_remote_filename)
     # Remote Filepath with Intermediate Directory
     self.test_remote_filepath_int_dir = '{0}/{1}'.format(self.test_remote_dir,
                                                          self.test_remote_filename)
Author: apache, Project: incubator-airflow, Lines: 29, Source: test_sftp_operator.py

Example 3: SSHHookTest

class SSHHookTest(unittest.TestCase):
    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        self.hook = SSHHook(ssh_conn_id='ssh_default', keepalive_interval=10)
        self.hook.no_host_key_check = True

    def test_ssh_connection(self):
        ssh_hook = self.hook.get_conn()
        self.assertIsNotNone(ssh_hook)

    def test_tunnel(self):
        print("Setting up remote listener")
        import subprocess
        import socket

        self.server_handle = subprocess.Popen(["python", "-c", HELLO_SERVER_CMD],
                                              stdout=subprocess.PIPE)
        print("Setting up tunnel")
        with self.hook.create_tunnel(2135, 2134):
            print("Tunnel up")
            server_output = self.server_handle.stdout.read(5)
            self.assertEqual(server_output, b"ready")
            print("Connecting to server via tunnel")
            s = socket.socket()
            s.connect(("localhost", 2135))
            print("Receiving...", )
            response = s.recv(5)
            self.assertEqual(response, b"hello")
            print("Closing connection")
            s.close()
            print("Waiting for listener...")
            output, _ = self.server_handle.communicate()
            self.assertEqual(self.server_handle.returncode, 0)
            print("Closing tunnel")
Author: 7digital, Project: incubator-airflow, Lines: 35, Source: test_ssh_hook.py

Example 4: execute

    def execute(self, context):
        self.s3_key = self.get_s3_key(self.s3_key)
        ssh_hook = SSHHook(ssh_conn_id=self.sftp_conn_id)
        s3_hook = S3Hook(self.s3_conn_id)

        s3_client = s3_hook.get_conn()
        sftp_client = ssh_hook.get_conn().open_sftp()

        with NamedTemporaryFile("w") as f:
            s3_client.download_file(self.s3_bucket, self.s3_key, f.name)
            sftp_client.put(f.name, self.sftp_path)
Author: apache, Project: incubator-airflow, Lines: 11, Source: s3_to_sftp_operator.py
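For context, a hedged sketch of how this execute() method would be triggered from a DAG. The operator class name and argument names are assumed from the contrib source file named above (s3_to_sftp_operator.py); verify them against your Airflow version. Bucket, key, path, and connection ids are placeholders.

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.s3_to_sftp_operator import S3ToSFTPOperator

    dag = DAG('s3_to_sftp_example', start_date=datetime(2019, 1, 1), schedule_interval='@once')

    transfer = S3ToSFTPOperator(
        task_id='s3_to_sftp_transfer',
        s3_bucket='my-bucket',          # placeholder bucket
        s3_key='data/report.csv',       # placeholder key
        sftp_path='/tmp/report.csv',    # placeholder remote path
        sftp_conn_id='ssh_default',     # placeholder SSH connection
        s3_conn_id='aws_default',       # placeholder AWS connection
        dag=dag,
    )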

Example 5: test_conn_with_extra_parameters

 def test_conn_with_extra_parameters(self):
     from airflow.contrib.hooks.ssh_hook import SSHHook
     db.merge_conn(
         models.Connection(conn_id='ssh_with_extra',
                           host='localhost',
                           conn_type='ssh',
                           extra='{"compress" : true, "no_host_key_check" : "true"}'
                           )
     )
     ssh_hook = SSHHook(ssh_conn_id='ssh_with_extra', keepalive_interval=10)
     ssh_hook.get_conn()
     self.assertEqual(ssh_hook.compress, True)
     self.assertEqual(ssh_hook.no_host_key_check, True)
Author: arihantsurana, Project: incubator-airflow, Lines: 13, Source: test_ssh_hook.py

Example 6: setUp

 def setUp(self):
     configuration.load_test_config()
     from airflow.contrib.hooks.ssh_hook import SSHHook
     hook = SSHHook()
     hook.no_host_key_check = True
     args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True
     }
     dag = DAG(TEST_DAG_ID+'test_schedule_dag_once', default_args=args)
     dag.schedule_interval = '@once'
     self.hook = hook
     self.dag = dag
Author: bolkedebruin, Project: airflow, Lines: 14, Source: ssh_execute_operator.py

Example 7: execute

    def execute(self, context):
        file_msg = None
        try:
            if self.ssh_conn_id and not self.ssh_hook:
                self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)

            if not self.ssh_hook:
                raise AirflowException("can not operate without ssh_hook or ssh_conn_id")

            if self.remote_host is not None:
                self.ssh_hook.remote_host = self.remote_host

            ssh_client = self.ssh_hook.get_conn()
            sftp_client = ssh_client.open_sftp()
            if self.operation.lower() == SFTPOperation.GET:
                file_msg = "from {0} to {1}".format(self.remote_filepath,
                                                    self.local_filepath)
                logging.debug("Starting to transfer {0}".format(file_msg))
                sftp_client.get(self.remote_filepath, self.local_filepath)
            else:
                file_msg = "from {0} to {1}".format(self.local_filepath,
                                                    self.remote_filepath)
                logging.debug("Starting to transfer file {0}".format(file_msg))
                sftp_client.put(self.local_filepath, self.remote_filepath)

        except Exception as e:
            raise AirflowException("Error while transferring {0}, error: {1}"
                                   .format(file_msg, str(e)))

        return None
Author: Nextdoor, Project: airflow, Lines: 30, Source: sftp_operator.py

Example 8: execute

    def execute(self, context):
        self.s3_key = self.get_s3_key(self.s3_key)
        ssh_hook = SSHHook(ssh_conn_id=self.sftp_conn_id)
        s3_hook = S3Hook(self.s3_conn_id)

        sftp_client = ssh_hook.get_conn().open_sftp()

        with NamedTemporaryFile("w") as f:
            sftp_client.get(self.sftp_path, f.name)

            s3_hook.load_file(
                filename=f.name,
                key=self.s3_key,
                bucket_name=self.s3_bucket,
                replace=True
            )
Author: MiguelPeralvo, Project: incubator-airflow, Lines: 16, Source: sftp_to_s3_operator.py
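This is the reverse direction of Example 4; a similarly hedged sketch of driving it from a DAG follows. The class name is assumed from the source file name (sftp_to_s3_operator.py) and all values are placeholders; check them against your Airflow version.

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.sftp_to_s3_operator import SFTPToS3Operator

    dag = DAG('sftp_to_s3_example', start_date=datetime(2019, 1, 1), schedule_interval='@once')

    transfer = SFTPToS3Operator(
        task_id='sftp_to_s3_transfer',
        sftp_conn_id='ssh_default',     # placeholder SSH connection
        sftp_path='/tmp/report.csv',    # placeholder remote path
        s3_conn_id='aws_default',       # placeholder AWS connection
        s3_bucket='my-bucket',          # placeholder bucket
        s3_key='data/report.csv',       # placeholder key
        dag=dag,
    )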

Example 9: test_tunnel_without_password

    def test_tunnel_without_password(self, ssh_mock):
        hook = SSHHook(remote_host='remote_host',
                       port='port',
                       username='username',
                       timeout=10,
                       key_file='fake.file')

        with hook.get_tunnel(1234):
            ssh_mock.assert_called_once_with('remote_host',
                                             ssh_port='port',
                                             ssh_username='username',
                                             ssh_pkey='fake.file',
                                             ssh_proxy=None,
                                             local_bind_address=('localhost', ),
                                             remote_bind_address=('localhost', 1234),
                                             host_pkey_directories=[],
                                             logger=hook.log)
Author: Fokko, Project: incubator-airflow, Lines: 17, Source: test_ssh_hook.py
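Outside the mocked test above, get_tunnel() returns an sshtunnel.SSHTunnelForwarder that is started and stopped by the with-block. A hedged sketch, assuming a connection id 'ssh_default' and placeholder host/ports:

    from airflow.contrib.hooks.ssh_hook import SSHHook

    hook = SSHHook(ssh_conn_id='ssh_default')

    # Forward a local port to port 5432 on a host reachable from the SSH server
    with hook.get_tunnel(5432, remote_host='db.internal', local_port=5432) as tunnel:
        # while this block is active, connections to localhost:5432 go through the tunnel
        print("tunnel local bind port:", tunnel.local_bind_port)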

Example 10: test_ssh_connection_without_password

    def test_ssh_connection_without_password(self, ssh_mock):
        hook = SSHHook(remote_host='remote_host',
                       port='port',
                       username='username',
                       timeout=10,
                       key_file='fake.file')

        with hook.get_conn():
            ssh_mock.return_value.connect.assert_called_once_with(
                hostname='remote_host',
                username='username',
                key_filename='fake.file',
                timeout=10,
                compress=True,
                port='port',
                sock=None
            )
Author: Fokko, Project: incubator-airflow, Lines: 17, Source: test_ssh_hook.py

Example 11: test_tunnel

    def test_tunnel(self):
        hook = SSHHook(ssh_conn_id='ssh_default')

        import subprocess
        import socket

        server_handle = subprocess.Popen(["python", "-c", HELLO_SERVER_CMD],
                                         stdout=subprocess.PIPE)
        with hook.create_tunnel(2135, 2134):
            server_output = server_handle.stdout.read(5)
            self.assertEqual(server_output, b"ready")
            s = socket.socket()
            s.connect(("localhost", 2135))
            response = s.recv(5)
            self.assertEqual(response, b"hello")
            s.close()
            output, _ = server_handle.communicate()
            self.assertEqual(server_handle.returncode, 0)
Author: Fokko, Project: incubator-airflow, Lines: 18, Source: test_ssh_hook.py

Example 12: execute

    def execute(self, context):
        file_msg = None
        try:
            if self.ssh_conn_id:
                if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
                    self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
                else:
                    self.log.info("ssh_hook is not provided or invalid. " +
                                  "Trying ssh_conn_id to create SSHHook.")
                    self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)

            if not self.ssh_hook:
                raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")

            if self.remote_host is not None:
                self.log.info("remote_host is provided explicitly. " +
                              "It will replace the remote_host which was defined " +
                              "in ssh_hook or predefined in connection of ssh_conn_id.")
                self.ssh_hook.remote_host = self.remote_host

            with self.ssh_hook.get_conn() as ssh_client:
                sftp_client = ssh_client.open_sftp()
                if self.operation.lower() == SFTPOperation.GET:
                    local_folder = os.path.dirname(self.local_filepath)
                    if self.create_intermediate_dirs:
                        # Create intermediate directories if they don't exist
                        try:
                            os.makedirs(local_folder)
                        except OSError:
                            if not os.path.isdir(local_folder):
                                raise
                    file_msg = "from {0} to {1}".format(self.remote_filepath,
                                                        self.local_filepath)
                    self.log.info("Starting to transfer %s", file_msg)
                    sftp_client.get(self.remote_filepath, self.local_filepath)
                else:
                    remote_folder = os.path.dirname(self.remote_filepath)
                    if self.create_intermediate_dirs:
                        _make_intermediate_dirs(
                            sftp_client=sftp_client,
                            remote_directory=remote_folder,
                        )
                    file_msg = "from {0} to {1}".format(self.local_filepath,
                                                        self.remote_filepath)
                    self.log.info("Starting to transfer file %s", file_msg)
                    sftp_client.put(self.local_filepath,
                                    self.remote_filepath,
                                    confirm=self.confirm)

        except Exception as e:
            raise AirflowException("Error while transferring {0}, error: {1}"
                                   .format(file_msg, str(e)))

        return self.local_filepath
Author: Fokko, Project: incubator-airflow, Lines: 54, Source: sftp_operator.py
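A hedged sketch of instantiating the operator whose execute() is shown above. The import path and argument names follow the contrib module named in the source (sftp_operator.py); file paths and the connection id are placeholders.

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.sftp_operator import SFTPOperation, SFTPOperator

    dag = DAG('sftp_put_example', start_date=datetime(2019, 1, 1), schedule_interval='@once')

    upload = SFTPOperator(
        task_id='upload_file',
        ssh_conn_id='ssh_default',                     # placeholder connection id
        local_filepath='/tmp/test_local_file',         # placeholder local path
        remote_filepath='/tmp/tmp1/test_remote_file',  # placeholder remote path
        operation=SFTPOperation.PUT,
        create_intermediate_dirs=True,  # create /tmp/tmp1 on the remote host if missing
        dag=dag,
    )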

Example 13: SSHHookTest

class SSHHookTest(unittest.TestCase):
    def setUp(self):
        configuration.test_mode()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        self.hook = SSHHook()
        self.hook.no_host_key_check = True

    def test_remote_cmd(self):
        output = self.hook.check_output(["echo", "-n", "airflow"])
        self.assertEqual(output, b"airflow")

    def test_tunnel(self):
        print("Setting up remote listener")
        import subprocess
        import socket

        self.handle = self.hook.Popen([
            "python", "-c", '"{0}"'.format(HELLO_SERVER_CMD)
        ], stdout=subprocess.PIPE)

        print("Setting up tunnel")
        with self.hook.tunnel(2135, 2134):
            print("Tunnel up")
            server_output = self.handle.stdout.read(5)
            self.assertEqual(server_output, b"ready")
            print("Connecting to server via tunnel")
            s = socket.socket()
            s.connect(("localhost", 2135))
            print("Receiving...",)
            response = s.recv(5)
            self.assertEqual(response, b"hello")
            print("Closing connection")
            s.close()
            print("Waiting for listener...")
            output, _ = self.handle.communicate()
            self.assertEqual(self.handle.returncode, 0)
            print("Closing tunnel")
Author: sepsyk, Project: airflow, Lines: 37, Source: core.py

Example 14: execute

    def execute(self, context):
        try:
            if self.ssh_conn_id and not self.ssh_hook:
                self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)

            if not self.ssh_hook:
                raise AirflowException("can not operate without ssh_hook or ssh_conn_id")

            if self.remote_host is not None:
                self.ssh_hook.remote_host = self.remote_host

            ssh_client = self.ssh_hook.get_conn()

            if not self.command:
                raise AirflowException("no command specified so nothing to execute here.")

            # Automatically request a tty when it is required, e.g. for sudo commands
            get_pty = False
            if self.command.startswith('sudo'):
                get_pty = True

            # use the timeout passed in via the operator's parameters
            stdin, stdout, stderr = ssh_client.exec_command(command=self.command,
                                                            get_pty=get_pty,
                                                            timeout=self.timeout
                                                            )
            exit_status = stdout.channel.recv_exit_status()
            if exit_status == 0:
                # only return the output if do_xcom_push is set;
                # otherwise it is not supposed to be disclosed
                if self.do_xcom_push:
                    enable_pickling = configuration.getboolean('core',
                                                               'enable_xcom_pickling')
                    if enable_pickling:
                        return stdout.read()
                    else:
                        return b64encode(stdout.read()).decode('utf-8')

            else:
                error_msg = stderr.read()
                raise AirflowException("error running cmd: {0}, error: {1}"
                                        .format(self.command, error_msg))

        except Exception as e:
            raise AirflowException("SSH operator error: {0}".format(str(e)))

        return True
Author: sstm2, Project: incubator-airflow, Lines: 47, Source: ssh_operator.py
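A hedged sketch of instantiating the operator whose execute() is shown above. The import path and argument names follow the contrib module named in the source (ssh_operator.py); the connection id and command are placeholders.

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.ssh_operator import SSHOperator

    dag = DAG('ssh_command_example', start_date=datetime(2019, 1, 1), schedule_interval='@once')

    run_cmd = SSHOperator(
        task_id='run_remote_command',
        ssh_conn_id='ssh_default',  # placeholder connection id
        command='echo airflow',     # placeholder command
        timeout=10,
        do_xcom_push=True,          # push (base64-encoded) stdout to XCom, as in the snippet above
        dag=dag,
    )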

Example 15: execute

    def execute(self, context):
        file_msg = None
        try:
            if self.ssh_conn_id:
                if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
                    self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
                else:
                    self.log.info("ssh_hook is not provided or invalid. " +
                                  "Trying ssh_conn_id to create SSHHook.")
                    self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)

            if not self.ssh_hook:
                raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")

            if self.remote_host is not None:
                self.log.info("remote_host is provided explicitly. " +
                              "It will replace the remote_host which was defined " +
                              "in ssh_hook or predefined in connection of ssh_conn_id.")
                self.ssh_hook.remote_host = self.remote_host

            with self.ssh_hook.get_conn() as ssh_client:
                sftp_client = ssh_client.open_sftp()
                if self.operation.lower() == SFTPOperation.GET:
                    file_msg = "from {0} to {1}".format(self.remote_filepath,
                                                        self.local_filepath)
                    self.log.debug("Starting to transfer %s", file_msg)
                    sftp_client.get(self.remote_filepath, self.local_filepath)
                else:
                    file_msg = "from {0} to {1}".format(self.local_filepath,
                                                        self.remote_filepath)
                    self.log.debug("Starting to transfer file %s", file_msg)
                    sftp_client.put(self.local_filepath,
                                    self.remote_filepath,
                                    confirm=self.confirm)

        except Exception as e:
            raise AirflowException("Error while transferring {0}, error: {1}"
                                   .format(file_msg, str(e)))

        return None
Author: AdamUnger, Project: incubator-airflow, Lines: 40, Source: sftp_operator.py


Note: The airflow.contrib.hooks.ssh_hook.SSHHook class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Please do not reproduce this article without permission.