本文整理汇总了Python中airflow.contrib.hooks.ssh_hook.SSHHook.get_conn方法的典型用法代码示例。如果您正苦于以下问题:Python SSHHook.get_conn方法的具体用法?Python SSHHook.get_conn怎么用?Python SSHHook.get_conn使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.contrib.hooks.ssh_hook.SSHHook
的用法示例。
在下文中一共展示了SSHHook.get_conn方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_conn_with_extra_parameters
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
def test_conn_with_extra_parameters(self):
    """Verify that JSON extras on an SSH connection are applied by get_conn()."""
    from airflow.contrib.hooks.ssh_hook import SSHHook

    # Register a connection whose `extra` JSON enables compression and
    # disables host-key checking.
    extra_conn = models.Connection(
        conn_id='ssh_with_extra',
        host='localhost',
        conn_type='ssh',
        extra='{"compress" : true, "no_host_key_check" : "true"}',
    )
    db.merge_conn(extra_conn)

    hook = SSHHook(ssh_conn_id='ssh_with_extra', keepalive_interval=10)
    hook.get_conn()

    # Both extras must have been parsed into real booleans on the hook.
    self.assertEqual(hook.compress, True)
    self.assertEqual(hook.no_host_key_check, True)
示例2: SSHHookTest
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SSHHookTest(unittest.TestCase):
    """Integration tests for SSHHook: plain connection and local port-forward tunnel."""

    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        self.hook = SSHHook(ssh_conn_id='ssh_default', keepalive_interval=10)
        # Tests target a throwaway host; skip known_hosts verification.
        self.hook.no_host_key_check = True

    def test_ssh_connection(self):
        # get_conn() should hand back a live client object, never None.
        ssh_hook = self.hook.get_conn()
        self.assertIsNotNone(ssh_hook)

    def test_tunnel(self):
        """Forward local port 2135 to 2134 and talk to a child server through it."""
        print("Setting up remote listener")
        import subprocess
        import socket
        # HELLO_SERVER_CMD (defined elsewhere in this file) appears to print
        # b"ready" once listening, then answer one connection with b"hello"
        # -- TODO confirm against its definition.
        self.server_handle = subprocess.Popen(["python", "-c", HELLO_SERVER_CMD],
                                              stdout=subprocess.PIPE)
        print("Setting up tunnel")
        with self.hook.create_tunnel(2135, 2134):
            print("Tunnel up")
            # Block until the child signals readiness before connecting.
            server_output = self.server_handle.stdout.read(5)
            self.assertEqual(server_output, b"ready")
            print("Connecting to server via tunnel")
            s = socket.socket()
            s.connect(("localhost", 2135))
            print("Receiving...", )
            response = s.recv(5)
            self.assertEqual(response, b"hello")
            print("Closing connection")
            s.close()
            print("Waiting for listener...")
            output, _ = self.server_handle.communicate()
            # Exit code 0 means the child served the request and exited cleanly.
            self.assertEqual(self.server_handle.returncode, 0)
        print("Closing tunnel")
示例3: execute
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
def execute(self, context):
    """Copy one S3 object to the SFTP server, staging it in a local temp file."""
    self.s3_key = self.get_s3_key(self.s3_key)
    sftp = SSHHook(ssh_conn_id=self.sftp_conn_id).get_conn().open_sftp()
    s3 = S3Hook(self.s3_conn_id).get_conn()
    with NamedTemporaryFile("w") as tmp:
        # Download to local disk first, then push the same file over SFTP.
        s3.download_file(self.s3_bucket, self.s3_key, tmp.name)
        sftp.put(tmp.name, self.sftp_path)
示例4: execute
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
def execute(self, context):
    """Copy one file from the SFTP server to S3, staging it in a local temp file."""
    self.s3_key = self.get_s3_key(self.s3_key)
    sftp = SSHHook(ssh_conn_id=self.sftp_conn_id).get_conn().open_sftp()
    s3_hook = S3Hook(self.s3_conn_id)
    with NamedTemporaryFile("w") as tmp:
        # Pull the remote file to local disk, then upload it to S3.
        sftp.get(self.sftp_path, tmp.name)
        s3_hook.load_file(
            filename=tmp.name,
            key=self.s3_key,
            bucket_name=self.s3_bucket,
            replace=True,
        )
示例5: test_ssh_connection_without_password
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
def test_ssh_connection_without_password(self, ssh_mock):
    """Without a password, get_conn() must connect using only the key file."""
    hook = SSHHook(
        remote_host='remote_host',
        port='port',
        username='username',
        timeout=10,
        key_file='fake.file',
    )
    with hook.get_conn():
        # The mocked paramiko client must receive the key file and no password.
        expected_kwargs = dict(
            hostname='remote_host',
            username='username',
            key_filename='fake.file',
            timeout=10,
            compress=True,
            port='port',
            sock=None,
        )
        ssh_mock.return_value.connect.assert_called_once_with(**expected_kwargs)
示例6: SFTPOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SFTPOperator(BaseOperator):
    """
    SFTPOperator for transferring files from remote host to local or vice versa.
    This operator uses ssh_hook to open an sftp transport channel that serves
    as the basis for the file transfer.

    :param ssh_hook: predefined ssh_hook to use for remote execution
    :type ssh_hook: :class:`SSHHook`
    :param ssh_conn_id: connection id from airflow Connections
    :type ssh_conn_id: str
    :param remote_host: remote host to connect
    :type remote_host: str
    :param local_filepath: local file path to get or put
    :type local_filepath: str
    :param remote_filepath: remote file path to get or put
    :type remote_filepath: str
    :param operation: specify operation 'get' or 'put', defaults to put
    :type operation: str
    """
    template_fields = ('local_filepath', 'remote_filepath')

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 local_filepath=None,
                 remote_filepath=None,
                 operation=SFTPOperation.PUT,
                 *args,
                 **kwargs):
        super(SFTPOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.local_filepath = local_filepath
        self.remote_filepath = remote_filepath
        self.operation = operation
        # Fail fast at DAG-definition time on an unsupported operation.
        if not (self.operation.lower() == SFTPOperation.GET or
                self.operation.lower() == SFTPOperation.PUT):
            raise TypeError("unsupported operation value {0}, expected {1} or {2}"
                            .format(self.operation, SFTPOperation.GET, SFTPOperation.PUT))

    def execute(self, context):
        """Perform the configured get/put transfer.

        Any failure is wrapped in AirflowException; returns None on success.
        """
        file_msg = None
        try:
            if self.ssh_conn_id and not self.ssh_hook:
                self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
            if not self.ssh_hook:
                raise AirflowException("can not operate without ssh_hook or ssh_conn_id")
            if self.remote_host is not None:
                # An explicit remote_host overrides whatever the hook was built with.
                self.ssh_hook.remote_host = self.remote_host
            ssh_client = self.ssh_hook.get_conn()
            sftp_client = ssh_client.open_sftp()
            if self.operation.lower() == SFTPOperation.GET:
                file_msg = "from {0} to {1}".format(self.remote_filepath,
                                                    self.local_filepath)
                # Use the task-instance logger instead of the root logger so the
                # message lands in the task log, matching the other operators here.
                self.log.debug("Starting to transfer {0}".format(file_msg))
                sftp_client.get(self.remote_filepath, self.local_filepath)
            else:
                file_msg = "from {0} to {1}".format(self.local_filepath,
                                                    self.remote_filepath)
                self.log.debug("Starting to transfer file {0}".format(file_msg))
                sftp_client.put(self.local_filepath, self.remote_filepath)
        except Exception as e:
            raise AirflowException("Error while transferring {0}, error: {1}"
                                   .format(file_msg, str(e)))
        return None
示例7: test_ssh_connection
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
def test_ssh_connection(self):
    """A default SSH connection should be able to run a trivial command."""
    hook = SSHHook(ssh_conn_id='ssh_default')
    with hook.get_conn() as client:
        _, stdout, _ = client.exec_command('ls')
        # Any readable output (even empty bytes) proves the channel worked.
        self.assertIsNotNone(stdout.read())
示例8: SFTPOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SFTPOperator(BaseOperator):
    """
    SFTPOperator for transferring files from remote host to local or vice a versa.
    This operator uses ssh_hook to open sftp transport channel that serve as basis
    for file transfer.

    :param ssh_hook: predefined ssh_hook to use for remote execution.
        Either `ssh_hook` or `ssh_conn_id` needs to be provided.
    :type ssh_hook: airflow.contrib.hooks.ssh_hook.SSHHook
    :param ssh_conn_id: connection id from airflow Connections.
        `ssh_conn_id` will be ignored if `ssh_hook` is provided.
    :type ssh_conn_id: str
    :param remote_host: remote host to connect (templated)
        Nullable. If provided, it will replace the `remote_host` which was
        defined in `ssh_hook` or predefined in the connection of `ssh_conn_id`.
    :type remote_host: str
    :param local_filepath: local file path to get or put. (templated)
    :type local_filepath: str
    :param remote_filepath: remote file path to get or put. (templated)
    :type remote_filepath: str
    :param operation: specify operation 'get' or 'put', defaults to put
    :type operation: str
    :param confirm: specify if the SFTP operation should be confirmed, defaults to True
    :type confirm: bool
    :param create_intermediate_dirs: create missing intermediate directories when
        copying from remote to local and vice-versa. Default is False.
        Example: The following task would copy ``file.txt`` to the remote host
        at ``/tmp/tmp1/tmp2/`` while creating ``tmp``,``tmp1`` and ``tmp2`` if they
        don't exist. If the parameter is not passed it would error as the directory
        does not exist. ::
            put_file = SFTPOperator(
                task_id="test_sftp",
                ssh_conn_id="ssh_default",
                local_filepath="/tmp/file.txt",
                remote_filepath="/tmp/tmp1/tmp2/file.txt",
                operation="put",
                create_intermediate_dirs=True,
                dag=dag
            )
    :type create_intermediate_dirs: bool
    """
    # remote_host is templated so the target host can vary per task instance.
    template_fields = ('local_filepath', 'remote_filepath', 'remote_host')

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 local_filepath=None,
                 remote_filepath=None,
                 operation=SFTPOperation.PUT,
                 confirm=True,
                 create_intermediate_dirs=False,
                 *args,
                 **kwargs):
        super(SFTPOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.local_filepath = local_filepath
        self.remote_filepath = remote_filepath
        self.operation = operation
        self.confirm = confirm
        self.create_intermediate_dirs = create_intermediate_dirs
        # Validate eagerly so a bad `operation` fails at DAG-definition time.
        if not (self.operation.lower() == SFTPOperation.GET or
                self.operation.lower() == SFTPOperation.PUT):
            raise TypeError("unsupported operation value {0}, expected {1} or {2}"
                            .format(self.operation, SFTPOperation.GET, SFTPOperation.PUT))

    def execute(self, context):
        # Runs one transfer in the configured direction. NOTE(review): the tail
        # of this method was elided by the source page (marker below), so the
        # error handling for the outer `try` is not visible here.
        file_msg = None
        try:
            if self.ssh_conn_id:
                if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
                    self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
                else:
                    self.log.info("ssh_hook is not provided or invalid. " +
                                  "Trying ssh_conn_id to create SSHHook.")
                    self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
            if not self.ssh_hook:
                raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")
            if self.remote_host is not None:
                self.log.info("remote_host is provided explicitly. " +
                              "It will replace the remote_host which was defined " +
                              "in ssh_hook or predefined in connection of ssh_conn_id.")
                self.ssh_hook.remote_host = self.remote_host
            with self.ssh_hook.get_conn() as ssh_client:
                sftp_client = ssh_client.open_sftp()
                if self.operation.lower() == SFTPOperation.GET:
                    local_folder = os.path.dirname(self.local_filepath)
                    if self.create_intermediate_dirs:
                        # Create Intermediate Directories if it doesn't exist
                        try:
                            os.makedirs(local_folder)
#.........这里部分代码省略.........
示例9: SFTPOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SFTPOperator(BaseOperator):
    """
    SFTPOperator for transferring files from remote host to local or vice versa.
    This operator uses ssh_hook to open an sftp transport channel that serves as
    the basis for file transfer.

    :param ssh_hook: predefined ssh_hook to use for remote execution.
        Either `ssh_hook` or `ssh_conn_id` needs to be provided.
    :type ssh_hook: :class:`SSHHook`
    :param ssh_conn_id: connection id from airflow Connections.
        `ssh_conn_id` will be ignored if `ssh_hook` is provided.
    :type ssh_conn_id: str
    :param remote_host: remote host to connect (templated)
        Nullable. If provided, it will replace the `remote_host` which was
        defined in `ssh_hook` or predefined in the connection of `ssh_conn_id`.
    :type remote_host: str
    :param local_filepath: local file path to get or put. (templated)
    :type local_filepath: str
    :param remote_filepath: remote file path to get or put. (templated)
    :type remote_filepath: str
    :param operation: specify operation 'get' or 'put', defaults to put
    :type operation: str
    :param confirm: specify if the SFTP operation should be confirmed, defaults to True
    :type confirm: bool
    """
    # remote_host is templated so the target host can vary per task instance.
    template_fields = ('local_filepath', 'remote_filepath', 'remote_host')

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 local_filepath=None,
                 remote_filepath=None,
                 operation=SFTPOperation.PUT,
                 confirm=True,
                 *args,
                 **kwargs):
        super(SFTPOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.local_filepath = local_filepath
        self.remote_filepath = remote_filepath
        self.operation = operation
        self.confirm = confirm
        # Validate eagerly so a bad `operation` fails at DAG-definition time.
        if not (self.operation.lower() == SFTPOperation.GET or
                self.operation.lower() == SFTPOperation.PUT):
            raise TypeError("unsupported operation value {0}, expected {1} or {2}"
                            .format(self.operation, SFTPOperation.GET, SFTPOperation.PUT))

    def execute(self, context):
        # Runs one transfer in the configured direction; every failure is
        # re-raised as AirflowException with the transfer direction included.
        file_msg = None
        try:
            if self.ssh_conn_id:
                # A provided hook instance always wins over ssh_conn_id.
                if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
                    self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
                else:
                    self.log.info("ssh_hook is not provided or invalid. " +
                                  "Trying ssh_conn_id to create SSHHook.")
                    self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
            if not self.ssh_hook:
                raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")
            if self.remote_host is not None:
                self.log.info("remote_host is provided explicitly. " +
                              "It will replace the remote_host which was defined " +
                              "in ssh_hook or predefined in connection of ssh_conn_id.")
                self.ssh_hook.remote_host = self.remote_host
            # Context manager closes the SSH connection (and with it the SFTP
            # channel) when the transfer finishes or fails.
            with self.ssh_hook.get_conn() as ssh_client:
                sftp_client = ssh_client.open_sftp()
                if self.operation.lower() == SFTPOperation.GET:
                    file_msg = "from {0} to {1}".format(self.remote_filepath,
                                                        self.local_filepath)
                    self.log.debug("Starting to transfer %s", file_msg)
                    sftp_client.get(self.remote_filepath, self.local_filepath)
                else:
                    file_msg = "from {0} to {1}".format(self.local_filepath,
                                                        self.remote_filepath)
                    self.log.debug("Starting to transfer file %s", file_msg)
                    # `confirm` makes paramiko stat() the file after upload.
                    sftp_client.put(self.local_filepath,
                                    self.remote_filepath,
                                    confirm=self.confirm)
        except Exception as e:
            raise AirflowException("Error while transferring {0}, error: {1}"
                                   .format(file_msg, str(e)))
        return None
示例10: SSHOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SSHOperator(BaseOperator):
    """
    SSHOperator to execute commands on given remote host using the ssh_hook.

    :param ssh_hook: predefined ssh_hook to use for remote execution
    :type ssh_hook: :class:`SSHHook`
    :param ssh_conn_id: connection id from airflow Connections
    :type ssh_conn_id: str
    :param remote_host: remote host to connect
    :type remote_host: str
    :param command: command to execute on remote host
    :type command: str
    :param timeout: timeout (in seconds) for executing the command.
    :type timeout: int
    :param do_xcom_push: return the stdout which also get set in xcom by airflow platform
    :type do_xcom_push: bool
    """
    template_fields = ('command',)

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 command=None,
                 timeout=10,
                 do_xcom_push=False,
                 *args,
                 **kwargs):
        super(SSHOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.command = command
        self.timeout = timeout
        self.do_xcom_push = do_xcom_push

    def execute(self, context):
        # Runs `command` remotely, streaming stdout/stderr into the task log.
        # NOTE(review): the tail of this method was elided by the source page
        # (marker below), so the outer `try`'s except clause is not visible here.
        try:
            if self.ssh_conn_id and not self.ssh_hook:
                self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
            if not self.ssh_hook:
                raise AirflowException("can not operate without ssh_hook or ssh_conn_id")
            if self.remote_host is not None:
                # An explicit remote_host overrides the hook's configured host.
                self.ssh_hook.remote_host = self.remote_host
            ssh_client = self.ssh_hook.get_conn()
            if not self.command:
                raise AirflowException("no command specified so nothing to execute here.")
            # Auto apply tty when its required in case of sudo
            get_pty = False
            if self.command.startswith('sudo'):
                get_pty = True
            # set timeout taken as params
            stdin, stdout, stderr = ssh_client.exec_command(command=self.command,
                                                            get_pty=get_pty,
                                                            timeout=self.timeout
                                                            )
            # get channels
            channel = stdout.channel
            # closing stdin
            stdin.close()
            channel.shutdown_write()
            agg_stdout = b''
            agg_stderr = b''
            # capture any initial output in case channel is closed already
            stdout_buffer_length = len(stdout.channel.in_buffer)
            if stdout_buffer_length > 0:
                agg_stdout += stdout.channel.recv(stdout_buffer_length)
            # read from both stdout and stderr
            while not channel.closed or channel.recv_ready() or channel.recv_stderr_ready():
                readq, _, _ = select([channel], [], [], self.timeout)
                for c in readq:
                    if c.recv_ready():
                        line = stdout.channel.recv(len(c.in_buffer))
                        line = line  # NOTE(review): no-op self-assignment, looks vestigial
                        agg_stdout += line
                        self.log.info(line.decode('utf-8').strip('\n'))
                    if c.recv_stderr_ready():
                        line = stderr.channel.recv_stderr(len(c.in_stderr_buffer))
                        line = line  # NOTE(review): no-op self-assignment, looks vestigial
                        agg_stderr += line
                        self.log.warning(line.decode('utf-8').strip('\n'))
                # Stop once the exit status is known and both buffers are drained.
                if stdout.channel.exit_status_ready()\
                        and not stderr.channel.recv_stderr_ready()\
                        and not stdout.channel.recv_ready():
                    stdout.channel.shutdown_read()
                    stdout.channel.close()
                    break
#.........这里部分代码省略.........
示例11: SSHOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SSHOperator(BaseOperator):
    """
    SSHOperator to execute commands on given remote host using the ssh_hook.

    :param ssh_hook: predefined ssh_hook to use for remote execution.
        Either `ssh_hook` or `ssh_conn_id` needs to be provided.
    :type ssh_hook: airflow.contrib.hooks.ssh_hook.SSHHook
    :param ssh_conn_id: connection id from airflow Connections.
        `ssh_conn_id` will be ignored if `ssh_hook` is provided.
    :type ssh_conn_id: str
    :param remote_host: remote host to connect (templated)
        Nullable. If provided, it will replace the `remote_host` which was
        defined in `ssh_hook` or predefined in the connection of `ssh_conn_id`.
    :type remote_host: str
    :param command: command to execute on remote host. (templated)
    :type command: str
    :param timeout: timeout (in seconds) for executing the command.
    :type timeout: int
    """
    template_fields = ('command', 'remote_host')
    # Allow the command to be loaded from a templated .sh file.
    template_ext = ('.sh',)

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 command=None,
                 timeout=10,
                 *args,
                 **kwargs):
        super(SSHOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.command = command
        self.timeout = timeout

    def execute(self, context):
        # Runs `command` remotely, streaming output as it arrives.
        # NOTE(review): the tail of this method was elided by the source page
        # (marker below), so the outer `try`'s except clause is not visible here.
        try:
            if self.ssh_conn_id:
                # A provided hook instance always wins over ssh_conn_id.
                if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
                    self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
                else:
                    self.log.info("ssh_hook is not provided or invalid. " +
                                  "Trying ssh_conn_id to create SSHHook.")
                    self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id,
                                            timeout=self.timeout)
            if not self.ssh_hook:
                raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")
            if self.remote_host is not None:
                self.log.info("remote_host is provided explicitly. " +
                              "It will replace the remote_host which was defined " +
                              "in ssh_hook or predefined in connection of ssh_conn_id.")
                self.ssh_hook.remote_host = self.remote_host
            if not self.command:
                raise AirflowException("SSH command not specified. Aborting.")
            # Context manager closes the SSH connection on exit.
            with self.ssh_hook.get_conn() as ssh_client:
                # Auto apply tty when its required in case of sudo
                get_pty = False
                if self.command.startswith('sudo'):
                    get_pty = True
                self.log.info("Running command: %s", self.command)
                # set timeout taken as params
                stdin, stdout, stderr = ssh_client.exec_command(command=self.command,
                                                                get_pty=get_pty,
                                                                timeout=self.timeout
                                                                )
                # get channels
                channel = stdout.channel
                # closing stdin
                stdin.close()
                channel.shutdown_write()
                agg_stdout = b''
                agg_stderr = b''
                # capture any initial output in case channel is closed already
                stdout_buffer_length = len(stdout.channel.in_buffer)
                if stdout_buffer_length > 0:
                    agg_stdout += stdout.channel.recv(stdout_buffer_length)
                # read from both stdout and stderr
                while not channel.closed or \
                        channel.recv_ready() or \
                        channel.recv_stderr_ready():
                    readq, _, _ = select([channel], [], [], self.timeout)
                    for c in readq:
                        if c.recv_ready():
                            line = stdout.channel.recv(len(c.in_buffer))
                            line = line  # NOTE(review): no-op self-assignment, looks vestigial
#.........这里部分代码省略.........
示例12: SSHOperator
# 需要导入模块: from airflow.contrib.hooks.ssh_hook import SSHHook [as 别名]
# 或者: from airflow.contrib.hooks.ssh_hook.SSHHook import get_conn [as 别名]
class SSHOperator(BaseOperator):
    """
    SSHOperator to execute commands on given remote host using the ssh_hook.

    :param ssh_hook: predefined ssh_hook to use for remote execution
    :type ssh_hook: :class:`SSHHook`
    :param ssh_conn_id: connection id from airflow Connections
    :type ssh_conn_id: str
    :param remote_host: remote host to connect
    :type remote_host: str
    :param command: command to execute on remote host
    :type command: str
    :param timeout: timeout for executing the command.
    :type timeout: int
    :param do_xcom_push: return the stdout which also gets set in xcom by airflow platform
    :type do_xcom_push: bool
    """
    template_fields = ('command',)

    @apply_defaults
    def __init__(self,
                 ssh_hook=None,
                 ssh_conn_id=None,
                 remote_host=None,
                 command=None,
                 timeout=10,
                 do_xcom_push=False,
                 *args,
                 **kwargs):
        super(SSHOperator, self).__init__(*args, **kwargs)
        self.ssh_hook = ssh_hook
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.command = command
        self.timeout = timeout
        self.do_xcom_push = do_xcom_push

    def execute(self, context):
        """Run ``command`` on the remote host.

        Returns the command's stdout when ``do_xcom_push`` is set and the
        command succeeds; otherwise returns True. Raises AirflowException on
        any failure, including a non-zero remote exit status.
        """
        try:
            if self.ssh_conn_id and not self.ssh_hook:
                self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
            if not self.ssh_hook:
                raise AirflowException("can not operate without ssh_hook or ssh_conn_id")
            if self.remote_host is not None:
                # An explicit remote_host overrides the hook's configured host.
                self.ssh_hook.remote_host = self.remote_host
            ssh_client = self.ssh_hook.get_conn()
            if not self.command:
                raise AirflowException("no command specified so nothing to execute here.")
            # Auto apply tty when its required in case of sudo
            get_pty = False
            if self.command.startswith('sudo'):
                get_pty = True
            # set timeout taken as params
            stdin, stdout, stderr = ssh_client.exec_command(command=self.command,
                                                            get_pty=get_pty,
                                                            timeout=self.timeout
                                                            )
            # Blocks until the remote command completes.
            exit_status = stdout.channel.recv_exit_status()
            # BUGFIX: was `exit_status is 0` — identity comparison with an int
            # literal only works via CPython's small-int cache and emits a
            # SyntaxWarning on modern Pythons; value equality is what is meant.
            if exit_status == 0:
                # only returning on output if do_xcom_push is set
                # otherwise its not suppose to be disclosed
                if self.do_xcom_push:
                    return stdout.read()
            else:
                error_msg = stderr.read()
                raise AirflowException("error running cmd: {0}, error: {1}"
                                       .format(self.command, error_msg))
        except Exception as e:
            raise AirflowException("SSH operator error: {0}".format(str(e)))
        return True

    def tunnel(self):
        # NOTE(review): the transport object is discarded and nothing is
        # returned — this looks like a stub; verify against callers.
        ssh_client = self.ssh_hook.get_conn()
        ssh_client.get_transport()