This article collects typical usage examples of the fabric.api.put method in Python. If you are wondering how exactly api.put is used, how to call it, or what working examples look like, the curated code examples below may help. You can also explore further usage examples of the module this method belongs to, fabric.api.
The following shows 15 code examples of the api.put method, sorted by popularity by default.
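Before the individual examples, here is a minimal standalone sketch of the basic call, assuming a Fabric 1.x style fabfile; the host name and both paths are hypothetical placeholders rather than values taken from any example below.

# Minimal sketch (Fabric 1.x): upload one local file to the configured host.
# The host and both paths are hypothetical placeholders.
from fabric.api import env, put

env.hosts = ['deploy@example.com']

def upload_config():
    # put(local_path, remote_path) copies the file and returns the list of
    # remote paths it wrote. use_sudo uploads via a temporary location when
    # the destination needs root; mode sets the remote file permissions.
    put('config/app.conf', '/etc/myapp/app.conf', use_sudo=True, mode='0644')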
Example 1: stage
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def stage(self, config_dir):
    """Stage a zip (built by `python butler.py package`)."""
    os.environ['CONFIG_DIR_OVERRIDE'] = config_dir
    self.restart()
    time.sleep(1)
    zip_path = package.package(
        revision=butler_common.compute_staging_revision(),
        platform_name='windows')
    remote_zip_path = (
        '{clusterfuzz_parent_path}\\{staging_source_filename}'.format(
            clusterfuzz_parent_path=self.clusterfuzz_parent_path,
            staging_source_filename=self.staging_source_filename))
    api.put(zip_path, remote_zip_path)
    api.put(EXTRACT_ZIP_PS_LOCAL_PATH, EXTRACT_ZIP_PS_REMOTE_PATH)
    self._powershell(EXTRACT_ZIP_PS_REMOTE_PATH, 'file')
    self.restart()
Example 2: bootstrap
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def bootstrap(config, replace_existing_dse_install=True):
    if replace_existing_dse_install:
        filename = os.path.join(dse_cache_local, dse_tarball)
        dest = os.path.join(dse_builds_remote, dse_tarball)
        # Remove the build folder if it exists
        fab.run('rm -rf {}'.format(os.path.join(dse_builds_remote, dse_tarball.replace('-bin.tar.gz', ''))))
        # Upload the binaries
        fab.run('mkdir -p {dse_builds}'.format(dse_builds=dse_builds_remote))
        fab.put(filename, dest)
        # Extract the binaries
        fab.run('tar -C {dse_builds} -xf {dest}'.format(dse_builds=dse_builds_remote, dest=dest))
        # Symlink the current build to ~/fab/dse
        fab.run('ln -sfn {dse_build} {dse_home}'.format(
            dse_build=os.path.join(dse_builds_remote, dse_tarball.replace('-bin.tar.gz', '')),
            dse_home=get_dse_path()))
    return config['revision']
Example 3: _configure_spark_env
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def _configure_spark_env(config):
    # Place the spark environment file on hosts:
    spark_env = config.get('spark_env', '')
    if isinstance(spark_env, (list, tuple)):
        spark_env = "\n".join(spark_env)
    spark_env += "\n"
    spark_env_script = "spark-{name}.sh".format(name=uuid.uuid1())
    spark_env_file = StringIO(spark_env)
    fab.run('mkdir -p ~/fab/scripts')
    fab.put(spark_env_file, '~/fab/scripts/{spark_env_script}'.format(spark_env_script=spark_env_script))
    fab.puts('spark-env is: {}'.format(spark_env))
    if len(spark_env_script) > 0:
        spark_env_path = os.path.join(get_dse_path(), 'resources', 'spark', 'conf', 'spark-env.sh')
        fab.run('cat ~/fab/scripts/{spark_env_script} >> {spark_env_path}'.format(spark_env_script=spark_env_script,
                                                                                  spark_env_path=spark_env_path))
Example 4: setup_supervisor
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def setup_supervisor():
    """Set up the supervisor daemon for running OCL processes.

    One of its key functions is to put the required API tokens into the
    environment for the web server.
    """
    # The first time this will fail because of a chicken-and-egg situation:
    # we need the API server to get the tokens, but we need supervisor to
    # run the API server.
    auth_token, anon_token = get_api_tokens()
    if auth_token is not None and anon_token is not None:
        env.OCL_API_TOKEN = auth_token
        env.OCL_ANON_API_TOKEN = anon_token
    files.upload_template(_conf_path('ocl_supervisor.conf'),
                          '/etc/supervisor/conf.d', env, use_sudo=True)
    put(_conf_path('supervisord.conf'), '/etc/supervisor', use_sudo=True)
    # restart to run as deploy
    sudo('/etc/init.d/supervisor restart')
Example 5: setup_environment
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def setup_environment():
    """Create OCL directories and files."""
    for directory in ['/opt/virtualenvs', '/opt/deploy']:
        if not files.exists(directory):
            print(yellow('Creating directory %s...' % directory))
            sudo('mkdir %s' % directory)
            sudo('chown deploy:deploy %s' % directory)

    # All logs go to /var/log/ocl subdirectories.
    if not files.exists('/var/log/ocl'):
        sudo('mkdir /var/log/ocl')
        sudo('chown deploy:deploy /var/log/ocl')

    # This backup dir is used by the current API server deployment process.
    if not files.exists(BACKUP_DIR):
        sudo('mkdir -p %s' % BACKUP_DIR)
        sudo('chown deploy:deploy %s' % BACKUP_DIR)

    # A few shell aliases that PK likes...
    put(_conf_path('ocl_aliases'), '~/.bash_aliases')
    put(_conf_path('tmux.conf'), '~/.tmux.conf')
Example 6: put_secure
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def put_secure(user_group, mode, *args, **kwargs):
    missing_owner_code = 42
    user, group = user_group.split(":")

    files = put(*args, mode=mode, **kwargs)
    for file in files:
        with settings(warn_only=True):
            command = \
                "( getent passwd {user} >/dev/null || ( rm -f {file} ; " \
                "exit {missing_owner_code} ) ) && " \
                "chown {user_group} {file}".format(
                    user=user, file=file, user_group=user_group,
                    missing_owner_code=missing_owner_code)
            result = sudo(command)

            if result.return_code == missing_owner_code:
                abort("User %s does not exist. Make sure the Presto "
                      "server RPM is installed and try again" % (user,))
            elif result.failed:
                abort("Failed to chown file %s" % (file,))
Example 7: build
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def build(self, tag, add_latest_tag=False, add_tags=None, raise_on_error=True, **kwargs):
    try:
        context = kwargs.pop('fileobj')
    except KeyError:
        raise ValueError("'fileobj' needs to be provided. Using 'path' is currently not implemented.")
    for a in ['custom_context', 'encoding']:
        kwargs.pop(a, None)

    with temp_dir() as remote_tmp:
        remote_fn = posixpath.join(remote_tmp, 'context')
        put(context, remote_fn)
        cmd_str = self._out.get_cmd('build', '- <', remote_fn, tag=tag, **kwargs)
        with settings(warn_only=not raise_on_error):
            res = self._call(cmd_str)
    if res:
        image_id = _find_image_id(res)
        if image_id:
            self.add_extra_tags(image_id, tag, add_tags, add_latest_tag)
            return image_id
    return None
Example 8: update_config
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def update_config(self, content, path):
    old_file = six.BytesIO()
    if files.exists(path, use_sudo=self.sudo):
        fab.get(remote_path=path, local_path=old_file, use_sudo=self.sudo)
    old_content = old_file.getvalue()
    need_update = content != old_content
    if need_update:
        fabricio.move_file(
            path_from=path,
            path_to=path + '.backup',
            sudo=self.sudo,
            ignore_errors=True,
        )
        fab.put(six.BytesIO(content), path, use_sudo=self.sudo, mode='0644')
        fabricio.log('{path} updated'.format(path=path))
    else:
        fabricio.log('{path} not changed'.format(path=path))
    return need_update
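The same compare-then-upload pattern can be sketched with plain fabric.api calls and no fabricio helpers; the function name, sudo usage, and file mode below are illustrative assumptions rather than part of the fabricio API.

# Standalone sketch of the compare-then-upload idea shown above.
import six
from fabric import api as fab
from fabric.contrib import files

def upload_if_changed(content, path):
    # Read the current remote file (if any) into memory.
    old = six.BytesIO()
    if files.exists(path, use_sudo=True):
        fab.get(remote_path=path, local_path=old, use_sudo=True)
    if old.getvalue() == content:
        return False  # remote file already matches the desired content
    # put() accepts a file-like object, so the new content never touches the local disk.
    fab.put(six.BytesIO(content), path, use_sudo=True, mode='0644')
    return True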
Example 9: create_personal_git_repo
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def create_personal_git_repo(name, private=False):
    """Creates a new personal git repository under the public_git repository"""
    # Command-line arguments are passed in as strings.
    if isinstance(private, str):
        private = (private.lower() == 'true')

    # Create a repository locally, upload it and delete the local repository.
    # Do not create a repository directly on the remote machine because its
    # version of git may be old.
    repo_name = '{0}.git'.format(name)
    local(
        'git init --bare --shared={0} {1}'
        .format('none' if private else 'all', repo_name)
    )
    run('install -d -m 755 ~/public_git/')
    put(repo_name, '~/public_git/')
    local('rm -rf {0}'.format(repo_name))
Example 10: setenforce
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def setenforce(mode):
    """Modify the mode SELinux is running in.

    :param mode: Use 'enforcing' or 1 or True to put SELinux in enforcing mode.
        Use 'permissive' or 0 or False to put SELinux in permissive mode.
    """
    if isinstance(mode, str):
        mode = mode.lower()

    enforcing_modes = ('enforcing', 1, True)
    permissive_modes = ('permissive', 0, False)
    if mode in enforcing_modes:
        mode = 1
    elif mode in permissive_modes:
        mode = 0
    else:
        raise ValueError('Mode should be one of {0}'.format(
            enforcing_modes + permissive_modes
        ))

    run('setenforce {0}'.format(mode))
Example 11: _copy_staging_archive_from_local_to_remote
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def _copy_staging_archive_from_local_to_remote(self, local_zip_path):
    """Copy staging archive from local to remote."""
    remote_zip_path = (
        '{clusterfuzz_parent_path}/{staging_source_filename}'.format(
            clusterfuzz_parent_path=self.clusterfuzz_parent_path,
            staging_source_filename=self.staging_source_filename))
    self._run('rm -f ' + remote_zip_path)
    api.sudo('chmod a+w ' + self.clusterfuzz_parent_path_outside)
    api.put(local_zip_path, self._path_outside_docker(remote_zip_path))
Example 12: _copy_staging_archive_from_local_to_remote
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def _copy_staging_archive_from_local_to_remote(self, local_zip_path):
    """Copy staging archive from local to remote."""
    remote_zip_path = (
        '{clusterfuzz_parent_path}/{staging_source_filename}'.format(
            clusterfuzz_parent_path=self.clusterfuzz_parent_path,
            staging_source_filename=self.staging_source_filename))
    self._run('rm -f ' + remote_zip_path)
    api.put(local_zip_path, remote_zip_path)
Example 13: push_directory
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def push_directory(self):
    with tempfile.TemporaryDirectory() as tmpdir:
        archive = self.archive(rootdir=".", output_dir=tmpdir)
        remote.put(archive, "/tmp/rorolite-project.tgz")

        with lcd(tmpdir):
            self.generate_supervisor_config(rootdir=tmpdir)
            supervisor_archive = self.archive(tmpdir, base_dir=".rorolite", filename="rorolite-supervisor")
            remote.put(supervisor_archive, "/tmp/rorolite-supervisor.tgz")

    with remote.cd(self.deploy_root):
        remote.sudo("chown {} .".format(env.user))
        remote.run("tar xzf /tmp/rorolite-project.tgz")
        remote.run("tar xzf /tmp/rorolite-supervisor.tgz")
Example 14: destroy
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def destroy(leave_data=False, kill_delay=0):
    """Uninstall Cassandra and clean up data and logs"""
    # We used to have a better pattern match for the Cassandra
    # process, but it got fragile if you put too many JVM params.
    if leave_data:
        fab.run('JAVA_HOME={java_home} {nodetool_cmd} drain'.format(java_home=config['java_home'], nodetool_cmd=_nodetool_cmd()), quiet=True)
        fab.run('rm -rf {commitlog}/*'.format(commitlog=config['commitlog_directory']))
        _clean_up_cdc_directories()
    if kill_delay:
        fab.run('killall java', quiet=True)
        # kill_delay gives the JVM time to exit; profiling output needs some time to be dumped
        time.sleep(kill_delay)
    fab.run('killall -9 java', quiet=True)
    fab.run('pkill -f "python.*fincore_capture"', quiet=True)
    fab.run('rm -rf fab/cassandra')
    fab.run('rm -rf fab/dse')
    fab.run('rm -rf fab/scripts')
    fab.run('rm -f {startup_log}'.format(startup_log=CASSANDRA_STARTUP_LOG))

    # Ensure directory configurations look sane
    assert type(config['data_file_directories']) == list
    for t in [config['saved_caches_directory'], config['commitlog_directory'],
              config['flush_directory'], config['log_dir']] + config['data_file_directories']:
        assert type(t) in (str, unicode) and len(t) > 1, '{t} doesn\'t look like a directory'.format(t=t)

    if not leave_data:
        for d in config['data_file_directories']:
            fab.run('rm -rf {data}/*'.format(data=d))
        fab.run('rm -rf {saved_caches_directory}/*'.format(saved_caches_directory=config['saved_caches_directory']))
        fab.run('rm -rf {commitlog}/*'.format(commitlog=config['commitlog_directory']))
        fab.run('rm -rf {flushdir}/*'.format(flushdir=config['flush_directory']))
        if config.get('hints_directory'):
            fab.run('rm -rf {hints_directory}/*'.format(hints_directory=config.get('hints_directory')))
        _clean_up_cdc_directories()
        fab.run('rm -rf {log_dir}/*'.format(log_dir=config['log_dir']))
    fab.run('rm -f /tmp/fincore.stats.log')
Example 15: copy_logs
# Required import: from fabric import api [as alias]
# Or: from fabric.api import put [as alias]
def copy_logs(local_directory):
    # Put the whole body in a warn_only block and do not fail immediately in case a logging dir cannot be found.
    # E.g. if the C* JVM cannot start because of invalid settings, the C* system.log dir won't be available
    # and this piece of code would fail. However, in this case we want to be able to at least copy
    # the startup log (nohup.out) to see what went wrong.
    with fab.settings(warn_only=True):
        cfg = config['hosts'][fab.env.host]
        host_log_dir = os.path.join(local_directory, cfg['hostname'])
        if not os.path.exists(host_log_dir):
            os.makedirs(host_log_dir)
        # copy the node's startup log
        fab.get(CASSANDRA_STARTUP_LOG, host_log_dir)
        # copy the node's system.log
        fab.get(os.path.join(config['log_dir'], '*'), host_log_dir)