This article collects typical usage examples of the Python class execo.process.SshProcess. If you are wondering what the SshProcess class does, how to use it, or are looking for working examples, the curated code samples below should help.
The following sections show 15 code examples of the SshProcess class, ordered by popularity by default.
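All of the examples follow the same basic pattern: build an SshProcess from a shell command and a target host, run it, and then inspect its result attributes (stdout, stderr, exit_code, finished_ok). A minimal sketch of that pattern, assuming only a reachable host (the host name below is a placeholder, not taken from any example):

from execo.host import Host
from execo.process import SshProcess

# Run a single command over SSH and wait for it to finish.
proc = SshProcess("uname -a", Host("node-1.example.com"))  # placeholder host
proc.run()

if proc.finished_ok:              # remote command ended with exit code 0
    print(proc.stdout.strip())
else:
    print("command failed with exit code", proc.exit_code)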
Example 1: start_spark
def start_spark(self):
    """Start spark processes.

    In STANDALONE mode it starts the master and slaves. In YARN mode it
    just checks that Hadoop is running, and starts it if not.
    """

    logger.info("Starting Spark")

    if self.running:
        logger.warn("Spark was already started")
        return

    if self.mode == STANDALONE_MODE:
        proc = SshProcess(self.sbin_dir + "/start-master.sh;" +
                          self.sbin_dir + "/start-slaves.sh;",
                          self.master)
        proc.run()
        if not proc.finished_ok:
            logger.warn("Error while starting Spark")
            return
    elif self.mode == YARN_MODE:
        if not self.hc.running:
            logger.warn("YARN services must be started first")
            self.hc.start_and_wait()

    self.running = True
Example 2: _copy_xp_output
def _copy_xp_output(self):
    """Copy experiment's output."""

    if self.output_path:
        remote_path = self.macro_manager.test_macros["xp.output"]  # TODO: what happens if not specified?
        local_path = os.path.join(self.output_path, str(self.comb_id))
        logger.info("Copying output to " + local_path)

        tmp_dir = "/tmp"

        # Remove file in tmp dir if exists
        proc = SshProcess("rm -rf " +
                          os.path.join(tmp_dir, os.path.basename(remote_path)),
                          self.hc.master)
        proc.run()

        # Get files in master
        self.hc.execute("fs -get " + remote_path + " " + tmp_dir,
                        verbose=False)

        # Copy files from master
        action = Get([self.hc.master],
                     [os.path.join(tmp_dir, os.path.basename(remote_path))],
                     local_path)
        action.run()
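The file transfers in this example do not go through SshProcess at all: the execo Get action pulls files from remote hosts to a local directory, and the Put action (used in example 4 below) pushes local files the other way. A small sketch under the assumption that both actions are imported from execo.action; host and file names are placeholders:

from execo.action import Get, Put
from execo.host import Host

host = Host("node-1.example.com")             # placeholder host

# Push a local file to the remote /tmp directory...
Put([host], ["./job.jar"], "/tmp").run()      # placeholder file name

# ...and fetch a remote result back into the current directory.
Get([host], ["/tmp/result.txt"], ".").run()   # placeholder file name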
Example 3: bootstrap
def bootstrap(self, tar_file):

    # 0. Check that required packages are present
    required_packages = "openjdk-7-jre openjdk-7-jdk"
    check_packages = TaktukRemote("dpkg -s " + required_packages,
                                  self.hosts)
    for p in check_packages.processes:
        p.nolog_exit_code = p.nolog_error = True
    check_packages.run()
    if not check_packages.ok:
        logger.info("Packages not installed, trying to install")
        install_packages = TaktukRemote(
            "export DEBIAN_FRONTEND=noninteractive ; " +
            "apt-get update && apt-get install -y --force-yes " +
            required_packages, self.hosts).run()
        if not install_packages.ok:
            logger.error("Unable to install the packages")

    get_java_home = SshProcess('echo $(readlink -f /usr/bin/javac | '
                               'sed "s:/bin/javac::")', self.master)
    get_java_home.run()
    self.java_home = get_java_home.stdout.strip()

    logger.info("All required packages are present")

    # 1. Copy hadoop tar file and uncompress
    logger.info("Copy " + tar_file + " to hosts and uncompress")
    rm_dirs = TaktukRemote("rm -rf " + self.base_dir +
                           " " + self.conf_dir,
                           self.hosts)
    put_tar = TaktukPut(self.hosts, [tar_file], "/tmp")
    tar_xf = TaktukRemote(
        "tar xf /tmp/" + os.path.basename(tar_file) + " -C /tmp",
        self.hosts)
    SequentialActions([rm_dirs, put_tar, tar_xf]).run()

    # 2. Move installation to base dir
    logger.info("Create installation directories")
    mv_base_dir = TaktukRemote(
        "mv /tmp/" + os.path.basename(tar_file).replace(".tgz", "") + " " +
        self.base_dir,
        self.hosts)
    mkdirs = TaktukRemote("mkdir -p " + self.conf_dir, self.hosts)
    chmods = TaktukRemote("chmod g+w " + self.base_dir +
                          " && chmod g+w " + self.conf_dir,
                          self.hosts)
    SequentialActions([mv_base_dir, mkdirs, chmods]).run()

    # 3. Specify environment variables
    command = "cat >> " + self.conf_dir + "/spark-env.sh << EOF\n"
    command += "JAVA_HOME=" + self.java_home + "\n"
    command += "SPARK_LOG_DIR=" + self.logs_dir + "\n"
    if self.hc:
        command += "HADOOP_CONF_DIR=" + self.hc.conf_dir + "\n"
    if self.mode == YARN_MODE:
        command += "YARN_CONF_DIR=" + self.hc.conf_dir + "\n"
    command += "EOF\n"
    command += "chmod +x " + self.conf_dir + "/spark-env.sh"
    action = Remote(command, self.hosts)
    action.run()
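The last bootstrap step builds a shell here-document and runs it on every host through a Remote action. The same idea can be reduced to a single host with SshProcess; the sketch below uses placeholder paths and host rather than values from the example:

from execo.host import Host
from execo.process import SshProcess

conf_dir = "/opt/spark/conf"                                  # placeholder path
command = "cat >> " + conf_dir + "/spark-env.sh << EOF\n"
command += "JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64\n"    # placeholder value
command += "EOF\n"
command += "chmod +x " + conf_dir + "/spark-env.sh"

# The whole snippet runs as one remote shell script.
SshProcess(command, Host("node-1.example.com")).run()         # placeholder host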
Example 4: execute_job
def execute_job(self, job, node=None, verbose=True):
    """Execute the given Spark job on the specified node.

    Args:
      job (SparkJob):
        The job object.
      node (Host, optional):
        The host where the command should be executed. If not provided,
        self.master is chosen.
      verbose (bool, optional):
        If True, the stdout and stderr of the remote process are displayed.

    Returns (tuple of str):
      A tuple with the standard and error outputs of the process executing
      the job.
    """

    if not self.running:
        logger.warn("The cluster was stopped. Starting it automatically")
        self.start()

    if node is None:
        node = self.master

    exec_dir = "/tmp"

    # Copy necessary files to cluster
    files_to_copy = job.get_files_to_copy()
    action = Put([node], files_to_copy, exec_dir)
    action.run()

    # Get command
    command = job.get_command(exec_dir)

    # Execute
    logger.info("Executing spark job. Command = {" + self.bin_dir +
                "/spark-submit " + command + "} in " + str(node))
    proc = SshProcess(self.bin_dir + "/spark-submit " + command, node)
    if verbose:
        red_color = '\033[01;31m'
        proc.stdout_handlers.append(sys.stdout)
        proc.stderr_handlers.append(
            ColorDecorator(sys.stderr, red_color))
    proc.start()
    proc.wait()

    # Get job info
    job.stdout = proc.stdout
    job.stderr = proc.stderr
    job.success = (proc.exit_code == 0)

    return proc.stdout, proc.stderr
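Note that the job is launched with start() and wait() rather than run(), after output handlers have been attached, so the remote output is streamed to the local terminal while the job is still running (ColorDecorator appears to come from the surrounding project rather than from execo itself). The streaming pattern in isolation, with a placeholder command and host:

import sys
from execo.host import Host
from execo.process import SshProcess

proc = SshProcess("echo hello", Host("node-1.example.com"))   # placeholders
proc.stdout_handlers.append(sys.stdout)   # echo remote stdout locally as it arrives
proc.stderr_handlers.append(sys.stderr)   # same for stderr
proc.start()                              # non-blocking start
proc.wait()                               # block until the remote command ends

print("exit code:", proc.exit_code)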
Example 5: get_version
def get_version(self):
    """Return the Hadoop version.

    Returns (str):
      The version used by the Hadoop cluster.
    """

    proc = SshProcess("export JAVA_HOME=" + self.java_home + ";" +
                      self.bin_dir + "/hadoop version",
                      self.master)
    proc.run()
    version = proc.stdout.splitlines()[0]
    return version
Example 6: format_dfs
def format_dfs(self):
    """Format the distributed filesystem."""

    logger.info("Formatting HDFS")

    proc = SshProcess(self.bin_dir + "/hadoop namenode -format",
                      self.master)
    proc.run()

    if proc.finished_ok:
        logger.info("HDFS formatted successfully")
    else:
        logger.warn("Error while formatting HDFS")
Example 7: stop_dfs
def stop_dfs(self):
    """Stop the NameNode and DataNodes."""

    self._check_initialization()

    logger.info("Stopping HDFS")

    proc = SshProcess(self.sbin_dir + "/stop-dfs.sh", self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while stopping HDFS")
    else:
        self.running_dfs = False
Example 8: stop_map_reduce
def stop_map_reduce(self):
    """Stop the JobTracker and TaskTrackers."""

    self._check_initialization()

    logger.info("Stopping MapReduce")

    proc = SshProcess(self.sbin_dir + "/stop-mapred.sh", self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while stopping MapReduce")
    else:
        self.running_map_reduce = False
Example 9: stop_yarn
def stop_yarn(self):
    """Stop the YARN ResourceManager and NodeManagers."""

    self._check_initialization()

    logger.info("Stopping YARN")

    proc = SshProcess(self.sbin_dir + "/stop-yarn.sh", self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while stopping YARN")
    else:
        self.running_yarn = False
Example 10: stop_spark
def stop_spark(self):
    """Stop Spark processes."""

    logger.info("Stopping Spark")

    if self.mode == STANDALONE_MODE:
        proc = SshProcess(self.sbin_dir + "/stop-slaves.sh;" +
                          self.sbin_dir + "/stop-master.sh;",
                          self.master)
        proc.run()
        if not proc.finished_ok:
            logger.warn("Error while stopping Spark")
            return

    self.running = False
Example 11: start_dfs_and_wait
def start_dfs_and_wait(self):
    """Start the NameNode and DataNodes and wait for exiting safemode."""

    self._check_initialization()

    self.start_dfs()

    logger.info("Waiting for safe mode to be off")
    proc = SshProcess(self.bin_dir + "/hadoop dfsadmin -safemode wait",
                      self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while starting HDFS")
    else:
        self.running_dfs = True
Example 12: execute
def execute(self, command, node=None, should_be_running=True,
            verbose=True):
    """Execute the given Hadoop command on the given node.

    Args:
      command (str):
        The command to be executed.
      node (Host, optional):
        The host where the command should be executed. If not provided,
        self.master is chosen.
      should_be_running (bool, optional):
        True if the cluster needs to be running in order to execute the
        command. If so, and it is not running, it is automatically started.
      verbose (bool, optional):
        If True, the stdout and stderr of the remote process are displayed.

    Returns (tuple of str):
      A tuple with the standard and error outputs of the process executing
      the command.
    """

    self._check_initialization()

    if should_be_running and not self.running:
        logger.warn("The cluster was stopped. Starting it automatically")
        self.start()

    if not node:
        node = self.master

    if verbose:
        logger.info("Executing {" + self.bin_dir + "/hadoop " +
                    command + "} in " + str(node))

    proc = SshProcess(self.bin_dir + "/hadoop " + command, node)

    if verbose:
        red_color = '\033[01;31m'
        proc.stdout_handlers.append(sys.stdout)
        proc.stderr_handlers.append(
            ColorDecorator(sys.stderr, red_color))

    proc.start()
    proc.wait()

    return (proc.stdout, proc.stderr)
Example 13: start_yarn
def start_yarn(self):
    """Start the YARN ResourceManager and NodeManagers."""

    logger.info("Starting YARN")

    self._check_initialization()

    proc = SshProcess(self.sbin_dir + "/start-yarn.sh", self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while starting YARN")
    else:
        # TODO: get success or not from super.
        self.running_yarn = True
        if self.running_dfs:
            self.running = True
Example 14: start_map_reduce
def start_map_reduce(self):
    """Start the JobTracker and TaskTrackers."""

    self._check_initialization()

    logger.info("Starting MapReduce")

    if self.running_map_reduce:
        logger.warn("MapReduce was already started")
        return

    proc = SshProcess(self.sbin_dir + "/start-mapred.sh", self.master)
    proc.run()

    if not proc.finished_ok:
        logger.warn("Error while starting MapReduce")
    else:
        logger.info("MapReduce started successfully")
        self.running_map_reduce = True
Example 15: execute
def execute(self):
    """Execute a single test.

    Returns (str):
      Local path of the file containing the process output.
    """

    test = SshProcess("java -jar " + self.jar_path +
                      " -p " + self.props_path,
                      self.host)

    # Output is stored in a local temporary file
    (_, temp_file) = tempfile.mkstemp("", "div_p2p-out-", "/tmp")
    test.stdout_handlers.append(temp_file)

    test.run()

    return temp_file
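This last example relies on stdout_handlers also accepting a file name, so the remote output is written directly to a local file instead of (or in addition to) a stream. A minimal sketch of that capture pattern, again with a placeholder command and host:

import tempfile
from execo.host import Host
from execo.process import SshProcess

(_, out_file) = tempfile.mkstemp(prefix="ssh-out-")

proc = SshProcess("hostname", Host("node-1.example.com"))   # placeholder host
proc.stdout_handlers.append(out_file)   # execo writes the remote stdout to this path
proc.run()

with open(out_file) as f:
    print(f.read().strip())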