This article collects typical usage examples of the getOutput method of org.apache.hadoop.util.Shell.ShellCommandExecutor in Java. If you are wondering what ShellCommandExecutor.getOutput does, how to call it, or what real-world uses look like, the curated code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.util.Shell.ShellCommandExecutor.
The following shows 15 code examples of the ShellCommandExecutor.getOutput method, sorted by popularity by default.
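Before diving into the examples, here is a minimal, self-contained sketch of the basic call pattern: construct a ShellCommandExecutor from an argv-style String[], call execute(), then read the buffered stdout with getOutput(). The command ("echo", "hello") and the class name GetOutputDemo are illustrative assumptions, not taken from the examples below; the executor API itself (execute(), getOutput(), getExitCode(), ExitCodeException) is the standard Hadoop one used throughout this page.

import java.io.IOException;

import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

// Minimal sketch: run a command and read its stdout via getOutput().
public class GetOutputDemo {
  public static void main(String[] args) throws IOException {
    // Hypothetical command; any argv-style String[] works here.
    ShellCommandExecutor exec = new ShellCommandExecutor(
        new String[] { "echo", "hello" });
    try {
      exec.execute();                        // runs the command and buffers stdout
      System.out.println(exec.getOutput());  // prints "hello" plus a newline
    } catch (ExitCodeException e) {
      // On a non-zero exit code, getOutput() still returns whatever was printed.
      System.err.println("exit=" + exec.getExitCode()
          + ", output=" + exec.getOutput());
      throw e;
    }
  }
}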
Example 1: isAvailable
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
public static boolean isAvailable() {
  if (Shell.WINDOWS) {
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.WINUTILS, "help" });
    try {
      shellExecutor.execute();
    } catch (IOException e) {
      LOG.error(StringUtils.stringifyException(e));
    } finally {
      String output = shellExecutor.getOutput();
      if (output != null &&
          output.contains("Prints to stdout a list of processes in the task")) {
        return true;
      }
    }
  }
  return false;
}
Example 2: isAvailable
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
public static boolean isAvailable() {
  if (Shell.WINDOWS) {
    if (!Shell.hasWinutilsPath()) {
      return false;
    }
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.getWinUtilsPath(), "help" });
    try {
      shellExecutor.execute();
    } catch (IOException e) {
      LOG.error(StringUtils.stringifyException(e));
    } finally {
      String output = shellExecutor.getOutput();
      if (output != null &&
          output.contains("Prints to stdout a list of processes in the task")) {
        return true;
      }
    }
  }
  return false;
}
Example 3: mountCgroups
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
public void mountCgroups(List<String> cgroupKVs, String hierarchy)
    throws IOException {
  List<String> command = new ArrayList<String>(
      Arrays.asList(containerExecutorExe, "--mount-cgroups", hierarchy));
  command.addAll(cgroupKVs);
  String[] commandArray = command.toArray(new String[command.size()]);
  ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray);
  if (LOG.isDebugEnabled()) {
    LOG.debug("mountCgroups: " + Arrays.toString(commandArray));
  }
  try {
    shExec.execute();
  } catch (IOException e) {
    int ret_code = shExec.getExitCode();
    LOG.warn("Exception in LinuxContainerExecutor mountCgroups ", e);
    logOutput(shExec.getOutput());
    throw new IOException("Problem mounting cgroups " + cgroupKVs +
        "; exit code = " + ret_code + " and output: " + shExec.getOutput(), e);
  }
}
Example 4: createHardLink
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
/**
 * Creates a hardlink
 * @param file - existing source file
 * @param linkName - desired target link file
 */
public static void createHardLink(File file, File linkName)
    throws IOException {
  if (file == null) {
    throw new IOException(
        "invalid arguments to createHardLink: source file is null");
  }
  if (linkName == null) {
    throw new IOException(
        "invalid arguments to createHardLink: link name is null");
  }
  // construct and execute shell command
  String[] hardLinkCommand = getHardLinkCommand.linkOne(file, linkName);
  ShellCommandExecutor shexec = new ShellCommandExecutor(hardLinkCommand);
  try {
    shexec.execute();
  } catch (ExitCodeException e) {
    throw new IOException("Failed to execute command " +
        Arrays.toString(hardLinkCommand) +
        "; command output: \"" + shexec.getOutput() + "\"" +
        "; WrappedException: \"" + e.getMessage() + "\"");
  }
}
Example 5: getSystemInfoInfoFromShell
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
String getSystemInfoInfoFromShell() {
  try {
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.getWinUtilsFile().getCanonicalPath(),
            "systeminfo" });
    shellExecutor.execute();
    return shellExecutor.getOutput();
  } catch (IOException e) {
    LOG.error(StringUtils.stringifyException(e));
  }
  return null;
}
Example 6: getAllProcessInfoFromShell
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
String getAllProcessInfoFromShell() {
  ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
      new String[] { Shell.WINUTILS, "task", "processList", taskProcessId });
  try {
    shellExecutor.execute();
    return shellExecutor.getOutput();
  } catch (IOException e) {
    LOG.error(StringUtils.stringifyException(e));
  }
  return null;
}
Example 7: getSystemInfoInfoFromShell
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
String getSystemInfoInfoFromShell() {
  ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
      new String[] { Shell.WINUTILS, "systeminfo" });
  try {
    shellExecutor.execute();
    return shellExecutor.getOutput();
  } catch (IOException e) {
    LOG.error(StringUtils.stringifyException(e));
  }
  return null;
}
Example 8: getAllProcessInfoFromShell
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
String getAllProcessInfoFromShell() {
  try {
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.getWinUtilsFile().getCanonicalPath(),
            "task", "processList", taskProcessId });
    shellExecutor.execute();
    return shellExecutor.getOutput();
  } catch (IOException e) {
    LOG.error(StringUtils.stringifyException(e));
  }
  return null;
}
Example 9: signalContainer
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
@Override
public boolean signalContainer(String user, String pid, Signal signal)
    throws IOException {
  verifyUsernamePattern(user);
  String runAsUser = getRunAsUser(user);
  String[] command =
      new String[] { containerExecutorExe,
          runAsUser,
          user,
          Integer.toString(Commands.SIGNAL_CONTAINER.getValue()),
          pid,
          Integer.toString(signal.getValue()) };
  ShellCommandExecutor shExec = new ShellCommandExecutor(command);
  if (LOG.isDebugEnabled()) {
    LOG.debug("signalContainer: " + Arrays.toString(command));
  }
  try {
    shExec.execute();
  } catch (ExitCodeException e) {
    int ret_code = shExec.getExitCode();
    if (ret_code == ResultCode.INVALID_CONTAINER_PID.getValue()) {
      return false;
    }
    LOG.warn("Error in signalling container " + pid + " with " + signal
        + "; exit = " + ret_code, e);
    logOutput(shExec.getOutput());
    throw new IOException("Problem signalling container " + pid + " with "
        + signal + "; output: " + shExec.getOutput() + " and exitCode: "
        + ret_code, e);
  }
  return true;
}
Example 10: executeCommand
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
private void executeCommand(ArrayList<String> cmdArgs) throws IOException {
  String[] cmd = (String[]) cmdArgs.toArray(new String[cmdArgs.size()]);
  ShellCommandExecutor executor = new ShellCommandExecutor(cmd);
  LOG.info(executor.toString());
  executor.execute();
  String output = executor.getOutput();
  if (!output.isEmpty()) { // getOutput() never returns null
    if (output.toLowerCase().contains("error")) {
      LOG.warn("Error is detected.");
      throw new IOException("Start error\n" + output);
    }
  }
}
Example 11: kill
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
public void kill(String newConfLocation) throws IOException {
  ShellCommandExecutor cme
      = buildCommandExecutor(STOP_COMMAND, newConfLocation);
  cme.execute();
  String output = cme.getOutput();
  if (!output.isEmpty()) { // getOutput() never returns null
    if (output.toLowerCase().contains("error")) {
      LOG.info("Error is detected.");
      throw new IOException("Kill error\n" + output);
    }
  }
}
Example 12: commnadExecutor
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
public String commnadExecutor(String[] execString, File dir, Map<String, String> env, long timeout)
    throws IOException {
  ShellCommandExecutor exec = new ShellCommandExecutor(execString, dir, env, timeout);
  LOGGER.info("Command to be executed : " + exec.toString());
  LOGGER.info("Command working Directory is : " + dir.getAbsolutePath());
  exec.execute();
  LOGGER.info("Status of the command executed, 0 means succesful : " + exec.getExitCode());
  LOGGER.info("Output of the command executed : " + exec.getOutput());
  return exec.getOutput();
}
Example 13: signalContainer
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
@Override
public boolean signalContainer(String user, String pid, Signal signal)
    throws IOException {
  String[] command =
      new String[] { containerExecutorExe,
          user,
          Integer.toString(Commands.SIGNAL_CONTAINER.getValue()),
          pid,
          Integer.toString(signal.getValue()) };
  ShellCommandExecutor shExec = new ShellCommandExecutor(command);
  if (LOG.isDebugEnabled()) {
    LOG.debug("signalContainer: " + Arrays.toString(command));
  }
  try {
    shExec.execute();
  } catch (ExitCodeException e) {
    int ret_code = shExec.getExitCode();
    if (ret_code == ResultCode.INVALID_CONTAINER_PID.getValue()) {
      return false;
    }
    LOG.warn("Error in signalling container " + pid + " with " + signal
        + "; exit = " + ret_code, e);
    logOutput(shExec.getOutput());
    throw new IOException("Problem signalling container " + pid + " with "
        + signal + "; output: " + shExec.getOutput() + " and exitCode: "
        + ret_code, e);
  }
  return true;
}
Example 14: startLocalizer
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
@Override
public void startLocalizer(Path nmPrivateContainerTokensPath,
    InetSocketAddress nmAddr, String user, String appId, String locId,
    LocalDirsHandlerService dirsHandler)
    throws IOException, InterruptedException {
  List<String> localDirs = dirsHandler.getLocalDirs();
  List<String> logDirs = dirsHandler.getLogDirs();
  verifyUsernamePattern(user);
  String runAsUser = getRunAsUser(user);
  List<String> command = new ArrayList<String>();
  addSchedPriorityCommand(command);
  command.addAll(Arrays.asList(containerExecutorExe,
      runAsUser,
      user,
      Integer.toString(Commands.INITIALIZE_CONTAINER.getValue()),
      appId,
      nmPrivateContainerTokensPath.toUri().getPath().toString(),
      StringUtils.join(",", localDirs),
      StringUtils.join(",", logDirs)));
  File jvm =                                 // use same jvm as parent
      new File(new File(System.getProperty("java.home"), "bin"), "java");
  command.add(jvm.toString());
  command.add("-classpath");
  command.add(System.getProperty("java.class.path"));
  String javaLibPath = System.getProperty("java.library.path");
  if (javaLibPath != null) {
    command.add("-Djava.library.path=" + javaLibPath);
  }
  buildMainArgs(command, user, appId, locId, nmAddr, localDirs);
  String[] commandArray = command.toArray(new String[command.size()]);
  ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray);
  if (LOG.isDebugEnabled()) {
    LOG.debug("initApplication: " + Arrays.toString(commandArray));
  }
  try {
    shExec.execute();
    if (LOG.isDebugEnabled()) {
      logOutput(shExec.getOutput());
    }
  } catch (ExitCodeException e) {
    int exitCode = shExec.getExitCode();
    LOG.warn("Exit code from container " + locId + " startLocalizer is : "
        + exitCode, e);
    logOutput(shExec.getOutput());
    throw new IOException("Application " + appId + " initialization failed" +
        " (exitCode=" + exitCode + ") with output: " + shExec.getOutput(), e);
  }
}
Example 15: createHardLinkMult
import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class this method depends on
protected static int createHardLinkMult(File parentDir,
    String[] fileBaseNames, File linkDir, int maxLength)
    throws IOException {
  if (parentDir == null) {
    throw new IOException(
        "invalid arguments to createHardLinkMult: parent directory is null");
  }
  if (linkDir == null) {
    throw new IOException(
        "invalid arguments to createHardLinkMult: link directory is null");
  }
  if (fileBaseNames == null) {
    throw new IOException(
        "invalid arguments to createHardLinkMult: "
        + "filename list can be empty but not null");
  }
  if (fileBaseNames.length == 0) {
    // the OS cmds can't handle an empty list of filenames,
    // but it's legal, so just return.
    return 0;
  }
  if (!linkDir.exists()) {
    throw new FileNotFoundException(linkDir + " not found.");
  }
  // if the list is too long, split into multiple invocations
  int callCount = 0;
  if (getLinkMultArgLength(parentDir, fileBaseNames, linkDir) > maxLength
      && fileBaseNames.length > 1) {
    String[] list1 = Arrays.copyOf(fileBaseNames, fileBaseNames.length / 2);
    callCount += createHardLinkMult(parentDir, list1, linkDir, maxLength);
    String[] list2 = Arrays.copyOfRange(fileBaseNames, fileBaseNames.length / 2,
        fileBaseNames.length);
    callCount += createHardLinkMult(parentDir, list2, linkDir, maxLength);
    return callCount;
  } else {
    callCount = 1;
  }
  // construct and execute shell command
  String[] hardLinkCommand = getHardLinkCommand.linkMult(fileBaseNames,
      linkDir);
  ShellCommandExecutor shexec = new ShellCommandExecutor(hardLinkCommand,
      parentDir, null, 0L);
  try {
    shexec.execute();
  } catch (ExitCodeException e) {
    throw new IOException(shexec.getOutput() + e.getMessage());
  }
  return callCount;
}
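A pattern that recurs in most of the examples above (mountCgroups, createHardLink, signalContainer, startLocalizer) is to pair getOutput() with getExitCode() when execute() fails, so the rethrown IOException carries both the captured command output and the exit status. The helper below is a minimal sketch of that pattern only; the class and method names (ShellOutputHelper, runAndCapture) and the error-message wording are illustrative assumptions, not part of the Hadoop API.

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

// Hypothetical helper distilling the execute()/getOutput() error-handling
// pattern used by the examples on this page.
public final class ShellOutputHelper {
  private ShellOutputHelper() {}

  public static String runAndCapture(String... command) throws IOException {
    ShellCommandExecutor shexec = new ShellCommandExecutor(command);
    try {
      shexec.execute();
    } catch (ExitCodeException e) {
      // Surface both the captured output and the exit code, as the
      // LinuxContainerExecutor and HardLink examples above do.
      throw new IOException("Command " + Arrays.toString(command)
          + " failed with exit code " + shexec.getExitCode()
          + "; output: " + shexec.getOutput(), e);
    }
    return shexec.getOutput();
  }
}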