

Java ShellCommandExecutor.execute method code examples

This article collects typical usage examples of the Java method org.apache.hadoop.util.Shell.ShellCommandExecutor.execute. If you are wondering what exactly ShellCommandExecutor.execute does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.util.Shell.ShellCommandExecutor.


The following presents 15 code examples of ShellCommandExecutor.execute, sorted by popularity by default.
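Before turning to the examples, here is a minimal, self-contained sketch of the typical call pattern. The class name and the command being run (echo hello) are illustrative assumptions only, not taken from any of the projects below.

import java.io.IOException;

import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class ExecuteSketch {
  public static void main(String[] args) throws IOException {
    // Build the executor with the command and its arguments as a String array.
    ShellCommandExecutor shexec =
        new ShellCommandExecutor(new String[] { "echo", "hello" });
    // execute() runs the command synchronously; a non-zero exit code surfaces
    // as Shell.ExitCodeException, a subclass of IOException.
    shexec.execute();
    // After execution, the captured stdout and the exit code are available.
    System.out.println("output: " + shexec.getOutput());
    System.out.println("exit code: " + shexec.getExitCode());
  }
}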

Example 1: chmod

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Change the permissions on a file / directory, recursively, if
 * needed.
 * @param filename name of the file whose permissions are to change
 * @param perm permission string
 * @param recursive true, if permissions should be changed recursively
 * @return the exit code from the command.
 * @throws IOException
 */
public static int chmod(String filename, String perm, boolean recursive)
                          throws IOException {
  String [] cmd = Shell.getSetPermissionCommand(perm, recursive);
  String[] args = new String[cmd.length + 1];
  System.arraycopy(cmd, 0, args, 0, cmd.length);
  args[cmd.length] = new File(filename).getPath();
  ShellCommandExecutor shExec = new ShellCommandExecutor(args);
  try {
    shExec.execute();
  } catch (IOException e) {
    if(LOG.isDebugEnabled()) {
      LOG.debug("Error while changing permission : " + filename
                +" Exception: " + StringUtils.stringifyException(e));
    }
  }
  return shExec.getExitCode();
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 27, Source file: FileUtil.java
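A hedged illustration of how this helper might be called follows; the path and permission string are made-up values, and the snippet assumes the method lives in org.apache.hadoop.fs.FileUtil as in the source file above.

import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class ChmodSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical call site; "/tmp/scratch" and "755" are illustrative only.
    int exitCode = FileUtil.chmod("/tmp/scratch", "755", true);
    if (exitCode != 0) {
      System.err.println("chmod exited with non-zero code: " + exitCode);
    }
  }
}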

Example 2: isAvailable

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public static boolean isAvailable() {
  if (Shell.WINDOWS) {
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.WINUTILS, "help" });
    try {
      shellExecutor.execute();
    } catch (IOException e) {
      LOG.error(StringUtils.stringifyException(e));
    } finally {
      String output = shellExecutor.getOutput();
      if (output != null &&
          output.contains("Prints to stdout a list of processes in the task")) {
        return true;
      }
    }
  }
  return false;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 19, Source file: WindowsBasedProcessTree.java

Example 3: run

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public void run() {
  try {
    Vector<String> args = new Vector<String>();
    if (isSetsidAvailable()) {
      args.add("setsid");
    }
    args.add("bash");
    args.add("-c");
    args.add(" echo $$ > " + pidFile + "; sh " + shellScript + " " + N
        + ";");
    shexec = new ShellCommandExecutor(args.toArray(new String[0]));
    shexec.execute();
  } catch (ExitCodeException ee) {
    LOG.info("Shell Command exit with a non-zero exit code. This is"
        + " expected as we are killing the subprocesses of the"
        + " task intentionally. " + ee);
  } catch (IOException ioe) {
    LOG.info("Error executing shell command " + ioe);
  } finally {
    LOG.info("Exit code: " + shexec.getExitCode());
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 23, Source file: TestProcfsBasedProcessTree.java

Example 4: hasPerlSupport

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Is perl supported on this machine ?
 * @return true if perl is available and is working as expected
 */
public static boolean hasPerlSupport() {
  boolean hasPerl = false;
  ShellCommandExecutor shexec = new ShellCommandExecutor(
    new String[] { "perl", "-e", "print 42" });
  try {
    shexec.execute();
    if (shexec.getOutput().equals("42")) {
      hasPerl = true;
    }
    else {
      LOG.warn("Perl is installed, but isn't behaving as expected.");
    }
  } catch (Exception e) {
    LOG.warn("Could not run perl: " + e);
  }
  return hasPerl;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 22, Source file: UtilTest.java

Example 5: chmod

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Change the permissions on a file / directory, recursively, if
 * needed.
 * @param filename name of the file whose permissions are to change
 * @param perm permission string
 * @param recursive true, if permissions should be changed recursively
 * @return the exit code from the command.
 * @throws IOException
 */
public static int chmod(String filename, String perm, boolean recursive)
                          throws IOException {
  String [] cmd = Shell.getSetPermissionCommand(perm, recursive);
  String[] args = new String[cmd.length + 1];
  System.arraycopy(cmd, 0, args, 0, cmd.length);
  args[cmd.length] = new File(filename).getPath();
  ShellCommandExecutor shExec = new ShellCommandExecutor(args);
  try {
    shExec.execute();
  } catch (IOException e) {
    if(LOG.isDebugEnabled()) {
      LOG.debug("Error while changing permission : " + filename 
                +" Exception: " + StringUtils.stringifyException(e));
    }
  }
  return shExec.getExitCode();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 27, Source file: FileUtil.java

Example 6: isAvailable

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public static boolean isAvailable() {
  if (Shell.WINDOWS) {
    if (!Shell.hasWinutilsPath()) {
      return false;
    }
    ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
        new String[] { Shell.getWinUtilsPath(), "help" });
    try {
      shellExecutor.execute();
    } catch (IOException e) {
      LOG.error(StringUtils.stringifyException(e));
    } finally {
      String output = shellExecutor.getOutput();
      if (output != null &&
          output.contains("Prints to stdout a list of processes in the task")) {
        return true;
      }
    }
  }
  return false;
}
 
Developer ID: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 22, Source file: WindowsBasedProcessTree.java

Example 7: mountCgroups

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public void mountCgroups(List<String> cgroupKVs, String hierarchy)
       throws IOException {
  List<String> command = new ArrayList<String>(
          Arrays.asList(containerExecutorExe, "--mount-cgroups", hierarchy));
  command.addAll(cgroupKVs);
  
  String[] commandArray = command.toArray(new String[command.size()]);
  ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray);

  if (LOG.isDebugEnabled()) {
      LOG.debug("mountCgroups: " + Arrays.toString(commandArray));
  }

  try {
      shExec.execute();
  } catch (IOException e) {
      int ret_code = shExec.getExitCode();
      LOG.warn("Exception in LinuxContainerExecutor mountCgroups ", e);
      logOutput(shExec.getOutput());
      throw new IOException("Problem mounting cgroups " + cgroupKVs + 
        "; exit code = " + ret_code + " and output: " + shExec.getOutput(), e);
  }
}
 
Developer ID: yncxcw, Project: big-c, Lines of code: 24, Source file: LinuxContainerExecutor.java

Example 8: init

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
@Override 
public void init() throws IOException {        
  // Send command to executor which will just start up, 
  // verify configuration/permissions and exit
  List<String> command = new ArrayList<String>(
      Arrays.asList(containerExecutorExe,
          "--checksetup"));
  String[] commandArray = command.toArray(new String[command.size()]);
  ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray);
  if (LOG.isDebugEnabled()) {
    LOG.debug("checkLinuxExecutorSetup: " + Arrays.toString(commandArray));
  }
  try {
    shExec.execute();
  } catch (ExitCodeException e) {
    int exitCode = shExec.getExitCode();
    LOG.warn("Exit code from container executor initialization is : "
        + exitCode, e);
    logOutput(shExec.getOutput());
    throw new IOException("Linux container executor not configured properly"
        + " (error=" + exitCode + ")", e);
  }
 
  resourcesHandler.init(this);
}
 
Developer ID: yncxcw, Project: big-c, Lines of code: 26, Source file: LinuxContainerExecutor.java

Example 9: addTrackers

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public List<TaskTrackerLoadInfo> addTrackers(List<TaskTrackerLoadInfo> trackers) {
  List<TaskTrackerLoadInfo> trackersAdded = new ArrayList<TaskTrackerLoadInfo>();
  for (TaskTrackerLoadInfo tracker : trackers) {
    String host = tracker.getTaskTrackerHost();
    ShellCommandExecutor addHostCommand = 
        new ShellCommandExecutor(
        new String[]{"ssh", hostName,
        "cd " + hadoopHome + " && " + "bin/hadoop " +
        TTMover.class.getCanonicalName() +
        " -add " + host});
    try {
      addHostCommand.execute();
      trackersAdded.add(tracker);
    } catch (IOException ex) {
      DynamicCloudsDaemon.LOG.error("Error adding tracker " + tracker.getTaskTrackerName(), ex);
    }
  }
  return trackersAdded;
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 20, Source file: Cluster.java

Example 10: run

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
public void run() {
  try {
    Vector<String> args = new Vector<String>();
    if(ProcessTree.isSetsidAvailable) {
      args.add("setsid");
    }
    args.add("bash");
    args.add("-c");
    args.add(" echo $$ > " + pidFile + "; sh " +
                      shellScript + " " + N + ";") ;
    shexec = new ShellCommandExecutor(args.toArray(new String[0]));
    shexec.execute();
  } catch (ExitCodeException ee) {
    LOG.info("Shell Command exit with a non-zero exit code. This is" +
             " expected as we are killing the subprocesses of the" +
             " task intentionally. " + ee);
  } catch (IOException ioe) {
    LOG.info("Error executing shell command " + ioe);
  } finally {
    LOG.info("Exit code: " + shexec.getExitCode());
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 23, Source file: TestProcfsBasedProcessTree.java

Example 11: killProcess

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Sends signal to process, forcefully terminating the process.
 * 
 * @param pid process id
 * @param signal the signal number to send
 */
public static void killProcess(String pid, Signal signal) {

  //If process tree is not alive then return immediately.
  if(!ProcessTree.isAlive(pid)) {
    return;
  }
  String[] args = { "kill", "-" + signal.getValue(), pid };
  ShellCommandExecutor shexec = new ShellCommandExecutor(args);
  try {
    shexec.execute();
  } catch (IOException e) {
    LOG.warn("Error sending signal " + signal + " to process "+ pid + " ."+ 
        StringUtils.stringifyException(e));
  } finally {
    LOG.info("Killing process " + pid + " with signal " + signal + 
             ". Exit code " + shexec.getExitCode());
  }
}
 
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 25, Source file: ProcessTree.java
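The same kill -&lt;signal&gt; &lt;pid&gt; pattern can be reproduced directly with ShellCommandExecutor. The standalone sketch below is an assumption about that pattern, not part of ProcessTree, and the pid is a placeholder.

import java.io.IOException;

import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class KillSketch {
  public static void main(String[] args) {
    String pid = "12345";   // placeholder process id
    int signal = 15;        // SIGTERM
    ShellCommandExecutor shexec =
        new ShellCommandExecutor(new String[] { "kill", "-" + signal, pid });
    try {
      shexec.execute();
    } catch (IOException e) {
      System.err.println("Failed to signal process " + pid + ": " + e);
    } finally {
      System.out.println("kill exit code: " + shexec.getExitCode());
    }
  }
}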

Example 12: killProcessGroup

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Sends kill signal to all process belonging to same process group,
 * forcefully terminating the process group.
 * 
 * @param pgrpId process group id
 */
public static void killProcessGroup(String pgrpId) {

  //If process tree is not alive then return immediately.
  if(!ProcessTree.isProcessGroupAlive(pgrpId)) {
    return;
  }

  String[] args = { "kill", "-9", "-"+pgrpId };
  ShellCommandExecutor shexec = new ShellCommandExecutor(args);
  try {
    shexec.execute();
  } catch (IOException e) {
    LOG.warn("Error sending SIGKILL to process group "+ pgrpId + " ."+ 
        StringUtils.stringifyException(e));
  } finally {
    LOG.info("Killing process group" + pgrpId + " with SIGKILL. Exit code "
        + shexec.getExitCode());
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 26, Source file: ProcessTree.java

Example 13: runCommand

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Helper method that runs a LinuxTaskController command
 * 
 * @param taskCommand
 * @param user
 * @param cmdArgs
 * @param env
 * @throws IOException
 */
private void runCommand(TaskCommands taskCommand, String user,
    List<String> cmdArgs, File workDir, Map<String, String> env)
    throws IOException {

  ShellCommandExecutor shExec =
      buildTaskControllerExecutor(taskCommand, user, cmdArgs, workDir, env);
  try {
    shExec.execute();
  } catch (Exception e) {
    LOG.warn("Exit code from " + taskCommand.toString() + " is : "
        + shExec.getExitCode());
    LOG.warn("Exception thrown by " + taskCommand.toString() + " : "
        + StringUtils.stringifyException(e));
    LOG.info("Output from LinuxTaskController's " + taskCommand.toString()
        + " follows:");
    logOutput(shExec.getOutput());
    throw new IOException(e);
  }
  if (LOG.isDebugEnabled()) {
    LOG.info("Output from LinuxTaskController's " + taskCommand.toString()
        + " follows:");
    logOutput(shExec.getOutput());
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 34, Source file: LinuxTaskController.java

Example 14: chmod

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Change the permissions on a file / directory, recursively, if
 * needed.
 * @param filename name of the file whose permissions are to change
 * @param perm permission string
 * @param recursive true, if permissions should be changed recursively
 * @return the exit code from the command.
 * @throws IOException
 * @throws InterruptedException
 */
public static int chmod(String filename, String perm, boolean recursive)
                          throws IOException, InterruptedException {
  StringBuffer cmdBuf = new StringBuffer();
  cmdBuf.append("chmod ");
  if (recursive) {
    cmdBuf.append("-R ");
  }
  cmdBuf.append(perm).append(" ");
  cmdBuf.append(filename);
  String[] shellCmd = {"bash", "-c" ,cmdBuf.toString()};
  ShellCommandExecutor shExec = new ShellCommandExecutor(shellCmd);
  try {
    shExec.execute();
  } catch (IOException e) {
    if(LOG.isDebugEnabled()) {
      LOG.debug("Error while changing permission : " + filename 
          +" Exception: " + StringUtils.stringifyException(e));
    }
  }
  return shExec.getExitCode();
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 32, Source file: FileUtil.java

Example 15: getUnixGroups

import org.apache.hadoop.util.Shell.ShellCommandExecutor; // import the package/class the method depends on
/**
 * Get the current user's group list from Unix by running the command 'groups'
 * NOTE. For non-existing user it will return EMPTY list.
 *
 * @param user get groups for this user
 * @return the groups list that the <code>user</code> belongs to. The primary
 *         group is returned first.
 * @throws IOException if encounter any error when running the command
 */
private List<String> getUnixGroups(String user) throws IOException {
  ShellCommandExecutor executor = createGroupExecutor(user);

  List<String> groups;
  try {
    executor.execute();
    groups = resolveFullGroupNames(executor.getOutput());
  } catch (ExitCodeException e) {
    try {
      groups = resolvePartialGroupNames(user, e.getMessage(),
          executor.getOutput());
    } catch (PartialGroupNameException pge) {
      LOG.warn("unable to return groups for user " + user, pge);
      return new LinkedList<>();
    }
  }

  // remove duplicated primary group
  if (!Shell.WINDOWS) {
    for (int i = 1; i < groups.size(); i++) {
      if (groups.get(i).equals(groups.get(0))) {
        groups.remove(i);
        break;
      }
    }
  }

  return groups;
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 39, Source file: ShellBasedUnixGroupsMapping.java
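For context, the command that createGroupExecutor wraps boils down to a shell lookup of the user's groups. The sketch below recreates that lookup directly with ShellCommandExecutor on a Unix-like system; it is an assumption about the general pattern (the real class builds its command line via Shell.getGroupsForUserCommand), not the actual ShellBasedUnixGroupsMapping implementation.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class GroupsSketch {
  public static void main(String[] args) throws IOException {
    String user = System.getProperty("user.name");
    // Run the Unix "groups <user>" command for the current user.
    ShellCommandExecutor executor =
        new ShellCommandExecutor(new String[] { "bash", "-c", "groups " + user });
    executor.execute();
    // Output looks like "user : group1 group2 ..."; keep only the group names.
    String output = executor.getOutput().trim();
    String names = output.contains(":")
        ? output.substring(output.indexOf(':') + 1).trim()
        : output;
    List<String> groups = Arrays.asList(names.split("\\s+"));
    System.out.println("groups for " + user + ": " + groups);
  }
}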


Note: The org.apache.hadoop.util.Shell.ShellCommandExecutor.execute method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Please consult the License of the corresponding project before redistributing or reusing the code; do not reproduce this article without permission.