當前位置: 首頁>>代碼示例>>Java>>正文


Java FileUtil.setExecutable方法代碼示例

本文整理匯總了Java中org.apache.hadoop.fs.FileUtil.setExecutable方法的典型用法代碼示例。如果您正苦於以下問題:Java FileUtil.setExecutable方法的具體用法?Java FileUtil.setExecutable怎麽用?Java FileUtil.setExecutable使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.fs.FileUtil的用法示例。


在下文中一共展示了FileUtil.setExecutable方法的9個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: testShellCommandTimeout

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
@Test
public void testShellCommandTimeout() throws Throwable {
  // setExecutable is a no-op on Windows, so the timeout cannot be exercised there.
  Assume.assumeFalse(WINDOWS);
  String rootDir = rootTestDir.getAbsolutePath();
  File shellFile = new File(rootDir, "timeout.sh");
  // Script sleeps well past the 100ms executor timeout below.
  String timeoutCommand = "sleep 4; echo \"hello\"";
  Shell.ShellCommandExecutor shexc;
  // try-with-resources closes the writer; the original's explicit close()
  // inside the try block was redundant (double close).
  try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
    writer.println(timeoutCommand);
  }
  FileUtil.setExecutable(shellFile, true);
  shexc = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
      null, null, 100);
  try {
    shexc.execute();
  } catch (Exception e) {
    // Expected: the executor throws when the command times out.
  }
  shellFile.delete();
  assertTrue("Script did not timeout" , shexc.isTimedOut());
}
 
開發者ID:nucypher,項目名稱:hadoop-oss,代碼行數:23,代碼來源:TestShell.java

示例2: setupMockExecutor

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
private void setupMockExecutor(String executorPath, Configuration conf)
    throws IOException {
  // Always stage the executor at tmpMockExecutor, because
  // PrivilegedOperationExecutor can only be initialized once per JVM.
  Files.copy(Paths.get(executorPath), Paths.get(tmpMockExecutor),
      REPLACE_EXISTING);

  // Make sure the staged copy carries the executable bit.
  File mockExecutor = new File(tmpMockExecutor);
  if (!FileUtil.canExecute(mockExecutor)) {
    FileUtil.setExecutable(mockExecutor, true);
  }

  // Point the NodeManager at the staged mock executor.
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH,
      mockExecutor.getAbsolutePath());
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:18,代碼來源:TestLinuxContainerExecutorWithMocks.java

示例3: testShellCommandTimeout

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
public void testShellCommandTimeout() throws Throwable {
  if (Shell.WINDOWS) {
    // setExecutable does not work on Windows
    return;
  }
  String rootDir = new File(System.getProperty(
      "test.build.data", "/tmp")).getAbsolutePath();
  File shellFile = new File(rootDir, "timeout.sh");
  // Script sleeps well past the 100ms executor timeout below.
  String timeoutCommand = "sleep 4; echo \"hello\"";
  // try-with-resources guarantees the writer is closed even if println throws
  // (the original leaked the stream on that path).
  try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
    writer.println(timeoutCommand);
  }
  FileUtil.setExecutable(shellFile, true);
  Shell.ShellCommandExecutor shexc =
      new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
          null, null, 100);
  try {
    shexc.execute();
  } catch (Exception e) {
    // Expected: the executor throws when the command times out.
  }
  shellFile.delete();
  // Fixed the original's garbled double-negative message "Script didnt not timeout".
  assertTrue("Script did not timeout" , shexc.isTimedOut());
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:25,代碼來源:TestShell.java

示例4: getConfigurationWithMockContainerExecutor

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
private Configuration getConfigurationWithMockContainerExecutor() {
  // Point the NM at the checked-in mock container-executor script,
  // ensuring it carries the executable bit first.
  File mockExecutor = new File("./src/test/resources/mock-container-executor");
  if (!FileUtil.canExecute(mockExecutor)) {
    FileUtil.setExecutable(mockExecutor, true);
  }
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH,
      mockExecutor.getAbsolutePath());
  return conf;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:10,代碼來源:TestDockerContainerRuntime.java

示例5: testShellScriptBuilderNonZeroExitCode

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
/**
 * Test that the script exits with a non-zero exit code when the command
 * it runs fails. (Fixed Javadoc typo: "exists" -> "exits".)
 * @throws IOException if the script cannot be written or executed
 */
@Test (timeout = 10000)
public void testShellScriptBuilderNonZeroExitCode() throws IOException {
  ShellScriptBuilder builder = ShellScriptBuilder.create();
  builder.command(Arrays.asList(new String[] {"unknownCommand"}));
  File shellFile = Shell.appendScriptExtension(tmpDir, "testShellScriptBuilderError");
  // try-with-resources closes the stream even if write() throws
  // (the original leaked the PrintStream on that path).
  try (PrintStream writer = new PrintStream(new FileOutputStream(shellFile))) {
    builder.write(writer);
  }
  try {
    FileUtil.setExecutable(shellFile, true);

    Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
        new String[]{shellFile.getAbsolutePath()}, tmpDir);
    try {
      shexc.execute();
      fail("builder shell command was expected to throw");
    }
    catch(IOException e) {
      // expected
      System.out.println("Received an expected exception: " + e.getMessage());
    }
  }
  finally {
    FileUtil.fullyDelete(shellFile);
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:31,代碼來源:TestContainerLaunch.java

示例6: setup

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
@Before
public void setup() {
  // The Linux container executor is only usable on Linux.
  assumeTrue(Shell.LINUX);

  // Ensure the checked-in mock container-executor script is runnable.
  File mockExecutor = new File("./src/test/resources/mock-container-executor");
  if (!FileUtil.canExecute(mockExecutor)) {
    FileUtil.setExecutable(mockExecutor, true);
  }
  String executorPath = mockExecutor.getAbsolutePath();

  // Build a configuration with unique per-run local/log dirs so parallel
  // or repeated runs do not collide.
  Configuration conf = new Configuration();
  yarnImage = "yarnImage";
  long time = System.currentTimeMillis();
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, executorPath);
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, "/tmp/nm-local-dir" + time);
  conf.set(YarnConfiguration.NM_LOG_DIRS, "/tmp/userlogs" + time);
  conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME, yarnImage);
  conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME , DOCKER_LAUNCH_COMMAND);

  dockerContainerExecutor = new DockerContainerExecutor();
  dirsHandler = new LocalDirsHandlerService();
  dirsHandler.init(conf);
  dockerContainerExecutor.setConf(conf);

  // Create a scratch working directory on the local filesystem.
  lfs = null;
  try {
    lfs = FileContext.getLocalFSFileContext();
    workDir = new Path("/tmp/temp-"+ System.currentTimeMillis());
    lfs.mkdir(workDir, FsPermission.getDirDefault(), true);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:31,代碼來源:TestDockerContainerExecutorWithMocks.java

示例7: testInvalidEnvSyntaxDiagnostics

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
@Test (timeout = 20000)
public void testInvalidEnvSyntaxDiagnostics() throws IOException  {
  // Verify that a syntactically broken environment value in the launch
  // script surfaces a useful "command not found" diagnostic.
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    Map<Path, List<String>> resources =
        new HashMap<Path, List<String>>();
    // try-with-resources closes the stream even if writeLaunchEnv throws
    // (the original leaked the FileOutputStream on that path).
    try (FileOutputStream fos = new FileOutputStream(shellFile)) {
      FileUtil.setExecutable(shellFile, true);

      Map<String, String> env = new HashMap<String, String>();
      // invalid env: embedded newlines and an unterminated value
      env.put(
          "APPLICATION_WORKFLOW_CONTEXT", "{\"workflowId\":\"609f91c5cd83\"," +
          "\"workflowName\":\"\n\ninsert table " +
          "\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, " );
      List<String> commands = new ArrayList<String>();
      new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
      fos.flush();
    }

    // LANG=C keeps the shell's error text in English so the assertions
    // below can match it.
    Map<String, String> cmdEnv = new HashMap<String, String>();
    cmdEnv.put("LANG", "C");
    Shell.ShellCommandExecutor shexc =
        new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
            tmpDir, cmdEnv);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch(ExitCodeException e){
      diagnostics = e.getMessage();
    }
    Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ?
        "is not recognized as an internal or external command" :
        "command not found"));
    Assert.assertTrue(shexc.getExitCode() != 0);
  }
  finally {
    // cleanup
    if (shellFile != null
        && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:49,代碼來源:TestContainerLaunch.java

示例8: testContainerLaunchStdoutAndStderrDiagnostics

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
@Test (timeout = 20000)
public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
  // Verify that the script's stderr shows up in the exception diagnostics,
  // its stdout in the captured output, and the exit code is propagated.
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    // echo "hello" to stdout and "error" to stderr and exit code with 2;
    String command = Shell.WINDOWS ?
        "@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2" :
        "echo \"hello\"; echo \"error\" 1>&2; exit 2;";
    // try-with-resources closes the writer even if println throws
    // (the original leaked it on that path).
    try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
      FileUtil.setExecutable(shellFile, true);
      writer.println(command);
    }
    Map<Path, List<String>> resources =
        new HashMap<Path, List<String>>();
    Map<String, String> env = new HashMap<String, String>();
    List<String> commands = new ArrayList<String>();
    commands.add(command);
    ContainerExecutor exec = new DefaultContainerExecutor();
    // Append the launch environment to the script; the stream is closed
    // even if writeLaunchEnv throws (also leaked in the original).
    try (FileOutputStream fos = new FileOutputStream(shellFile, true)) {
      exec.writeLaunchEnv(fos, env, resources, commands);
      fos.flush();
    }

    Shell.ShellCommandExecutor shexc =
        new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()}, tmpDir);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch(ExitCodeException e){
      diagnostics = e.getMessage();
    }
    // test stderr
    Assert.assertTrue(diagnostics.contains("error"));
    // test stdout
    Assert.assertTrue(shexc.getOutput().contains("hello"));
    Assert.assertTrue(shexc.getExitCode() == 2);
  }
  finally {
    // cleanup
    if (shellFile != null
        && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:50,代碼來源:TestContainerLaunch.java

示例9: testCheckpointWithFailedStorageDir

import org.apache.hadoop.fs.FileUtil; //導入方法依賴的package包/類
/**
 * Test that, if a storage directory is failed when a checkpoint occurs,
 * the non-failed storage directory receives the checkpoint.
 */
@Test
public void testCheckpointWithFailedStorageDir() throws Exception {
  MiniDFSCluster cluster = null;
  SecondaryNameNode secondary = null;
  // Tracked so the finally block can restore permissions even if the
  // test fails midway (otherwise cleanup of the dir itself would fail).
  File currentDir = null;
  
  Configuration conf = new HdfsConfiguration();

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
        .format(true).build();

    secondary = startSecondaryNameNode(conf);

    // Checkpoint once
    secondary.doCheckpoint();

    // Now primary NN experiences failure of a volume -- fake by
    // setting its current dir to a-x permissions
    NamenodeProtocols nn = cluster.getNameNodeRpc();
    NNStorage storage = cluster.getNameNode().getFSImage().getStorage();
    StorageDirectory sd0 = storage.getStorageDir(0);
    StorageDirectory sd1 = storage.getStorageDir(1);
    
    // Removing the execute bit makes the directory untraversable,
    // which the NN treats as a failed storage volume.
    currentDir = sd0.getCurrentDir();
    FileUtil.setExecutable(currentDir, false);

    // Upload checkpoint when NN has a bad storage dir. This should
    // succeed and create the checkpoint in the good dir.
    secondary.doCheckpoint();
    
    GenericTestUtils.assertExists(
        new File(sd1.getCurrentDir(), NNStorage.getImageFileName(2)));
    
    // Restore the good dir
    FileUtil.setExecutable(currentDir, true);
    nn.restoreFailedStorage("true");
    nn.rollEditLog();

    // Checkpoint again -- this should upload to both dirs
    secondary.doCheckpoint();
    
    // NOTE(review): txid 8 presumably reflects the edits/checkpoints
    // accumulated by the three doCheckpoint() calls above -- confirm
    // against the checkpoint txid accounting if this assertion breaks.
    assertNNHasCheckpoints(cluster, ImmutableList.of(8));
    assertParallelFilesInvariant(cluster, ImmutableList.of(secondary));
  } finally {
    // Always restore permissions so the test dir can be deleted.
    if (currentDir != null) {
      FileUtil.setExecutable(currentDir, true);
    }
    cleanup(secondary);
    secondary = null;
    cleanup(cluster);
    cluster = null;
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:59,代碼來源:TestCheckpoint.java


注:本文中的org.apache.hadoop.fs.FileUtil.setExecutable方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。