This article collects typical usage examples of the FileUtil.setExecutable method from the Java class org.apache.hadoop.fs.FileUtil. If you are wondering what FileUtil.setExecutable does, how to use it, or where to find examples of it, the curated method examples below should help; you can also explore the enclosing class org.apache.hadoop.fs.FileUtil for more context.
The following presents 9 code examples of the FileUtil.setExecutable method, sorted by popularity.
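Before the full examples, here is a minimal sketch of the pattern most of them share: write a script to disk, mark it executable with FileUtil.setExecutable, then run it through Shell.ShellCommandExecutor. This sketch is illustrative and not taken from the examples below; the class name, temporary file, and script body are made up for demonstration.

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Shell;

public class SetExecutableSketch {
  public static void main(String[] args) throws Exception {
    // Write a trivial shell script to a temporary file (illustrative only).
    File script = File.createTempFile("demo", ".sh");
    try (PrintWriter writer = new PrintWriter(new FileOutputStream(script))) {
      writer.println("echo \"hello\"");
    }
    // setExecutable(File, boolean) applies the execute permission in a
    // platform-appropriate way and returns true on success.
    if (!FileUtil.canExecute(script)) {
      FileUtil.setExecutable(script, true);
    }
    // Run the now-executable script and print what it wrote to stdout.
    Shell.ShellCommandExecutor shexc =
        new Shell.ShellCommandExecutor(new String[]{script.getAbsolutePath()});
    shexc.execute();
    System.out.println(shexc.getOutput());
    script.delete();
  }
}

Note the caveat that appears in Example 3 below: setExecutable does not work on Windows, so these tests typically skip that platform.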
Example 1: testShellCommandTimeout
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
@Test
public void testShellCommandTimeout() throws Throwable {
  Assume.assumeFalse(WINDOWS);
  String rootDir = rootTestDir.getAbsolutePath();
  File shellFile = new File(rootDir, "timeout.sh");
  String timeoutCommand = "sleep 4; echo \"hello\"";
  Shell.ShellCommandExecutor shexc;
  try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
    writer.println(timeoutCommand);
  }
  FileUtil.setExecutable(shellFile, true);
  shexc = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
      null, null, 100);
  try {
    shexc.execute();
  } catch (Exception e) {
    // An exception is thrown when the command times out.
  }
  shellFile.delete();
  assertTrue("Script did not timeout", shexc.isTimedOut());
}
Example 2: setupMockExecutor
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
private void setupMockExecutor(String executorPath, Configuration conf)
    throws IOException {
  // We'll always use tmpMockExecutor, since
  // PrivilegedOperationExecutor can only be initialized once.
  Files.copy(Paths.get(executorPath), Paths.get(tmpMockExecutor),
      REPLACE_EXISTING);
  File executor = new File(tmpMockExecutor);
  if (!FileUtil.canExecute(executor)) {
    FileUtil.setExecutable(executor, true);
  }
  String executorAbsolutePath = executor.getAbsolutePath();
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH,
      executorAbsolutePath);
}
Example 3: testShellCommandTimeout
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
public void testShellCommandTimeout() throws Throwable {
  if (Shell.WINDOWS) {
    // setExecutable does not work on Windows
    return;
  }
  String rootDir = new File(System.getProperty(
      "test.build.data", "/tmp")).getAbsolutePath();
  File shellFile = new File(rootDir, "timeout.sh");
  String timeoutCommand = "sleep 4; echo \"hello\"";
  PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
  writer.println(timeoutCommand);
  writer.close();
  FileUtil.setExecutable(shellFile, true);
  Shell.ShellCommandExecutor shexc
      = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
          null, null, 100);
  try {
    shexc.execute();
  } catch (Exception e) {
    // An exception is thrown when the command times out.
  }
  shellFile.delete();
  assertTrue("Script did not time out", shexc.isTimedOut());
}
Example 4: getConfigurationWithMockContainerExecutor
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
private Configuration getConfigurationWithMockContainerExecutor() {
  File f = new File("./src/test/resources/mock-container-executor");
  if (!FileUtil.canExecute(f)) {
    FileUtil.setExecutable(f, true);
  }
  String executorPath = f.getAbsolutePath();
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, executorPath);
  return conf;
}
Example 5: testShellScriptBuilderNonZeroExitCode
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
/**
 * Test that the script exits with a non-zero exit code when the command fails.
 * @throws IOException
 */
@Test (timeout = 10000)
public void testShellScriptBuilderNonZeroExitCode() throws IOException {
  ShellScriptBuilder builder = ShellScriptBuilder.create();
  builder.command(Arrays.asList(new String[] {"unknownCommand"}));
  File shellFile = Shell.appendScriptExtension(tmpDir, "testShellScriptBuilderError");
  PrintStream writer = new PrintStream(new FileOutputStream(shellFile));
  builder.write(writer);
  writer.close();
  try {
    FileUtil.setExecutable(shellFile, true);
    Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
        new String[]{shellFile.getAbsolutePath()}, tmpDir);
    try {
      shexc.execute();
      fail("builder shell command was expected to throw");
    } catch (IOException e) {
      // expected
      System.out.println("Received an expected exception: " + e.getMessage());
    }
  } finally {
    FileUtil.fullyDelete(shellFile);
  }
}
Example 6: setup
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
@Before
public void setup() {
  assumeTrue(Shell.LINUX);
  File f = new File("./src/test/resources/mock-container-executor");
  if (!FileUtil.canExecute(f)) {
    FileUtil.setExecutable(f, true);
  }
  String executorPath = f.getAbsolutePath();
  Configuration conf = new Configuration();
  yarnImage = "yarnImage";
  long time = System.currentTimeMillis();
  conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, executorPath);
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, "/tmp/nm-local-dir" + time);
  conf.set(YarnConfiguration.NM_LOG_DIRS, "/tmp/userlogs" + time);
  conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME, yarnImage);
  conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME, DOCKER_LAUNCH_COMMAND);
  dockerContainerExecutor = new DockerContainerExecutor();
  dirsHandler = new LocalDirsHandlerService();
  dirsHandler.init(conf);
  dockerContainerExecutor.setConf(conf);
  lfs = null;
  try {
    lfs = FileContext.getLocalFSFileContext();
    workDir = new Path("/tmp/temp-" + System.currentTimeMillis());
    lfs.mkdir(workDir, FsPermission.getDirDefault(), true);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Example 7: testInvalidEnvSyntaxDiagnostics
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
@Test (timeout = 20000)
public void testInvalidEnvSyntaxDiagnostics() throws IOException {
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    Map<Path, List<String>> resources =
        new HashMap<Path, List<String>>();
    FileOutputStream fos = new FileOutputStream(shellFile);
    FileUtil.setExecutable(shellFile, true);
    Map<String, String> env = new HashMap<String, String>();
    // invalid env
    env.put(
        "APPLICATION_WORKFLOW_CONTEXT", "{\"workflowId\":\"609f91c5cd83\"," +
        "\"workflowName\":\"\n\ninsert table " +
        "\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, ");
    List<String> commands = new ArrayList<String>();
    new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
    fos.flush();
    fos.close();
    // LANG is assumed to be set to C.
    Map<String, String> cmdEnv = new HashMap<String, String>();
    cmdEnv.put("LANG", "C");
    Shell.ShellCommandExecutor shexc
        = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
            tmpDir, cmdEnv);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch (ExitCodeException e) {
      diagnostics = e.getMessage();
    }
    Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ?
        "is not recognized as an internal or external command" :
        "command not found"));
    Assert.assertTrue(shexc.getExitCode() != 0);
  } finally {
    // cleanup
    if (shellFile != null && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
Example 8: testContainerLaunchStdoutAndStderrDiagnostics
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
@Test (timeout = 20000)
public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    // Echo "hello" to stdout, echo "error" to stderr, and exit with code 2.
    String command = Shell.WINDOWS ?
        "@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2" :
        "echo \"hello\"; echo \"error\" 1>&2; exit 2;";
    PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
    FileUtil.setExecutable(shellFile, true);
    writer.println(command);
    writer.close();
    Map<Path, List<String>> resources =
        new HashMap<Path, List<String>>();
    FileOutputStream fos = new FileOutputStream(shellFile, true);
    Map<String, String> env = new HashMap<String, String>();
    List<String> commands = new ArrayList<String>();
    commands.add(command);
    ContainerExecutor exec = new DefaultContainerExecutor();
    exec.writeLaunchEnv(fos, env, resources, commands);
    fos.flush();
    fos.close();
    Shell.ShellCommandExecutor shexc
        = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()}, tmpDir);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch (ExitCodeException e) {
      diagnostics = e.getMessage();
    }
    // test stderr
    Assert.assertTrue(diagnostics.contains("error"));
    // test stdout
    Assert.assertTrue(shexc.getOutput().contains("hello"));
    Assert.assertTrue(shexc.getExitCode() == 2);
  } finally {
    // cleanup
    if (shellFile != null && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
Example 9: testCheckpointWithFailedStorageDir
import org.apache.hadoop.fs.FileUtil; // import the package/class the method depends on
/**
 * Test that, if a storage directory fails when a checkpoint occurs,
 * the non-failed storage directory receives the checkpoint.
 */
@Test
public void testCheckpointWithFailedStorageDir() throws Exception {
  MiniDFSCluster cluster = null;
  SecondaryNameNode secondary = null;
  File currentDir = null;
  Configuration conf = new HdfsConfiguration();
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
        .format(true).build();
    secondary = startSecondaryNameNode(conf);
    // Checkpoint once
    secondary.doCheckpoint();
    // Now primary NN experiences failure of a volume -- fake by
    // setting its current dir to a-x permissions
    NamenodeProtocols nn = cluster.getNameNodeRpc();
    NNStorage storage = cluster.getNameNode().getFSImage().getStorage();
    StorageDirectory sd0 = storage.getStorageDir(0);
    StorageDirectory sd1 = storage.getStorageDir(1);
    currentDir = sd0.getCurrentDir();
    FileUtil.setExecutable(currentDir, false);
    // Upload checkpoint when NN has a bad storage dir. This should
    // succeed and create the checkpoint in the good dir.
    secondary.doCheckpoint();
    GenericTestUtils.assertExists(
        new File(sd1.getCurrentDir(), NNStorage.getImageFileName(2)));
    // Restore the good dir
    FileUtil.setExecutable(currentDir, true);
    nn.restoreFailedStorage("true");
    nn.rollEditLog();
    // Checkpoint again -- this should upload to both dirs
    secondary.doCheckpoint();
    assertNNHasCheckpoints(cluster, ImmutableList.of(8));
    assertParallelFilesInvariant(cluster, ImmutableList.of(secondary));
  } finally {
    if (currentDir != null) {
      FileUtil.setExecutable(currentDir, true);
    }
    cleanup(secondary);
    secondary = null;
    cleanup(cluster);
    cluster = null;
  }
}