本文整理汇总了Java中org.apache.hadoop.util.RunJar.unJar方法的典型用法代码示例。如果您正苦于以下问题:Java RunJar.unJar方法的具体用法?Java RunJar.unJar怎么用?Java RunJar.unJar使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.util.RunJar
的用法示例。
在下文中一共展示了RunJar.unJar方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: localizeJobJarFile
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf job configuration whose jar path is read and then
 *        rewritten to point at the localized copy
 * @throws IOException if the jar cannot be copied, unpacked, or chmodded
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
// copy Jar file to the local FS and unjar it.
String jarFile = localJobConf.getJar();
FileStatus status = null;
long jarFileSize = -1;
// A job may legitimately have no jar (e.g. pure streaming); skip quietly.
if (jarFile != null) {
Path jarFilePath = new Path(jarFile);
FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
try {
status = userFs.getFileStatus(jarFilePath);
jarFileSize = status.getLen();
} catch (FileNotFoundException fe) {
// Source jar missing: record size as unknown (-1) and let the copy
// below surface the real error if the jar truly is not there.
jarFileSize = -1;
}
// Here we check for five times the size of jarFileSize to accommodate for
// unjarring the jar file in the jars directory
Path localJarFile =
lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);
//Download job.jar
userFs.copyToLocalFile(jarFilePath, localJarFile);
// Point the conf at the localized jar so child JVMs pick it up.
localJobConf.setJar(localJarFile.toString());
// also unjar the parts of the job.jar that need to end up on the
// classpath, or explicitly requested by the user.
RunJar.unJar(
new File(localJarFile.toString()),
new File(localJarFile.getParent().toString()),
localJobConf.getJarUnpackPattern());
// Make the unpacked tree readable/executable by all so task JVMs can
// load classes from it.
FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
}
}
示例2: localizeJobJarFile
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf job configuration; its jar path is read and then
 *        updated to the localized location
 * @throws IOException if the jar cannot be copied, unpacked, or chmodded
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
  String jobJar = localJobConf.getJar();
  // Nothing to localize when the job carries no jar.
  if (jobJar == null) {
    return;
  }
  Path remoteJar = new Path(jobJar);
  FileSystem remoteFs = remoteJar.getFileSystem(localJobConf);
  long remoteJarLen;
  try {
    remoteJarLen = remoteFs.getFileStatus(remoteJar).getLen();
  } catch (FileNotFoundException missing) {
    // Size unknown; the copy below will report the error if the jar
    // is genuinely absent.
    remoteJarLen = -1;
  }
  // Reserve five times the jar size so the unpacked contents fit in the
  // jars directory alongside the jar itself.
  Path localJar =
      lDirAlloc.getLocalPathForWrite(JARDST, 5 * remoteJarLen, ttConf);
  // Download job.jar and point the conf at the local copy.
  remoteFs.copyToLocalFile(remoteJar, localJar);
  localJobConf.setJar(localJar.toString());
  // Also un-jar the job.jar files. We un-jar it so that classes inside
  // sub-directories, for e.g., lib/, classes/ are available on class-path
  RunJar.unJar(new File(localJar.toString()),
      new File(localJar.getParent().toString()));
  FileUtil.chmod(localJar.getParent().toString(), "ugo+rx", true);
}
示例3: localizeJobJarFile
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param localJobConf job configuration; its jar path is read and then
 *        updated to the localized location
 * @throws IOException on copy/unjar failure, or wrapping an (in practice
 *         never thrown) InterruptedException from chmod — see note below
 */
private void localizeJobJarFile(JobConf localJobConf) throws IOException {
// copy Jar file to the local FS and unjar it.
String jarFile = localJobConf.getJar();
FileStatus status = null;
long jarFileSize = -1;
// A job may legitimately have no jar configured; skip quietly.
if (jarFile != null) {
Path jarFilePath = new Path(jarFile);
FileSystem userFs = jarFilePath.getFileSystem(localJobConf);
try {
status = userFs.getFileStatus(jarFilePath);
jarFileSize = status.getLen();
} catch (FileNotFoundException fe) {
// Source jar missing: record size as unknown (-1); the copy below
// will surface the real error if the jar truly is not there.
jarFileSize = -1;
}
// Here we check for five times the size of jarFileSize to accommodate for
// unjarring the jar file in the jars directory
Path localJarFile =
lDirAlloc.getLocalPathForWrite(JARDST, 5 * jarFileSize, ttConf);
//Download job.jar
userFs.copyToLocalFile(jarFilePath, localJarFile);
localJobConf.setJar(localJarFile.toString());
// also unjar the parts of the job.jar that need to end up on the
// classpath, or explicitly requested by the user.
RunJar.unJar(
new File(localJarFile.toString()),
new File(localJarFile.getParent().toString()),
localJobConf.getJarUnpackPattern());
try {
// Make the unpacked tree readable/executable by all so task JVMs can
// load classes from it.
FileUtil.chmod(localJarFile.getParent().toString(), "ugo+rx", true);
} catch (InterruptedException ie) {
// This exception is never actually thrown, but the signature says
// it is, and we can't make the incompatible change within CDH
throw new IOException("Interrupted while chmodding", ie);
}
}
}
示例4: localizeJobJarFile
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Download the job jar file from FS to the local file system and unjar it.
 * Set the local jar file in the passed configuration.
 *
 * @param user user the job belongs to (used to build the local jar path)
 * @param jobId job whose jar is localized
 * @param localFs local file system handle
 * @param localJobConf job configuration; its jar path is read and then
 *        updated to the localized location
 * @throws IOException if the jar cannot be copied or unpacked
 * @throws InterruptedException if localization is interrupted
 */
private void localizeJobJarFile(String user, JobID jobId, FileSystem localFs,
    JobConf localJobConf)
    throws IOException, InterruptedException {
  String jobJar = localJobConf.getJar();
  // No jar configured means nothing to localize.
  if (jobJar == null) {
    return;
  }
  Path remoteJar = new Path(jobJar);
  FileSystem remoteFs = getFS(remoteJar, jobId, localJobConf);
  long remoteJarLen;
  try {
    remoteJarLen = remoteFs.getFileStatus(remoteJar).getLen();
  } catch (FileNotFoundException missing) {
    // Size unknown; the copy below will report the error if the jar
    // is genuinely absent.
    remoteJarLen = -1;
  }
  // Reserve five times the jar size so the unpacked contents fit in the
  // jars directory alongside the jar itself.
  Path localJar = lDirAlloc.getLocalPathForWrite(
      getJobJarFile(user, jobId.toString()), 5 * remoteJarLen, fConf);
  // Download job.jar and point the conf at the local copy.
  remoteFs.copyToLocalFile(remoteJar, localJar);
  localJobConf.setJar(localJar.toString());
  // Un-jar the parts of the job.jar that need to be added to the classpath
  RunJar.unJar(
      new File(localJar.toString()),
      new File(localJar.getParent().toString()),
      localJobConf.getJarUnpackPattern());
}
示例5: submitAndMonitorJob
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Submits the configured job and, unless running in the background,
 * monitors it to completion, logging progress and errors.
 *
 * @return 0 on success; 1 if the job failed; 2-6 for distinct classes of
 *         submission/monitoring errors (see catch clauses)
 * @throws IOException only from JobClient construction; submission I/O
 *         errors are caught and mapped to return code 5
 */
public int submitAndMonitorJob() throws IOException {
  if (jar_ != null && isLocalHadoop()) {
    // getAbs became required when shell and subvm have different working dirs...
    File wd = new File(".").getAbsoluteFile();
    // Local mode: unpack the jar into the working dir so its classes load.
    RunJar.unJar(new File(jar_), wd);
  }
  // if jobConf_ changes must recreate a JobClient
  jc_ = new JobClient(jobConf_);
  running_ = null;
  try {
    running_ = jc_.submitJob(jobConf_);
    jobId_ = running_.getID();
    if (background_) {
      LOG.info("Job is running in background.");
    } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
      LOG.error("Job not successful!");
      return 1;
    }
    LOG.info("Output directory: " + output_);
  } catch(FileNotFoundException fe) {
    LOG.error("Error launching job , bad input path : " + fe.getMessage());
    return 2;
  } catch(InvalidJobConfException je) {
    LOG.error("Error launching job , Invalid job conf : " + je.getMessage());
    return 3;
  } catch(FileAlreadyExistsException fae) {
    LOG.error("Error launching job , Output path already exists : "
      + fae.getMessage());
    return 4;
  } catch(IOException ioe) {
    LOG.error("Error Launching job : " + ioe.getMessage());
    return 5;
  } catch (InterruptedException ie) {
    // Restore the interrupt status so callers up the stack can observe
    // that this thread was interrupted; swallowing it loses the signal.
    Thread.currentThread().interrupt();
    LOG.error("Error monitoring job : " + ie.getMessage());
    return 6;
  } finally {
    jc_.close();
  }
  return 0;
}
示例6: submitAndMonitorJob
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Submits the configured job and, unless running in the background,
 * monitors it to completion, logging progress and errors.
 *
 * @return 0 on success; 1 if the job failed; 2-6 for distinct classes of
 *         submission/monitoring errors (see catch clauses)
 * @throws IOException only from JobClient construction; submission I/O
 *         errors are caught and mapped to return code 5
 */
public int submitAndMonitorJob() throws IOException {
  if (jar_ != null && isLocalHadoop()) {
    // getAbs became required when shell and subvm have different working dirs...
    File wd = new File(".").getAbsoluteFile();
    // Local mode: unpack the jar into the working dir so its classes load.
    RunJar.unJar(new File(jar_), wd);
  }
  // if jobConf_ changes must recreate a JobClient
  jc_ = new JobClient(jobConf_);
  running_ = null;
  try {
    running_ = jc_.submitJob(jobConf_);
    jobId_ = running_.getID();
    if (background_) {
      LOG.info("Job is running in background.");
    } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
      LOG.error("Job not Successful!");
      return 1;
    }
    LOG.info("Output directory: " + output_);
  } catch(FileNotFoundException fe) {
    LOG.error("Error launching job , bad input path : " + fe.getMessage());
    return 2;
  } catch(InvalidJobConfException je) {
    LOG.error("Error launching job , Invalid job conf : " + je.getMessage());
    return 3;
  } catch(FileAlreadyExistsException fae) {
    LOG.error("Error launching job , Output path already exists : "
      + fae.getMessage());
    return 4;
  } catch(IOException ioe) {
    LOG.error("Error Launching job : " + ioe.getMessage());
    return 5;
  } catch (InterruptedException ie) {
    // Restore the interrupt status so callers up the stack can observe
    // that this thread was interrupted; swallowing it loses the signal.
    Thread.currentThread().interrupt();
    LOG.error("Error monitoring job : " + ie.getMessage());
    return 6;
  } finally {
    jc_.close();
  }
  return 0;
}
示例7: processFiles
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Process list of resources: register each as a local resource (and
 * optionally a classpath entry), and when requested download it from its
 * source file system, extracting recognized archive formats.
 *
 * @param jobLocDir Job working directory.
 * @param files Array of {@link URI} or {@link org.apache.hadoop.fs.Path} to process resources.
 * @param download {@code true}, if need to download. Process class path only else.
 * @param extract {@code true}, if need to extract archive.
 * @param clsPathUrls Collection to add resource as classpath resource.
 * @param rsrcNameProp Property for resource name array setting.
 * @throws IOException If failed.
 */
private void processFiles(File jobLocDir, @Nullable Object[] files, boolean download, boolean extract,
    @Nullable Collection<URL> clsPathUrls, @Nullable String rsrcNameProp) throws IOException {
    if (F.isEmptyOrNulls(files))
        return;
    Collection<String> res = new ArrayList<>();
    JobConf cfg = ctx.getJobConf();
    // The local destination FS is the same for every resource: resolve it
    // once, and only when something will actually be downloaded.
    FileSystem dstFs = download ? FileSystem.getLocal(cfg) : null;
    for (Object pathObj : files) {
        Path srcPath;
        if (pathObj instanceof URI) {
            URI uri = (URI)pathObj;
            srcPath = new Path(uri);
        }
        else
            srcPath = (Path)pathObj;
        String locName = srcPath.getName();
        File dstPath = new File(jobLocDir.getAbsolutePath(), locName);
        res.add(locName);
        rsrcSet.add(dstPath);
        if (clsPathUrls != null)
            clsPathUrls.add(dstPath.toURI().toURL());
        // Classpath-only mode: nothing to copy.
        if (!download)
            continue;
        FileSystem srcFs = job.fileSystem(srcPath.toUri(), cfg);
        if (extract) {
            // Archives are first copied into a hidden staging dir, then
            // unpacked into the job-local destination.
            File archivesPath = new File(jobLocDir.getAbsolutePath(), ".cached-archives");
            if (!archivesPath.exists() && !archivesPath.mkdir())
                throw new IOException("Failed to create directory " +
                    "[path=" + archivesPath + ", jobId=" + jobId + ']');
            File archiveFile = new File(archivesPath, locName);
            FileUtil.copy(srcFs, srcPath, dstFs, new Path(archiveFile.toString()), false, cfg);
            // Dispatch on extension: jar, zip and tar variants are supported.
            String archiveNameLC = archiveFile.getName().toLowerCase();
            if (archiveNameLC.endsWith(".jar"))
                RunJar.unJar(archiveFile, dstPath);
            else if (archiveNameLC.endsWith(".zip"))
                FileUtil.unZip(archiveFile, dstPath);
            else if (archiveNameLC.endsWith(".tar.gz") ||
                archiveNameLC.endsWith(".tgz") ||
                archiveNameLC.endsWith(".tar"))
                FileUtil.unTar(archiveFile, dstPath);
            else
                throw new IOException("Cannot unpack archive [path=" + srcPath + ", jobId=" + jobId + ']');
        }
        else
            FileUtil.copy(srcFs, srcPath, dstFs, new Path(dstPath.toString()), false, cfg);
    }
    // Publish the localized resource names under the requested property.
    if (!res.isEmpty() && rsrcNameProp != null)
        ctx.getJobConf().setStrings(rsrcNameProp, res.toArray(new String[res.size()]));
}
示例8: localizeCache
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Copies one distributed-cache entry from its source file system to the
 * local file system and, for archives, unpacks it next to the download.
 * Updates cache bookkeeping (size, permissions, modification time) in
 * {@code cacheStatus}.
 *
 * @param conf configuration used to resolve both file systems
 * @param cache URI of the cache file/archive to localize
 * @param confFileStamp timestamp recorded in the job conf (currently unused here)
 * @param cacheStatus status record to update; its localizedLoadPath is the target
 * @param isArchive whether the entry should be unpacked after download
 * @param isPublic whether the localized entry is world-visible (affects permissions)
 * @return the localized load path from {@code cacheStatus}
 * @throws IOException if the local directory cannot be created or the copy fails
 */
Path localizeCache(Configuration conf,
URI cache, long confFileStamp,
CacheStatus cacheStatus,
boolean isArchive, boolean isPublic)
throws IOException {
FileSystem fs = FileSystem.get(cache, conf);
FileSystem localFs = FileSystem.getLocal(conf);
Path parchive = null;
if (isArchive) {
// Archives are downloaded into a subdirectory named after the load
// path so the unpacked contents end up beside the archive file.
parchive = new Path(cacheStatus.localizedLoadPath,
new Path(cacheStatus.localizedLoadPath.getName()));
} else {
parchive = cacheStatus.localizedLoadPath;
}
if (!localFs.mkdirs(parchive.getParent())) {
throw new IOException("Mkdirs failed to create directory " +
cacheStatus.localizedLoadPath.toString());
}
String cacheId = cache.getPath();
fs.copyToLocalFile(new Path(cacheId), parchive);
if (isArchive) {
// Choose the unpack strategy from the (lower-cased) file extension.
String tmpArchive = parchive.toString().toLowerCase();
File srcFile = new File(parchive.toString());
File destDir = new File(parchive.getParent().toString());
LOG.info(String.format("Extracting %s to %s",
srcFile.toString(), destDir.toString()));
if (tmpArchive.endsWith(".jar")) {
RunJar.unJar(srcFile, destDir);
} else if (tmpArchive.endsWith(".zip")) {
FileUtil.unZip(srcFile, destDir);
} else if (isTarFile(tmpArchive)) {
FileUtil.unTar(srcFile, destDir);
} else {
LOG.warn(String.format(
"Cache file %s specified as archive, but not valid extension.",
srcFile.toString()));
// else will not do anything
// and copy the file into the dir as it is
}
}
// Account the on-disk footprint of everything under the parent dir.
long cacheSize =
FileUtil.getDU(new File(parchive.getParent().toString()));
cacheStatus.size = cacheSize;
// Increase the size and sub directory count of the cache
// from baseDirSize and baseDirNumberSubDir.
baseDirManager.addCacheUpdate(cacheStatus);
// set proper permissions for the localized directory
setPermissions(conf, cacheStatus, isPublic);
// update cacheStatus to reflect the newly cached file
cacheStatus.mtime = getTimestamp(conf, cache);
LOG.info(String.format("Cached %s as %s",
cache.toString(), cacheStatus.localizedLoadPath));
return cacheStatus.localizedLoadPath;
}
示例9: submitAndMonitorJob
import org.apache.hadoop.util.RunJar; //导入方法依赖的package包/类
/**
 * Submits the configured job, prints job info, and, unless running in the
 * background, monitors it to completion, logging progress and errors.
 *
 * @return 0 on success; 1 if the job failed; 2-6 for distinct classes of
 *         submission/monitoring errors (see catch clauses)
 * @throws IOException only from JobClient construction; submission I/O
 *         errors are caught and mapped to return code 5
 */
public int submitAndMonitorJob() throws IOException {
  if (jar_ != null && isLocalHadoop()) {
    // getAbs became required when shell and subvm have different working dirs...
    File wd = new File(".").getAbsoluteFile();
    // Local mode: unpack the jar into the working dir so its classes load.
    RunJar.unJar(new File(jar_), wd);
  }
  // if jobConf_ changes must recreate a JobClient
  jc_ = new JobClient(jobConf_);
  running_ = null;
  try {
    running_ = jc_.submitJob(jobConf_);
    jobId_ = running_.getID();
    jobInfo();
    if (background_) {
      LOG.info("Job is running in background.");
    } else if (!jc_.monitorAndPrintJob(jobConf_, running_)) {
      LOG.error("Job not Successful!");
      return 1;
    }
    LOG.info("Output directory: " + output_);
  } catch(FileNotFoundException fe) {
    LOG.error("Error launching job , bad input path : " + fe.getMessage());
    return 2;
  } catch(InvalidJobConfException je) {
    LOG.error("Error launching job , Invalid job conf : " + je.getMessage());
    return 3;
  } catch(FileAlreadyExistsException fae) {
    LOG.error("Error launching job , Output path already exists : "
      + fae.getMessage());
    return 4;
  } catch(IOException ioe) {
    LOG.error("Error Launching job : " + ioe.getMessage());
    return 5;
  } catch (InterruptedException ie) {
    // Restore the interrupt status so callers up the stack can observe
    // that this thread was interrupted; swallowing it loses the signal.
    Thread.currentThread().interrupt();
    LOG.error("Error monitoring job : " + ie.getMessage());
    return 6;
  } finally {
    jc_.close();
  }
  return 0;
}