本文整理汇总了Java中org.apache.hadoop.util.ClassUtil.findContainingJar方法的典型用法代码示例。如果您正苦于以下问题:Java ClassUtil.findContainingJar方法的具体用法?Java ClassUtil.findContainingJar怎么用?Java ClassUtil.findContainingJar使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.util.ClassUtil
的用法示例。
在下文中一共展示了ClassUtil.findContainingJar方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testJarAtPath
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that findContainingJar can correctly
 * identify the path of the jar.
 *
 * @param path filesystem path to a jar expected to contain {@code CLASSNAME}.
 * @throws Exception if the class cannot be loaded or an assertion fails.
 */
private void testJarAtPath(String path) throws Exception {
  File jar = new File(path).getAbsoluteFile();
  assertTrue(jar.exists());
  URL[] urls = new URL[] {
    jar.toURI().toURL()
  };
  // URLClassLoader is AutoCloseable since Java 7; close it so the jar's
  // file handle is released when the test finishes (the original leaked it).
  try (URLClassLoader cl = new URLClassLoader(urls)) {
    Class<?> clazz = Class.forName(CLASSNAME, true, cl);
    assertNotNull(clazz);
    String containingJar = ClassUtil.findContainingJar(clazz);
    assertEquals(jar.getAbsolutePath(), containingJar);
  }
}
示例2: setJar
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Configures the jar for the given job. In local mode the jar containing
 * {@code clazz} is looked up on the classpath and written straight into the
 * configuration; otherwise the class's dependencies are staged through the
 * dependency loader and data-provider factory.
 *
 * @param job job whose configuration receives the jar / dependencies.
 * @param clazz class used to locate the containing jar or dependency set.
 * @throws IOException if dependency staging fails.
 */
public static void setJar(Job job, Class<?> clazz) throws IOException
{
  Configuration conf = job.getConfiguration();
  if (isLocal(conf))
  {
    // May be null when the class is on an exploded classpath (no jar);
    // in that case nothing is set.
    String jar = ClassUtil.findContainingJar(clazz);
    if (jar != null)
    {
      conf.set("mapreduce.job.jar", jar);
    }
  }
  else
  {
    DependencyLoader.addDependencies(job, clazz);
    DataProviderFactory.addDependencies(conf);
  }
}
示例3: getJar
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Returns the master jar for the given class.
 *
 * NOTE(review): despite the "get" name, in local mode this also mutates the
 * supplied configuration by setting "mapreduce.job.jar" as a side effect;
 * callers appear to rely on that, so the behavior is preserved here.
 *
 * @param conf configuration to consult (and, in local mode, mutate).
 * @param clazz class whose containing jar / master jar is looked up.
 * @return the master jar path resolved by {@code DependencyLoader}.
 * @throws IOException if dependency resolution fails.
 */
public static String getJar(Configuration conf, Class<?> clazz) throws IOException
{
  if (isLocal(conf))
  {
    String jar = ClassUtil.findContainingJar(clazz);
    if (jar != null)
    {
      conf.set("mapreduce.job.jar", jar);
    }
  }
  return DependencyLoader.getMasterJar(clazz);
}
示例4: setJarByClass
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Set the job's jar file by finding an example class location.
 *
 * If the class is not packaged in a jar (e.g. an exploded classes
 * directory), the jar is left unset.
 *
 * @param cls the example class.
 */
public void setJarByClass(Class<?> cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }
}
示例5: LaunchCluster
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Builds the launcher state from the parsed command line, falling back to
 * the defaults declared in {@code Constants} for any option not supplied.
 *
 * @param conf Hadoop/YARN configuration.
 * @param yarnClient client used to talk to the YARN resource manager.
 * @param cliParser parsed command-line options.
 * @throws NumberFormatException if a numeric option value is not an integer.
 * @throws IllegalArgumentException if worker count is not positive or
 *         ps count is negative.
 */
public LaunchCluster(Configuration conf, YarnClient yarnClient, CommandLine cliParser) {
this.conf = conf;
this.yarnClient = yarnClient;
// Application-master settings: name, memory, vcores, queue.
appName = cliParser.getOptionValue(
Constants.OPT_TF_APP_NAME, Constants.DEFAULT_APP_NAME);
amMemory = Integer.parseInt(cliParser.getOptionValue(
Constants.OPT_TF_APP_MASTER_MEMORY, Constants.DEFAULT_APP_MASTER_MEMORY));
amVCores = Integer.parseInt(cliParser.getOptionValue(
Constants.OPT_TF_APP_MASTER_VCORES, Constants.DEFAULT_APP_MASTER_VCORES));
amQueue = cliParser.getOptionValue(
Constants.OPT_TF_APP_MASTER_QUEUE, Constants.DEFAULT_APP_MASTER_QUEUE);
// Per-container resources for the TensorFlow tasks.
containerMemory = Integer.parseInt(cliParser.getOptionValue(
Constants.OPT_TF_CONTAINER_MEMORY, Constants.DEFAULT_CONTAINER_MEMORY));
containerVCores = Integer.parseInt(cliParser.getOptionValue(
Constants.OPT_TF_CONTAINER_VCORES, Constants.DEFAULT_CONTAINER_VCORES));
// TensorFlow jar: an explicit option wins; otherwise use the jar that
// contains this launcher class (may be null on an exploded classpath).
if (cliParser.hasOption(Constants.OPT_TF_JAR)) {
tfJar = cliParser.getOptionValue(Constants.OPT_TF_JAR);
} else {
tfJar = ClassUtil.findContainingJar(getClass());
}
// Native TF library: an explicit option wins; otherwise it is expected to
// sit next to the jar resolved above.
if (cliParser.hasOption(Constants.OPT_TF_LIB)) {
tfLib = cliParser.getOptionValue(Constants.OPT_TF_LIB);
} else {
tfLib = Utils.getParentDir(tfJar) + File.separator + Constants.TF_LIB_NAME;
}
// Worker count must be positive; ps (parameter-server) count may be zero.
workerNum = Integer.parseInt(
cliParser.getOptionValue(Constants.OPT_TF_WORKER_NUM, Constants.DEFAULT_TF_WORKER_NUM));
if (workerNum <= 0) {
throw new IllegalArgumentException(
"Illegal number of TensorFlow worker task specified: " + workerNum);
}
psNum = Integer.parseInt(
cliParser.getOptionValue(Constants.OPT_TF_PS_NUM, Constants.DEFAULT_TF_PS_NUM));
if (psNum < 0) {
throw new IllegalArgumentException(
"Illegal number of TensorFlow ps task specified: " + psNum);
}
}
示例6: testSleepJobInternal
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Runs a SleepJob against the mini YARN cluster and verifies successful
 * completion, the tracking URL, job counters and task progress.
 *
 * @param useRemoteJar when true, the job jar is referenced through a
 *        viewfs:///jobjars link instead of being set by class, exercising
 *        the remote-jar code path of {@code findContainingJar}.
 */
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);
// Skip (rather than fail) when the MR app jar has not been built.
if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
+ " not found. Not running test.");
return;
}
Configuration sleepConf = new Configuration(mrCluster.getConfig());
// set master address to local to test that local mode applied iff framework == local
sleepConf.set(MRConfig.MASTER_ADDRESS, "local");
SleepJob sleepJob = new SleepJob();
sleepJob.setConf(sleepConf);
// job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);
job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
if (useRemoteJar) {
// Locate the jar containing SleepJob, mount its parent directory under
// viewfs:///jobjars, and point the job at the viewfs path.
final Path localJar = new Path(
ClassUtil.findContainingJar(SleepJob.class));
ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
localFs.makeQualified(localJar.getParent()).toUri());
job.setJar("viewfs:///jobjars/" + localJar.getName());
} else {
job.setJarByClass(SleepJob.class);
}
job.setMaxMapAttempts(1); // speed up failures
job.submit();
String trackingUrl = job.getTrackingURL();
String jobId = job.getJobID().toString();
boolean succeeded = job.waitForCompletion(true);
Assert.assertTrue(succeeded);
Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
// The tracking URL is expected to end with the job's numeric suffix.
Assert.assertTrue("Tracking URL was " + trackingUrl +
" but didn't Match Job ID " + jobId ,
trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
verifySleepJobCounters(job);
verifyTaskProgress(job);
// TODO later: add explicit "isUber()" checks of some sort (extend
// JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
示例7: loadDependenciesByReflection
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Resolves the dependency set for the given class. When the class is
 * packaged in a jar, dependencies are read from that jar; otherwise the
 * classpath is scanned for {@code *dependencies.properties} resources and
 * their contents merged.
 *
 * @param clazz class whose dependencies are being resolved.
 * @return the merged dependency set, or null when no jar and no
 *         dependency-properties resources were found.
 * @throws IOException if reading a dependency source fails.
 */
@SuppressWarnings("squid:S1166") // Exception caught and handled
private static Set<Dependency> loadDependenciesByReflection(Class<?> clazz) throws IOException
{
  String jar = ClassUtil.findContainingJar(clazz);
  if (jar != null)
  {
    return loadDependenciesFromJar(jar);
  }

  // the properties may have been added on the classpath, lets see if we can find it...
  Set<Dependency> deps = null;
  // set up a resource scanner over this package
  Reflections reflections = new Reflections(new ConfigurationBuilder()
      .setUrls(ClasspathHelper.forPackage(ClassUtils.getPackageName(DependencyLoader.class)))
      .setScanners(new ResourcesScanner()));
  Set<String> resources = reflections.getResources(Pattern.compile(".*dependencies\\.properties"));
  for (String resource : resources)
  {
    log.debug("Loading dependency properties from: /" + resource);
    // try-with-resources closes the stream exactly once; the original
    // closed it twice (in the try body and again in finally) and would
    // have thrown NPE before its null guard if the resource was missing.
    try (InputStream is = DependencyLoader.class.getResourceAsStream("/" + resource))
    {
      if (is == null)
      {
        // Resource vanished between scan and load; skip it.
        continue;
      }
      Set<Dependency> d = readDependencies(is);
      if (deps == null)
      {
        deps = d;
      }
      else
      {
        deps.addAll(d);
      }
    }
  }
  return deps;
}
示例8: findContainingJar
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Locate a jar on the classpath that contains a class with the same name,
 * if one exists. A jar is returned even when a non-jar classpath entry
 * holding the class would be found first by the classloader.
 *
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 */
public static String findContainingJar(Class my_class) {
  final String containingJar = ClassUtil.findContainingJar(my_class);
  return containingJar;
}
示例9: findContainingJar
import org.apache.hadoop.util.ClassUtil; //导入方法依赖的package包/类
/**
 * Find a jar that contains a class of the same name, if any.
 * It will return a jar file, even if that is not the first thing
 * on the class path that has a class with the same name.
 *
 * (The original javadoc declared {@code @throws IOException}, but this
 * method declares no checked exception; the stale tag has been removed.)
 *
 * @param my_class the class to find.
 * @return a jar file that contains the class, or null.
 */
public static String findContainingJar(Class my_class) {
return ClassUtil.findContainingJar(my_class);
}