本文整理汇总了Java中org.apache.hadoop.util.ClassUtil类的典型用法代码示例。如果您正苦于以下问题:Java ClassUtil类的具体用法?Java ClassUtil怎么用?Java ClassUtil使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
ClassUtil类属于org.apache.hadoop.util包,在下文中一共展示了ClassUtil类的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: loadFileSystems
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Discovers every {@link FileSystem} implementation registered through the
 * Java {@link ServiceLoader} mechanism and records a scheme-to-class mapping
 * in {@code SERVICE_FILE_SYSTEMS}. Runs at most once; guarded by
 * {@code FILE_SYSTEMS_LOADED} under the {@code FileSystem.class} lock.
 * Implementations that fail to load are logged and skipped rather than
 * aborting the scan.
 */
private static void loadFileSystems() {
  synchronized (FileSystem.class) {
    if (FILE_SYSTEMS_LOADED) {
      return; // already scanned; nothing to do
    }
    Iterator<FileSystem> candidates = ServiceLoader.load(FileSystem.class).iterator();
    while (candidates.hasNext()) {
      FileSystem fs = null;
      try {
        fs = candidates.next();
        try {
          SERVICE_FILE_SYSTEMS.put(fs.getScheme(), fs.getClass());
        } catch (Exception e) {
          // getScheme() may throw for partially-usable implementations
          LOG.warn("Cannot load: " + fs + " from " +
              ClassUtil.findContainingJar(fs.getClass()), e);
        }
      } catch (ServiceConfigurationError ee) {
        // a broken service registration must not stop the iteration
        LOG.warn("Cannot load filesystem", ee);
      }
    }
    FILE_SYSTEMS_LOADED = true;
  }
}
示例2: testJarAtPath
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Given a path with a jar, make a classloader with that jar on the
 * classpath, and check that {@code ClassUtil.findContainingJar} correctly
 * identifies the absolute path of that jar.
 */
private void testJarAtPath(String path) throws Exception {
  File jarFile = new File(path).getAbsoluteFile();
  assertTrue(jarFile.exists());
  // Class must be resolved through a loader rooted at exactly this jar.
  ClassLoader loader = new URLClassLoader(new URL[] { jarFile.toURI().toURL() });
  Class<?> loaded = Class.forName(CLASSNAME, true, loader);
  assertNotNull(loaded);
  String containingJar = ClassUtil.findContainingJar(loaded);
  assertEquals(jarFile.getAbsolutePath(), containingJar);
}
示例3: EmbeddedGobblinDistcp
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
public EmbeddedGobblinDistcp(Path from, Path to) throws JobTemplate.TemplateException, IOException {
super("Distcp");
try {
setTemplate(ResourceBasedJobTemplate.forResourcePath("templates/distcp.template"));
} catch (URISyntaxException | SpecNotFoundException exc) {
throw new RuntimeException("Could not instantiate an " + EmbeddedGobblinDistcp.class.getName(), exc);
}
this.setConfiguration("from", from.toString());
this.setConfiguration("to", to.toString());
// Infer source and target fs uris from the input paths
this.setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, from.getFileSystem(new Configuration()).getUri().toString());
this.setConfiguration(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, to.getFileSystem(new Configuration()).getUri().toString());
// add gobblin-data-management jar to distributed jars
this.distributeJar(ClassUtil.findContainingJar(CopySource.class));
}
示例4: setJar
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Configures the jar for {@code job}. In local mode the jar containing
 * {@code clazz} (if any) is written to {@code mapreduce.job.jar}; otherwise
 * the class's dependencies are distributed via {@code DependencyLoader} and
 * {@code DataProviderFactory}.
 *
 * @param job   the job whose configuration is updated.
 * @param clazz the class whose containing jar / dependencies are used.
 * @throws IOException if dependency distribution fails.
 */
public static void setJar(Job job, Class clazz) throws IOException
{
  Configuration conf = job.getConfiguration();
  if (!isLocal(conf))
  {
    DependencyLoader.addDependencies(job, clazz);
    DataProviderFactory.addDependencies(conf);
    return;
  }
  String containingJar = ClassUtil.findContainingJar(clazz);
  if (containingJar != null)
  {
    conf.set("mapreduce.job.jar", containingJar);
  }
}
示例5: getJar
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Returns the master jar for {@code clazz} as resolved by
 * {@code DependencyLoader.getMasterJar}. As a side effect, in local mode the
 * jar containing {@code clazz} (if found) is also recorded under
 * {@code mapreduce.job.jar} in {@code conf}. Note the return value is the
 * master jar in both modes — only the configuration side effect is local-only.
 *
 * @param conf  configuration, consulted for local mode and possibly updated.
 * @param clazz the class whose jar is looked up.
 * @return the master jar path for {@code clazz}.
 * @throws IOException if the master jar cannot be resolved.
 */
public static String getJar(Configuration conf, Class clazz) throws IOException
{
  if (isLocal(conf))
  {
    String containingJar = ClassUtil.findContainingJar(clazz);
    if (containingJar != null)
    {
      conf.set("mapreduce.job.jar", containingJar);
    }
  }
  return DependencyLoader.getMasterJar(clazz);
}
示例6: setJarByClass
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Set the job's jar file by finding an example class location.
 * If the class is not inside a jar (e.g. loaded from a directory on the
 * classpath), the job jar is left unchanged.
 *
 * @param cls the example class.
 */
public void setJarByClass(Class<?> cls) {
  String jar = ClassUtil.findContainingJar(cls);
  if (jar != null) {
    setJar(jar);
  }
}
示例7: main
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Prints Octopus version, SCM, build, and checksum metadata, plus the jar
 * this command was run from.
 *
 * @param args unused.
 */
public static void main(String[] args) {
  java.io.PrintStream out = System.out;
  out.println("Octopus " + getVersion());
  out.println("SCM: " + getUrl() + ", revision: " + getRevision());
  out.println("Compiled by " + getUser() + " on " + getDate());
  out.println("From source with checksum " + getSrcChecksum());
  out.println("This command was run using " + ClassUtil.findContainingJar(VersionInfo.class));
}
示例8: main
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Prints Tajo version, git, build, protoc, and checksum metadata, plus the
 * jar this command was run from. The version is also logged at debug level.
 *
 * @param args unused.
 */
public static void main(String[] args) {
  String version = getVersion();
  LOG.debug("version: " + version);
  System.out.println("Tajo " + version);
  System.out.println("Git " + getUrl() + " -r " + getRevision());
  System.out.println("Compiled by " + getUser() + " on " + getDate());
  System.out.println("Compiled with protoc " + getProtocVersion());
  System.out.println("From source with checksum " + getSrcChecksum());
  System.out.println("This command was run using " + ClassUtil.findContainingJar(VersionInfo.class));
}
示例9: main
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Prints the version info for the component named by the single command-line
 * argument, plus the jar this command was run from. Exits with a non-zero
 * status when the argument count is wrong.
 *
 * @param args exactly one element: the component name.
 */
public static void main(String[] args) {
  if (args.length != 1) {
    System.err.println("Invalid no. of args. Usage: VersionInfo <component-name>");
    System.exit(-1);
  }
  VersionInfo info = new VersionInfo(args[0]);
  System.out.println("VersionInfo: " + info);
  System.out.println("This command was run using " +
      ClassUtil.findContainingJar(VersionInfo.class));
}
示例10: LaunchCluster
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Parses the command line into the YARN application settings used to launch
 * the TensorFlow cluster (app master sizing, container sizing, jar/lib
 * locations, and worker/ps counts).
 *
 * @param conf       Hadoop configuration for the launch.
 * @param yarnClient client used to talk to the YARN resource manager.
 * @param cliParser  parsed command-line options.
 * @throws IllegalArgumentException if the worker count is not positive or
 *                                  the ps count is negative.
 * @throws NumberFormatException if a numeric option value is not an integer.
 */
public LaunchCluster(Configuration conf, YarnClient yarnClient, CommandLine cliParser) {
  this.conf = conf;
  this.yarnClient = yarnClient;

  appName = cliParser.getOptionValue(
      Constants.OPT_TF_APP_NAME, Constants.DEFAULT_APP_NAME);
  amMemory = intOption(cliParser,
      Constants.OPT_TF_APP_MASTER_MEMORY, Constants.DEFAULT_APP_MASTER_MEMORY);
  amVCores = intOption(cliParser,
      Constants.OPT_TF_APP_MASTER_VCORES, Constants.DEFAULT_APP_MASTER_VCORES);
  amQueue = cliParser.getOptionValue(
      Constants.OPT_TF_APP_MASTER_QUEUE, Constants.DEFAULT_APP_MASTER_QUEUE);
  containerMemory = intOption(cliParser,
      Constants.OPT_TF_CONTAINER_MEMORY, Constants.DEFAULT_CONTAINER_MEMORY);
  containerVCores = intOption(cliParser,
      Constants.OPT_TF_CONTAINER_VCORES, Constants.DEFAULT_CONTAINER_VCORES);

  if (cliParser.hasOption(Constants.OPT_TF_JAR)) {
    tfJar = cliParser.getOptionValue(Constants.OPT_TF_JAR);
  } else {
    // Fall back to the jar this class itself was loaded from.
    tfJar = ClassUtil.findContainingJar(getClass());
  }
  if (cliParser.hasOption(Constants.OPT_TF_LIB)) {
    tfLib = cliParser.getOptionValue(Constants.OPT_TF_LIB);
  } else {
    // Default: the TF native library sits next to the jar.
    tfLib = Utils.getParentDir(tfJar) + File.separator + Constants.TF_LIB_NAME;
  }

  workerNum = intOption(cliParser,
      Constants.OPT_TF_WORKER_NUM, Constants.DEFAULT_TF_WORKER_NUM);
  if (workerNum <= 0) {
    throw new IllegalArgumentException(
        "Illegal number of TensorFlow worker task specified: " + workerNum);
  }
  psNum = intOption(cliParser,
      Constants.OPT_TF_PS_NUM, Constants.DEFAULT_TF_PS_NUM);
  if (psNum < 0) {
    throw new IllegalArgumentException(
        "Illegal number of TensorFlow ps task specified: " + psNum);
  }
}

/** Parses an integer-valued CLI option, falling back to the given default. */
private static int intOption(CommandLine cli, String option, String defaultValue) {
  return Integer.parseInt(cli.getOptionValue(option, defaultValue));
}
示例11: testSleepJobInternal
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Runs a small SleepJob on the mini YARN cluster and verifies completion
 * state, counters, task progress, and the tracking URL format.
 *
 * @param useRemoteJar when true, the job jar is referenced through a viewfs
 *                     link instead of the local classpath jar, exercising
 *                     remote-jar resolution.
 */
private void testSleepJobInternal(boolean useRemoteJar) throws Exception {
LOG.info("\n\n\nStarting testSleepJob: useRemoteJar=" + useRemoteJar);
// Skip (rather than fail) when the MR app jar was not built for this run.
if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
+ " not found. Not running test.");
return;
}
Configuration sleepConf = new Configuration(mrCluster.getConfig());
// set master address to local to test that local mode applied iff framework == local
sleepConf.set(MRConfig.MASTER_ADDRESS, "local");
SleepJob sleepJob = new SleepJob();
sleepJob.setConf(sleepConf);
// job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
Job job = sleepJob.createJob(3, numSleepReducers, 10000, 1, 5000, 1);
job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
if (useRemoteJar) {
// Mount the directory containing the SleepJob jar under a viewfs link and
// point the job at the viewfs path.
final Path localJar = new Path(
ClassUtil.findContainingJar(SleepJob.class));
ConfigUtil.addLink(job.getConfiguration(), "/jobjars",
localFs.makeQualified(localJar.getParent()).toUri());
job.setJar("viewfs:///jobjars/" + localJar.getName());
} else {
job.setJarByClass(SleepJob.class);
}
job.setMaxMapAttempts(1); // speed up failures
job.submit();
String trackingUrl = job.getTrackingURL();
String jobId = job.getJobID().toString();
boolean succeeded = job.waitForCompletion(true);
Assert.assertTrue(succeeded);
Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
// Tracking URL must end with the job-id's numeric suffix followed by "/".
Assert.assertTrue("Tracking URL was " + trackingUrl +
" but didn't Match Job ID " + jobId ,
trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
verifySleepJobCounters(job);
verifyTaskProgress(job);
// TODO later: add explicit "isUber()" checks of some sort (extend
// JobStatus?)--compare against MRJobConfig.JOB_UBERTASK_ENABLE value
}
示例12: loadDependenciesByReflection
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Resolves the dependency set for {@code clazz}. If the class lives in a jar,
 * the dependencies are read from that jar; otherwise the classpath is scanned
 * for {@code *dependencies.properties} resources and their contents merged.
 *
 * Fixes in this revision: the original closed each resource stream twice
 * (once in the try body and again in the finally block) and would pass a
 * null stream to {@code readDependencies} if a scanned resource could not be
 * opened; streams are now managed with try-with-resources and null streams
 * are skipped.
 *
 * @param clazz the class whose dependencies are being loaded.
 * @return the merged dependency set, or null when the class is not in a jar
 *         and no dependency properties resources were found (preserves the
 *         original null-return contract).
 * @throws IOException if a dependency source cannot be read.
 */
private static Set<Dependency> loadDependenciesByReflection(Class<?> clazz) throws IOException
{
  String jar = ClassUtil.findContainingJar(clazz);
  if (jar != null)
  {
    return loadDependenciesFromJar(jar);
  }

  // The properties may have been added on the classpath; scan for them.
  Set<Dependency> deps = null;
  Reflections reflections = new Reflections(new ConfigurationBuilder()
      .setUrls(ClasspathHelper.forPackage(ClassUtils.getPackageName(DependencyLoader.class)))
      .setScanners(new ResourcesScanner()));
  Set<String> resources = reflections.getResources(Pattern.compile(".*dependencies\\.properties"));
  for (String resource : resources)
  {
    log.debug("Loading dependency properties from: /" + resource);
    try (InputStream is = DependencyLoader.class.getResourceAsStream("/" + resource))
    {
      if (is == null)
      {
        // Resource disappeared between the scan and the open; skip it.
        continue;
      }
      Set<Dependency> d = readDependencies(is);
      if (deps == null)
      {
        deps = d;
      }
      else
      {
        deps.addAll(d);
      }
    }
  }
  return deps;
}
示例13: findContainingJar
import org.apache.hadoop.util.ClassUtil; //导入依赖的package包/类
/**
 * Find a jar that contains a class of the same name, if any.
 * It will return a jar file, even if that is not the first thing
 * on the class path that has a class with the same name.
 *
 * @param clazz the class to find.
 * @return a jar file that contains the class, or null.
 */
public static String findContainingJar(Class<?> clazz) {
  return ClassUtil.findContainingJar(clazz);
}