

Java DistributedCache.addFileToClassPath Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.filecache.DistributedCache.addFileToClassPath. If you are wondering what DistributedCache.addFileToClassPath does, how to call it, or where to find real-world examples, the curated snippets below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.filecache.DistributedCache.


Six code examples of the DistributedCache.addFileToClassPath method are shown below, sorted by popularity by default.
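Before the examples, here is a minimal, self-contained sketch of the typical call pattern: obtain a Configuration and a FileSystem, point at a jar that already lives on HDFS, and register it on the task classpath through DistributedCache.addFileToClassPath. The HDFS path and job name are hypothetical placeholders, and the sketch assumes a Hadoop 2.x classpath on which the deprecated DistributedCache class is still available.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

public class AddJarToClassPathSketch {

  @SuppressWarnings("deprecation")
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // Hypothetical jar that is assumed to already exist on HDFS.
    Path jarOnHdfs = new Path("/user/example/lib/extra-udfs.jar");

    // Register the jar with the DistributedCache so map/reduce tasks
    // can load classes from it at runtime.
    DistributedCache.addFileToClassPath(jarOnHdfs, conf, fs);

    // A Job created from this Configuration inherits the classpath entry.
    Job job = Job.getInstance(conf, "classpath-sketch-job");
    // ... set mapper, reducer, input/output paths, then submit the job.
  }
}

In newer Hadoop releases, DistributedCache is deprecated in favor of equivalent methods on Job (for example Job.addFileToClassPath), which is why Example 3 below suppresses the deprecation warning.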

Example 1: addJars

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
/**
 * Add framework or job-specific jars to the classpath through DistributedCache
 * so the mappers can use them.
 */
private void addJars(Path jarFileDir, String jarFileList) throws IOException {
  LocalFileSystem lfs = FileSystem.getLocal(this.conf);
  for (String jarFile : SPLITTER.split(jarFileList)) {
    Path srcJarFile = new Path(jarFile);
    FileStatus[] fileStatusList = lfs.globStatus(srcJarFile);
    for (FileStatus status : fileStatusList) {
      // DistributedCache requires absolute path, so we need to use makeQualified.
      Path destJarFile = new Path(this.fs.makeQualified(jarFileDir), status.getPath().getName());
      // Copy the jar file from local file system to HDFS
      this.fs.copyFromLocalFile(status.getPath(), destJarFile);
      // Then add the jar file on HDFS to the classpath
      LOG.info(String.format("Adding %s to classpath", destJarFile));
      DistributedCache.addFileToClassPath(destJarFile, this.conf, this.fs);
    }
  }
}
 
Developer ID: Hanmourang, Project: Gobblin, Lines: 21, Source: MRJobLauncher.java

Example 2: configure

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
@Override
public void configure(Job job) throws IOException {

    for (Path p : getLocalPaths()) {
        Configuration conf = job.getConfiguration();
        FileSystem jobFS = FileSystem.get(conf);
        FileSystem localFS = FileSystem.getLocal(conf);
        Path stagedPath = uploadFileIfNecessary(localFS, p, jobFS);
        DistributedCache.addFileToClassPath(stagedPath, conf, jobFS);
    }

    // We don't really need to set a mapred job jar here,
    // but doing so suppresses a warning
    String mj = getMapredJar();
    if (null != mj)
        job.getConfiguration().set(Hadoop1Compat.CFG_JOB_JAR, mj);
}
 
Developer ID: graben1437, Project: titan1.0.1.kafka, Lines: 18, Source: DistCacheConfigurer.java

Example 3: provisionQueries

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
@SuppressWarnings("deprecation")
public static <T extends Configuration> T provisionQueries(T cfg) {
    if (HadoopCfgUtils.isLocal(cfg)) {
        return cfg;
    }

    try {
        DistributedCache.addFileToClassPath(new Path(TestUtils.sampleQueryDsl()), cfg);
        DistributedCache.addFileToClassPath(new Path(TestUtils.sampleQueryUri()), cfg);
    } catch (IOException ex) {
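        // The IOException is swallowed here: if the sample query files cannot be added,
        // the configuration is simply returned unchanged.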
    }
    return cfg;
}
 
Developer ID: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 14, Source: QueryTestParams.java

Example 4: addJars

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
private void addJars(Configuration conf) throws IOException {
  if (!this.dataset.jobProps().contains(MRCompactor.COMPACTION_JARS)) {
    return;
  }
  Path jarFileDir = new Path(this.dataset.jobProps().getProp(MRCompactor.COMPACTION_JARS));
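  // Add every jar under the configured compaction-jar directory to the task classpath.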
  for (FileStatus status : this.fs.listStatus(jarFileDir)) {
    DistributedCache.addFileToClassPath(status.getPath(), conf, this.fs);
  }
}
 
Developer ID: Hanmourang, Project: Gobblin, Lines: 10, Source: MRCompactorJobRunner.java

Example 5: run

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
@Override
public int run(String[] args) throws Exception {
	final CmdLineParser parser = new CmdLineParser(this);

	try {
		parser.parseArgument(args);
	} catch (final CmdLineException e) {
		System.err.println(e.getMessage());
		System.err.println("Usage: hadoop jar HadoopImageIndexer.jar [options]");
		parser.printUsage(System.err);
		return -1;
	}

	final Path[] paths = SequenceFileUtility.getFilePaths(input, "part");
	final Path outputPath = new Path(output);

	if (outputPath.getFileSystem(this.getConf()).exists(outputPath) && replace)
		outputPath.getFileSystem(this.getConf()).delete(outputPath, true);

	final Job job = TextBytesJobUtil.createJob(paths, outputPath, null, this.getConf());
	job.setJarByClass(this.getClass());
	job.setMapperClass(PcaVladMapper.class);
	job.setNumReduceTasks(0);

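	// Ship the indexer data file on the task classpath and record its name so mappers can locate it.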
	DistributedCache.addFileToClassPath(new Path(indexerData), job.getConfiguration());
	job.getConfiguration().set(VLAD_INDEXER_DATA_PATH_KEY, new Path(indexerData).getName());

	SequenceFileOutputFormat.setCompressOutput(job, !dontcompress);
	job.waitForCompletion(true);

	return 0;
}
 
Developer ID: openimaj, Project: openimaj, Lines: 33, Source: HadoopPcaVladExtractor.java

Example 6: addFileToClasspath

import org.apache.hadoop.filecache.DistributedCache; // import the package/class this method depends on
/**
 * Adds a file in HDFS to the classpath for hadoop nodes (via the
 * DistributedCache)
 * 
 * @param hdfsConfig the HDFSConfig object with host and port set
 * @param conf the Configuration object that will be changed by this operation
 * @param path the path to the file (in HDFS) to be added to the classpath for
 *          hadoop nodes
 * @param env any environment variables
 * @throws IOException if a problem occurs
 */
public static void addFileToClasspath(HDFSConfig hdfsConfig,
  Configuration conf, String path, Environment env) throws IOException {

  // conf.set(HDFSConfig.FS_DEFAULT_NAME,
  // HDFSConfig.constructHostURL(hdfsConfig, env));
  hdfsConfig.configureForHadoop(conf, env);

  FileSystem fs = FileSystem.get(conf);

  if (path.startsWith("hdfs://")) {
    throw new IOException("Path should not include 'hdfs://host:port'");
  }
  if (env != null) {
    try {
      path = env.substitute(path);
    } catch (Exception ex) {
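      // If environment variable substitution fails, the unsubstituted path is used as-is.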
    }
  }
  // if (!path.startsWith("/")) {
  // path = "/" + path;
  // }

  // We know that all job-specific jars are installed in the user's home
  // directory
  Path destPath = new Path(path);
  String userHome = fs.getHomeDirectory().toString();
  String absolutePath = userHome + "/" + destPath.toString();
  if (absolutePath.startsWith("hdfs://")) {
    // strip this off - for some reason under CDH4
    // DistributedCache.addFileToClassPath() keeps the hdfs:// part
    // of the URL in the classpath spec! Apache does not do this.
    absolutePath = absolutePath.replace("hdfs://", "");
    absolutePath = absolutePath.substring(absolutePath.indexOf("/"),
      absolutePath.length());
  }
  destPath = new Path(absolutePath);

  DistributedCache.addFileToClassPath(destPath, conf, fs);

  checkForWindowsAccessingHadoopOnLinux(conf);
}
 
Developer ID: mydzigear, Project: repo.kmeanspp.silhouette_score, Lines: 53, Source: HDFSUtils.java


Note: The org.apache.hadoop.filecache.DistributedCache.addFileToClassPath method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and the copyright of the source code belongs to the original authors. Please consult each project's License before distributing or using the code, and do not reproduce this article without permission.