Java Configuration.setClassLoader Method Code Examples

This article collects and summarizes typical usage examples of the Java method org.apache.hadoop.conf.Configuration.setClassLoader. If you are wondering how Configuration.setClassLoader is used in practice, or are looking for concrete examples of calling it, the curated code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.conf.Configuration.


The following presents 10 code examples of the Configuration.setClassLoader method, sorted by popularity by default.
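
Before turning to the examples, here is a minimal sketch of what the method is for. The jar path extra-lib.jar and the class name com.example.SomePlugin are placeholders, not taken from any of the projects below: setting a URLClassLoader on a Configuration makes extra jars visible to any code that resolves classes through that Configuration, for example via Configuration.getClassByName.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.conf.Configuration;

public class SetClassLoaderSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder jar containing classes that are not on the default classpath.
    URL extraJar = new File("extra-lib.jar").toURI().toURL();

    Configuration conf = new Configuration();
    // Wrap the configuration's current classloader so that classes inside
    // extra-lib.jar become visible to anything resolving classes through conf.
    conf.setClassLoader(new URLClassLoader(new URL[] { extraJar }, conf.getClassLoader()));

    // Class lookup now goes through the classloader installed above.
    Class<?> c = conf.getClassByName("com.example.SomePlugin"); // placeholder class name
    System.out.println("Loaded " + c.getName());
  }
}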

Example 1: testClassLoader

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Tests that the class loader set by 
 * {@link Configuration#setClassLoader(ClassLoader)}
 * is inherited by any {@link WrappedRecordReader}s created by
 * {@link CompositeRecordReader}
 */
public void testClassLoader() throws Exception {
  Configuration conf = new Configuration();
  Fake_ClassLoader classLoader = new Fake_ClassLoader();
  conf.setClassLoader(classLoader);
  assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader);

  FileSystem fs = FileSystem.get(conf);
  Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
      .makeQualified(fs);

  Path base = new Path(testdir, "/empty");
  Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
  conf.set(CompositeInputFormat.JOIN_EXPR, 
    CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src));

  CompositeInputFormat<NullWritable> inputFormat = 
    new CompositeInputFormat<NullWritable>();
  // create dummy TaskAttemptID
  TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
  inputFormat.createRecordReader
    (inputFormat.getSplits(Job.getInstance(conf)).get(0), 
     new TaskAttemptContextImpl(conf, tid));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 31, Source file: TestWrappedRRClassloader.java

Example 2: execute

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Invoke {@link org.apache.hadoop.fs.FsShell#main} after a
 * few cursory checks of the configuration.
 */
public void execute() throws BuildException {
  if (null == cmd)
    throw new BuildException("Missing command (cmd) argument");
  argv.add(0, cmd);

  if (null == confloader) {
    setConf(getProject().getProperty("hadoop.conf.dir"));
  }

  int exit_code = 0;
  try {
    pushContext();

    Configuration conf = new HdfsConfiguration();
    conf.setClassLoader(confloader);
    exit_code = ToolRunner.run(conf, shell,
        argv.toArray(new String[argv.size()]));
    exit_code = postCmd(exit_code);

    if (0 > exit_code) {
      StringBuilder msg = new StringBuilder();
      for (String s : argv)
        msg.append(s + " ");
      msg.append("failed: " + exit_code);
      throw new Exception(msg.toString());
    }
  } catch (Exception e) {
    if (failonerror)
        throw new BuildException(e);
  } finally {
    popContext();
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 38, Source file: DfsTask.java

Example 3: processGeneralOptions

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Modify configuration according to user-specified generic options
 * @param conf Configuration to be modified
 * @param line User-specified generic options
 */
private void processGeneralOptions(Configuration conf,
    CommandLine line) throws IOException {
  if (line.hasOption("fs")) {
    FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
  }

  if (line.hasOption("jt")) {
    String optionValue = line.getOptionValue("jt");
    if (optionValue.equalsIgnoreCase("local")) {
      conf.set("mapreduce.framework.name", optionValue);
    }

    conf.set("yarn.resourcemanager.address", optionValue, 
        "from -jt command line option");
  }
  if (line.hasOption("conf")) {
    String[] values = line.getOptionValues("conf");
    for(String value : values) {
      conf.addResource(new Path(value));
    }
  }

  if (line.hasOption('D')) {
    String[] property = line.getOptionValues('D');
    for(String prop : property) {
      String[] keyval = prop.split("=", 2);
      if (keyval.length == 2) {
        conf.set(keyval[0], keyval[1], "from command line");
      }
    }
  }

  if (line.hasOption("libjars")) {
    conf.set("tmpjars", 
             validateFiles(line.getOptionValue("libjars"), conf),
             "from -libjars command line option");
    //setting libjars in client classpath
    URL[] libjars = getLibJars(conf);
    if(libjars!=null && libjars.length>0) {
      conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
      Thread.currentThread().setContextClassLoader(
          new URLClassLoader(libjars, 
              Thread.currentThread().getContextClassLoader()));
    }
  }
  if (line.hasOption("files")) {
    conf.set("tmpfiles", 
             validateFiles(line.getOptionValue("files"), conf),
             "from -files command line option");
  }
  if (line.hasOption("archives")) {
    conf.set("tmparchives", 
              validateFiles(line.getOptionValue("archives"), conf),
              "from -archives command line option");
  }
  conf.setBoolean("mapreduce.client.genericoptionsparser.used", true);
  
  // tokensFile
  if(line.hasOption("tokenCacheFile")) {
    String fileName = line.getOptionValue("tokenCacheFile");
    // check if the local file exists
    FileSystem localFs = FileSystem.getLocal(conf);
    Path p = localFs.makeQualified(new Path(fileName));
    if (!localFs.exists(p)) {
        throw new FileNotFoundException("File "+fileName+" does not exist.");
    }
    if(LOG.isDebugEnabled()) {
      LOG.debug("setting conf tokensFile: " + fileName);
    }
    UserGroupInformation.getCurrentUser().addCredentials(
        Credentials.readTokenStorageFile(p, conf));
    conf.set("mapreduce.job.credentials.binary", p.toString(),
             "from -tokenCacheFile command line option");

  }
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 82, Source file: GenericOptionsParser.java

Example 4: loadPluginsFromConfDir

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * If $SQOOP_CONF_DIR/tools.d/ exists and sqoop.tool.plugins is not set,
 * then we look through the files in that directory; they should contain
 * lines of the form 'plugin.class.name[=/path/to/containing.jar]'.
 *
 * <p>Put all plugin.class.names into the Configuration, and load any
 * specified jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.tool.plugins.
 */
private static Configuration loadPluginsFromConfDir(Configuration conf) {
  if (conf.get(TOOL_PLUGINS_KEY) != null) {
    LOG.debug(TOOL_PLUGINS_KEY + " is set; ignoring tools.d");
    return conf;
  }

  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }

  File confDir = new File(confDirName);
  File toolsDir = new File(confDir, "tools.d");

  if (toolsDir.exists() && toolsDir.isDirectory()) {
    // We have a tools.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String [] fileNames = toolsDir.list();
    Arrays.sort(fileNames);

    for (String fileName : fileNames) {
      File f = new File(toolsDir, fileName);
      if (f.isFile()) {
        loadPluginsFromFile(conf, f);
      }
    }
  }

  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 48, Source file: SqoopTool.java

Example 5: setClassLoader

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Sets the provided classloader on the given configuration and as the thread
 * context classloader if the classloader is not null.
 * @param classLoader
 * @param conf
 */
public static void setClassLoader(ClassLoader classLoader,
    Configuration conf) {
  if (classLoader != null) {
    LOG.info("Setting classloader " + classLoader.getClass().getName() +
        " on the configuration and as the thread context classloader");
    conf.setClassLoader(classLoader);
    Thread.currentThread().setContextClassLoader(classLoader);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 16, Source file: MRApps.java
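
A brief, hedged usage sketch of the helper above (the jar path below is a placeholder, and MRApps is assumed to be org.apache.hadoop.mapreduce.v2.util.MRApps, the class the source file above belongs to): build a URLClassLoader over job-supplied jars and let MRApps.setClassLoader install it on both the Configuration and the current thread's context classloader.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class MRAppsClassLoaderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder jar shipped with the job (for example, user-defined functions).
    URL[] jobJars = { new File("/tmp/job/lib/udfs.jar").toURI().toURL() };
    ClassLoader jobLoader =
        new URLClassLoader(jobJars, Thread.currentThread().getContextClassLoader());
    // Installs jobLoader on conf and as the thread context classloader, and logs the change.
    MRApps.setClassLoader(jobLoader, conf);
  }
}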

Example 6: testGoodClassOrNull

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
@Test
public void testGoodClassOrNull() throws Exception {
  String NAME = "ClassWithNoPackage";
  ClassLoader cl = TestClassWithNoPackage.class.getClassLoader();
  String JAR = JarFinder.getJar(cl.loadClass(NAME));

  // Add testjob jar file to classpath.
  Configuration conf = new Configuration();
  conf.setClassLoader(new URLClassLoader(new URL[]{new URL("file", null, JAR)}, 
                                         null));
  // Get class with no package name.
  String defaultPackage = this.getClass().getPackage().getName();
  Class c = StreamUtil.goodClassOrNull(conf, NAME, defaultPackage);
  assertNotNull("Class " + NAME + " not found!", c);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 16, Source file: TestClassWithNoPackage.java

Example 7: create

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Creates a Configuration with HBase resources
 * @return a Configuration with HBase resources
 */
public static Configuration create() {
  Configuration conf = new Configuration();
  // In case HBaseConfiguration is loaded from a different classloader than
  // Configuration, conf needs to be set with appropriate class loader to resolve
  // HBase resources.
  conf.setClassLoader(HBaseConfiguration.class.getClassLoader());
  return addHbaseResources(conf);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 13, Source file: HBaseConfiguration.java
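
A short usage note on this pattern, as a hedged sketch: because create() pins the Configuration's classloader to the one that loaded HBaseConfiguration, the resources added by addHbaseResources (such as hbase-default.xml and hbase-site.xml) resolve correctly even when the Configuration class itself was loaded by a different (for example, parent) classloader.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HBaseConfigurationSketch {
  public static void main(String[] args) {
    // create() sets the classloader before loading the HBase resource files.
    Configuration conf = HBaseConfiguration.create();
    // Reads a standard HBase property, resolved from hbase-site.xml if present on the classpath.
    System.out.println(conf.get("hbase.zookeeper.quorum"));
  }
}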

Example 8: create

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
public static Configuration create() {
  Configuration conf = new Configuration();
  conf.setClassLoader(AngelConf.class.getClassLoader());
  return addAngelResources(conf);
}
 
Developer ID: Tencent, Project: angel, Lines of code: 6, Source file: AngelConf.java

Example 9: clone

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * @param that Configuration to clone.
 * @return the cloned configuration
 */
public static Configuration clone(final Configuration that) {
  Configuration conf = new Configuration(that);
  conf.setClassLoader(AngelConf.class.getClassLoader());
  return conf;
}
 
Developer ID: Tencent, Project: angel, Lines of code: 10, Source file: AngelConf.java

Example 10: loadManagersFromConfDir

import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * If $SQOOP_CONF_DIR/managers.d/ exists and sqoop.connection.factories is
 * not set, then we look through the files in that directory; they should
 * contain lines of the form mgr.class.name[=/path/to/containing.jar].
 *
 * <p>
 * Put all mgr.class.names into the Configuration, and load any specified
 * jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.connection.factories.
 */
private Configuration loadManagersFromConfDir(Configuration conf) {
  if (conf.get(FACTORY_CLASS_NAMES_KEY) != null) {
    LOG.debug(FACTORY_CLASS_NAMES_KEY + " is set; ignoring managers.d");
    return conf;
  }

  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }

  File confDir = new File(confDirName);
  File mgrDir = new File(confDir, "managers.d");

  if (mgrDir.exists() && mgrDir.isDirectory()) {
    // We have a managers.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String[] fileNames;

    try {
      fileNames = mgrDir.list();
    } catch (SecurityException e) {
      fileNames = null;
    }

    if (null == fileNames) {
      LOG.warn("Sqoop cannot read $SQOOP_CONF_DIR/managers.d. "
          + "Please check the permissions on managers.d.");
      return conf;
    }

    Arrays.sort(fileNames);

    for (String fileName : fileNames) {
      File f = new File(mgrDir, fileName);
      if (f.isFile()) {
        addManagersFromFile(conf, f);
      }
    }

    // Add the default MF.
    addManager(conf, DEFAULT_FACTORY_CLASS_NAMES);
  }

  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 65, Source file: ConnFactory.java


Note: The org.apache.hadoop.conf.Configuration.setClassLoader examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please consult each project's license before distributing or using the code; do not reproduce without permission.