This article collects typical usage examples of the Java method org.apache.hadoop.conf.Configuration.setClassLoader. If you are wondering what Configuration.setClassLoader does and how to use it in practice, the curated code examples below should help. You can also read more about its enclosing class, org.apache.hadoop.conf.Configuration.
Below are 10 code examples of Configuration.setClassLoader, ordered roughly by popularity.
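Before the examples, here is a minimal sketch of the basic pattern (the jar path and class name are hypothetical, purely for illustration): setClassLoader installs a ClassLoader on the Configuration, and subsequent class lookups such as getClassByName go through that loader.
import java.net.URL;
import java.net.URLClassLoader;
import org.apache.hadoop.conf.Configuration;

public class SetClassLoaderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical extra jar; replace with a real path.
    URL extraJar = new URL("file:///tmp/extra-plugins.jar");
    // Chain to the Configuration's current loader so existing classes stay visible.
    ClassLoader loader = new URLClassLoader(new URL[] { extraJar }, conf.getClassLoader());
    conf.setClassLoader(loader);
    // Class lookups on the Configuration now go through the new loader,
    // e.g. conf.getClassByName("com.example.MyPlugin") (hypothetical class name).
  }
}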
Example 1: testClassLoader
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Tests that the class loader set by
 * {@link Configuration#setClassLoader(ClassLoader)}
 * is inherited by any {@link WrappedRecordReader}s created by
 * {@link CompositeRecordReader}.
 */
public void testClassLoader() throws Exception {
  Configuration conf = new Configuration();
  Fake_ClassLoader classLoader = new Fake_ClassLoader();
  conf.setClassLoader(classLoader);
  assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader);
  FileSystem fs = FileSystem.get(conf);
  Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
      .makeQualified(fs);
  Path base = new Path(testdir, "/empty");
  Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
  conf.set(CompositeInputFormat.JOIN_EXPR,
      CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src));
  CompositeInputFormat<NullWritable> inputFormat =
      new CompositeInputFormat<NullWritable>();
  // create dummy TaskAttemptID
  TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
  inputFormat.createRecordReader(
      inputFormat.getSplits(Job.getInstance(conf)).get(0),
      new TaskAttemptContextImpl(conf, tid));
}
Example 2: execute
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Invoke {@link org.apache.hadoop.fs.FsShell#main} after a
 * few cursory checks of the configuration.
 */
public void execute() throws BuildException {
  if (null == cmd)
    throw new BuildException("Missing command (cmd) argument");
  argv.add(0, cmd);
  if (null == confloader) {
    setConf(getProject().getProperty("hadoop.conf.dir"));
  }
  int exit_code = 0;
  try {
    pushContext();
    Configuration conf = new HdfsConfiguration();
    conf.setClassLoader(confloader);
    exit_code = ToolRunner.run(conf, shell,
        argv.toArray(new String[argv.size()]));
    exit_code = postCmd(exit_code);
    if (0 > exit_code) {
      StringBuilder msg = new StringBuilder();
      for (String s : argv)
        msg.append(s + " ");
      msg.append("failed: " + exit_code);
      throw new Exception(msg.toString());
    }
  } catch (Exception e) {
    if (failonerror)
      throw new BuildException(e);
  } finally {
    popContext();
  }
}
Example 3: processGeneralOptions
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Modify the configuration according to user-specified generic options.
 * @param conf Configuration to be modified
 * @param line User-specified generic options
 */
private void processGeneralOptions(Configuration conf,
    CommandLine line) throws IOException {
  if (line.hasOption("fs")) {
    FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
  }
  if (line.hasOption("jt")) {
    String optionValue = line.getOptionValue("jt");
    if (optionValue.equalsIgnoreCase("local")) {
      conf.set("mapreduce.framework.name", optionValue);
    }
    conf.set("yarn.resourcemanager.address", optionValue,
        "from -jt command line option");
  }
  if (line.hasOption("conf")) {
    String[] values = line.getOptionValues("conf");
    for (String value : values) {
      conf.addResource(new Path(value));
    }
  }
  if (line.hasOption('D')) {
    String[] property = line.getOptionValues('D');
    for (String prop : property) {
      String[] keyval = prop.split("=", 2);
      if (keyval.length == 2) {
        conf.set(keyval[0], keyval[1], "from command line");
      }
    }
  }
  if (line.hasOption("libjars")) {
    conf.set("tmpjars",
        validateFiles(line.getOptionValue("libjars"), conf),
        "from -libjars command line option");
    // setting libjars in client classpath
    URL[] libjars = getLibJars(conf);
    if (libjars != null && libjars.length > 0) {
      conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
      Thread.currentThread().setContextClassLoader(
          new URLClassLoader(libjars,
              Thread.currentThread().getContextClassLoader()));
    }
  }
  if (line.hasOption("files")) {
    conf.set("tmpfiles",
        validateFiles(line.getOptionValue("files"), conf),
        "from -files command line option");
  }
  if (line.hasOption("archives")) {
    conf.set("tmparchives",
        validateFiles(line.getOptionValue("archives"), conf),
        "from -archives command line option");
  }
  conf.setBoolean("mapreduce.client.genericoptionsparser.used", true);
  // tokensFile
  if (line.hasOption("tokenCacheFile")) {
    String fileName = line.getOptionValue("tokenCacheFile");
    // check if the local file exists
    FileSystem localFs = FileSystem.getLocal(conf);
    Path p = localFs.makeQualified(new Path(fileName));
    if (!localFs.exists(p)) {
      throw new FileNotFoundException("File " + fileName + " does not exist.");
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("setting conf tokensFile: " + fileName);
    }
    UserGroupInformation.getCurrentUser().addCredentials(
        Credentials.readTokenStorageFile(p, conf));
    conf.set("mapreduce.job.credentials.binary", p.toString(),
        "from -tokenCacheFile command line option");
  }
}
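The -libjars branch above is where setClassLoader matters: once the URLClassLoader wrapping the user jars is installed, those classes become resolvable both through the Configuration and through the thread context classloader. A minimal sketch of such a follow-up lookup (the helper and class name are hypothetical):
// Hypothetical follow-up to the -libjars handling above.
private Class<?> resolveUserClass(Configuration conf, String className)
    throws ClassNotFoundException {
  // The same class is also reachable via the context classloader set alongside it
  // (note that that is a separate URLClassLoader instance):
  //   Thread.currentThread().getContextClassLoader().loadClass(className);
  // Here we resolve through the classloader installed via conf.setClassLoader(...).
  return conf.getClassByName(className);
}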
Example 4: loadPluginsFromConfDir
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * If $SQOOP_CONF_DIR/tools.d/ exists and sqoop.tool.plugins is not set,
 * then we look through the files in that directory; they should contain
 * lines of the form 'plugin.class.name[=/path/to/containing.jar]'.
 *
 * <p>Put all plugin.class.names into the Configuration, and load any
 * specified jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.tool.plugins.
 */
private static Configuration loadPluginsFromConfDir(Configuration conf) {
  if (conf.get(TOOL_PLUGINS_KEY) != null) {
    LOG.debug(TOOL_PLUGINS_KEY + " is set; ignoring tools.d");
    return conf;
  }
  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }
  File confDir = new File(confDirName);
  File toolsDir = new File(confDir, "tools.d");
  if (toolsDir.exists() && toolsDir.isDirectory()) {
    // We have a tools.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String[] fileNames = toolsDir.list();
    Arrays.sort(fileNames);
    for (String fileName : fileNames) {
      File f = new File(toolsDir, fileName);
      if (f.isFile()) {
        loadPluginsFromFile(conf, f);
      }
    }
  }
  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}
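A hedged sketch (not Sqoop's actual code) of how the plugin class names collected above could later be instantiated; because the classloader was set on the Configuration, getClassByName can see classes from the jars loaded out of tools.d:
// Hypothetical helper; TOOL_PLUGINS_KEY and LOG are the fields used above.
private static void instantiatePlugins(Configuration conf) throws Exception {
  for (String pluginClassName : conf.getStrings(TOOL_PLUGINS_KEY, new String[0])) {
    // Resolved through the classloader set by conf.setClassLoader(...).
    Class<?> pluginClass = conf.getClassByName(pluginClassName);
    Object plugin = pluginClass.getDeclaredConstructor().newInstance();
    LOG.debug("Instantiated plugin " + plugin.getClass().getName());
  }
}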
Example 5: setClassLoader
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Sets the provided classloader on the given configuration and as the thread
 * context classloader, if the classloader is not null.
 * @param classLoader the classloader to install; if null, nothing is changed
 * @param conf the configuration to update
 */
public static void setClassLoader(ClassLoader classLoader,
    Configuration conf) {
  if (classLoader != null) {
    LOG.info("Setting classloader " + classLoader.getClass().getName() +
        " on the configuration and as the thread context classloader");
    conf.setClassLoader(classLoader);
    Thread.currentThread().setContextClassLoader(classLoader);
  }
}
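A minimal, hypothetical call site for the helper above (requires java.net.URL and java.net.URLClassLoader): build a loader for a user jar and install it on both the Configuration and the current thread in one step.
private static void applyJobClassLoader(Configuration conf) throws Exception {
  // Hypothetical user jar; in a real job this would come from the job submission.
  ClassLoader jobClassLoader = new URLClassLoader(
      new URL[] { new URL("file:///tmp/job.jar") }, conf.getClassLoader());
  // Delegates to the helper above: sets the loader on conf and as the
  // thread context classloader.
  setClassLoader(jobClassLoader, conf);
}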
Example 6: testGoodClassOrNull
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
@Test
public void testGoodClassOrNull() throws Exception {
  String NAME = "ClassWithNoPackage";
  ClassLoader cl = TestClassWithNoPackage.class.getClassLoader();
  String JAR = JarFinder.getJar(cl.loadClass(NAME));
  // Add testjob jar file to classpath.
  Configuration conf = new Configuration();
  conf.setClassLoader(new URLClassLoader(new URL[]{new URL("file", null, JAR)},
      null));
  // Get class with no package name.
  String defaultPackage = this.getClass().getPackage().getName();
  Class c = StreamUtil.goodClassOrNull(conf, NAME, defaultPackage);
  assertNotNull("Class " + NAME + " not found!", c);
}
Example 7: create
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * Creates a Configuration with HBase resources.
 * @return a Configuration with HBase resources
 */
public static Configuration create() {
  Configuration conf = new Configuration();
  // In case HBaseConfiguration is loaded from a different classloader than
  // Configuration, conf needs to be set with the appropriate class loader to
  // resolve HBase resources.
  conf.setClassLoader(HBaseConfiguration.class.getClassLoader());
  return addHbaseResources(conf);
}
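The in-code comment is the key point: Configuration looks up resources added by name through its classloader, so hbase-default.xml and hbase-site.xml must be visible to the loader installed here. A simplified sketch of what addHbaseResources is expected to do (not the exact HBase implementation, which also performs version checks):
// Simplified sketch, not the exact HBase implementation.
public static Configuration addHbaseResources(Configuration conf) {
  // Looked up via conf's classloader, i.e. the one set by setClassLoader() above.
  conf.addResource("hbase-default.xml");
  conf.addResource("hbase-site.xml");
  return conf;
}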
Example 8: create
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
public static Configuration create() {
  Configuration conf = new Configuration();
  conf.setClassLoader(AngelConf.class.getClassLoader());
  return addAngelResources(conf);
}
Example 9: clone
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * @param that Configuration to clone.
 * @return the cloned configuration
 */
public static Configuration clone(final Configuration that) {
  Configuration conf = new Configuration(that);
  conf.setClassLoader(AngelConf.class.getClassLoader());
  return conf;
}
Example 10: loadManagersFromConfDir
import org.apache.hadoop.conf.Configuration; // import the package/class this method depends on
/**
 * If $SQOOP_CONF_DIR/managers.d/ exists and sqoop.connection.factories is
 * not set, then we look through the files in that directory; they should
 * contain lines of the form mgr.class.name[=/path/to/containing.jar].
 *
 * <p>
 * Put all mgr.class.names into the Configuration, and load any specified
 * jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.connection.factories.
 */
private Configuration loadManagersFromConfDir(Configuration conf) {
  if (conf.get(FACTORY_CLASS_NAMES_KEY) != null) {
    LOG.debug(FACTORY_CLASS_NAMES_KEY + " is set; ignoring managers.d");
    return conf;
  }
  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }
  File confDir = new File(confDirName);
  File mgrDir = new File(confDir, "managers.d");
  if (mgrDir.exists() && mgrDir.isDirectory()) {
    // We have a managers.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String[] fileNames;
    try {
      fileNames = mgrDir.list();
    } catch (SecurityException e) {
      fileNames = null;
    }
    if (null == fileNames) {
      LOG.warn("Sqoop cannot read $SQOOP_CONF_DIR/managers.d. "
          + "Please check the permissions on managers.d.");
      return conf;
    }
    Arrays.sort(fileNames);
    for (String fileName : fileNames) {
      File f = new File(mgrDir, fileName);
      if (f.isFile()) {
        addManagersFromFile(conf, f);
      }
    }
    // Add the default MF.
    addManager(conf, DEFAULT_FACTORY_CLASS_NAMES);
  }
  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}
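For completeness, a hedged sketch (not Sqoop's actual implementation) of how a single managers.d line of the form mgr.class.name[=/path/to/containing.jar] could be parsed before the classloader is set above: the class name is appended to the factory list, and the optional jar path would be added to the classloader.
// Hypothetical parser for one managers.d line; FACTORY_CLASS_NAMES_KEY and LOG
// are the fields used above.
private void parseManagerLine(Configuration conf, String line) {
  String trimmed = line.trim();
  if (trimmed.isEmpty() || trimmed.startsWith("#")) {
    return; // assumption: blank lines and '#' comments are skipped
  }
  String[] parts = trimmed.split("=", 2);
  String className = parts[0].trim();
  String existing = conf.get(FACTORY_CLASS_NAMES_KEY);
  conf.set(FACTORY_CLASS_NAMES_KEY,
      existing == null ? className : existing + "," + className);
  if (parts.length == 2) {
    // The jar path would be added to the classloader that is later installed
    // on the Configuration via setClassLoader(...).
    LOG.debug("Manager " + className + " provided by jar " + parts[1].trim());
  }
}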