當前位置: 首頁>>代碼示例>>Java>>正文


Java Configuration.addResource方法代碼示例

本文整理匯總了Java中org.apache.hadoop.conf.Configuration.addResource方法的典型用法代碼示例。如果您正苦於以下問題:Java Configuration.addResource方法的具體用法?Java Configuration.addResource怎麽用?Java Configuration.addResource使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.conf.Configuration的用法示例。


在下文中一共展示了Configuration.addResource方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: main

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Triggers a major compaction of the HBase table named by argc[0] and
 * polls every 10 seconds until the compaction state leaves MAJOR.
 * Designed to run as an Oozie java action: it layers the action
 * configuration and (when present) the Hadoop delegation-token file
 * onto the HBase client configuration.
 */
public static void main(String[] argc) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  // Pull in the Oozie action configuration (contains job-level settings).
  conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));

  // Propagate delegation tokens when running inside a secured cluster.
  if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
    conf.set("mapreduce.job.credentials.binary",
             System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
  }

  // try-with-resources closes Admin and Connection on every exit path
  // (the original leaked both).
  try (Connection connection = ConnectionFactory.createConnection(conf);
       Admin admin = connection.getAdmin()) {
    System.out.println("Compacting table " + argc[0]);
    TableName tableName = TableName.valueOf(argc[0]);
    admin.majorCompact(tableName);
    // BUG FIX: the original compared strings with '==' (reference
    // identity), so the loop condition was effectively always false.
    while ("MAJOR".equals(admin.getCompactionState(tableName).toString())) {
      TimeUnit.SECONDS.sleep(10);
      System.out.println("Compacting table " + argc[0]);
    }
    System.out.println("Done compacting table " + argc[0]);
  }
}
 
開發者ID:cbaenziger,項目名稱:Oozie_MajorCompaction_Example,代碼行數:22,代碼來源:MajorCompaction.java

示例2: getConfiguration

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
static Configuration getConfiguration(String jobTrackerSpec)
{
  // Build a client Configuration from an optional job-tracker spec:
  // "host:port" is set directly as mapred.job.tracker, while a bare
  // name refers to a hadoop-<name>.xml resource that must exist on
  // the classpath.
  Configuration conf = new Configuration();
  if (jobTrackerSpec == null) {
    return conf;
  }
  if (jobTrackerSpec.indexOf(":") >= 0) {
    conf.set("mapred.job.tracker", jobTrackerSpec);
    return conf;
  }
  String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
  // Fail fast with a clear message rather than silently ignoring a
  // missing resource.
  if (conf.getResource(classpathFile) == null) {
    throw new RuntimeException(classpathFile + " not found on CLASSPATH");
  }
  conf.addResource(classpathFile);
  return conf;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:18,代碼來源:JobClient.java

示例3: KerberosWebHDFSConnection2

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Builds a Kerberos (SPNEGO) authenticated connection helper for a
 * WebHDFS/HttpFS endpoint. Loads hdfs-site.xml and core-site.xml from the
 * classpath "conf" directory, configures SSL from them, authenticates
 * against the given URL, and stores the resulting AuthenticatedURL for
 * later requests.
 *
 * NOTE(review): an authentication failure is only printed to stderr; the
 * object is still constructed, presumably with an unpopulated token —
 * verify callers tolerate that.
 */
public KerberosWebHDFSConnection2(String httpfsUrl, String principal, String password)  {
        this.httpfsUrl = httpfsUrl;
        this.principal = principal;
        this.password = password;

        // Client-side Hadoop configuration, consumed by the SSLFactory below.
        Configuration conf = new Configuration();
        conf.addResource("conf/hdfs-site.xml");
        conf.addResource("conf/core-site.xml");
        newToken = new AuthenticatedURL.Token();

        KerberosAuthenticator ka = new KerberosAuthenticator();
        ConnectionConfigurator connectionConfigurator = new SSLFactory(SSLFactory.Mode.CLIENT,conf);
        ka.setConnectionConfigurator(connectionConfigurator);

        try{
            // SPNEGO handshake; on success the token is filled in.
            URL url = new URL(httpfsUrl);
            ka.authenticate(url,newToken);
        }catch(Exception e){
            e.printStackTrace();
        }


         this.authenticatedURL = new AuthenticatedURL(ka,connectionConfigurator);
//        this.authenticatedURL = new AuthenticatedURL(
//                new KerberosAuthenticator2(principal, password));
    }
 
開發者ID:Transwarp-DE,項目名稱:Transwarp-Sample-Code,代碼行數:27,代碼來源:KerberosWebHDFSConnection2.java

示例4: main

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Deletes /tmp/file.txt (recursively) from the HDFS nameservice after a
 * Kerberos keytab login, printing whether the delete succeeded.
 */
public static void main(String[] args) {
    String rootPath = "hdfs://nameservice1";
    Path p = new Path(rootPath + "/tmp/file.txt");
    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    try {
        // Kerberos login; comment out these two lines when Kerberos is
        // not enabled on the cluster.
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星環\\hdfs.keytab");
        // BUG FIX: try-with-resources guarantees the FileSystem is closed
        // even when delete() throws (the original leaked it on failure).
        try (FileSystem fs = p.getFileSystem(conf)) {
            boolean b = fs.delete(p, true);
            System.out.println(b);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
開發者ID:Transwarp-DE,項目名稱:Transwarp-Sample-Code,代碼行數:20,代碼來源:Delete.java

示例5: create

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Retrieves {@link CompactionManager}.
 *
 * <p>Loads the standard cluster client configuration from
 * /etc/hadoop/conf, reads the default size threshold from the
 * properties file at CONF_PATH, and picks the implementation based on
 * whether a "targetPath" option is present.
 *
 * @param options map of various options keyed by name
 * @return {@link CompactionManager}
 * @throws RuntimeException if the default threshold cannot be loaded
 */
public static CompactionManager create(Map<String, String> options) throws Exception {
    final Configuration configuration = new Configuration();
    configuration.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));
    configuration.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
    configuration.addResource(new Path("/etc/hadoop/conf/yarn-site.xml"));
    configuration.addResource(new Path("/etc/hadoop/conf/mapred-site.xml"));

    try {
        PropertiesConfiguration config = new PropertiesConfiguration(CONF_PATH);
        DEFAULT_THRESHOLD_IN_BYTES = config.getLong("default.threshold");
    } catch (Exception e) {
        // BUG FIX: pass the exception as the cause instead of concatenating
        // it into the message, which discarded the stack trace.
        throw new RuntimeException("Exception while loading default threshold in bytes", e);
    }
    // A non-blank targetPath selects the copy-to-target implementation;
    // otherwise the table is compacted in place.
    final CompactionCriteria criteria = new CompactionCriteria(options);
    if (StringUtils.isNotBlank(options.get("targetPath"))) {
        return new CompactionManagerImpl(configuration, criteria);
    }
    return new CompactionManagerInPlaceImpl(configuration, criteria);
}
 
開發者ID:ExpediaInceCommercePlatform,項目名稱:dataSqueeze,代碼行數:27,代碼來源:CompactionManagerFactory.java

示例6: initializeSSLConf

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Initialize SSL Config if same is set in conf
 *
 * <p>Locates the SSL client config file in the task's distributed cache,
 * rewrites the trust/key store locations to their localized cache paths,
 * and writes the resulting XML into the task work directory so it can be
 * picked up from there.
 *
 * @throws IOException - If any
 */
private void initializeSSLConf(Context context) throws IOException {
  LOG.info("Initializing SSL configuration");

  // Task-local scratch directory where the rewritten config is staged.
  String workDir = conf.get(JobContext.JOB_LOCAL_DIR) + "/work";
  Path[] cacheFiles = context.getLocalCacheFiles();

  // Defaults-free Configuration that will hold only the SSL settings.
  Configuration sslConfig = new Configuration(false);
  String sslConfFileName = conf.get(DistCpConstants.CONF_LABEL_SSL_CONF);
  Path sslClient = findCacheFile(cacheFiles, sslConfFileName);
  if (sslClient == null) {
    // Best effort: without the cached copy we fall back to whatever
    // ssl-client.xml the classpath provides.
    LOG.warn("SSL Client config file not found. Was looking for " + sslConfFileName +
        " in " + Arrays.toString(cacheFiles));
    return;
  }
  sslConfig.addResource(sslClient);

  // Point the store locations at their localized distributed-cache copies.
  // NOTE(review): findCacheFile can return null here, which would NPE on
  // toString() — TODO confirm the stores are always shipped with the job.
  String trustStoreFile = conf.get("ssl.client.truststore.location");
  Path trustStorePath = findCacheFile(cacheFiles, trustStoreFile);
  sslConfig.set("ssl.client.truststore.location", trustStorePath.toString());

  String keyStoreFile = conf.get("ssl.client.keystore.location");
  Path keyStorePath = findCacheFile(cacheFiles, keyStoreFile);
  sslConfig.set("ssl.client.keystore.location", keyStorePath.toString());

  try {
    OutputStream out = new FileOutputStream(workDir + "/" + sslConfFileName);
    try {
      sslConfig.writeXml(out);
    } finally {
      out.close();
    }
    conf.set(DistCpConstants.CONF_LABEL_SSL_KEYSTORE, sslConfFileName);
  } catch (IOException e) {
    LOG.warn("Unable to write out the ssl configuration. " +
        "Will fall back to default ssl-client.xml in class path, if there is one", e);
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:43,代碼來源:CopyMapper.java

示例7: setup

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
@BeforeClass
public static void setup() throws Exception {
  // Server configuration with a small thread pool, sufficient for tests.
  conf = new Configuration();
  conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);

  // Start from a clean base directory for the generated keystores.
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);

  // Generate key/trust stores plus ssl-server.xml and ssl-client.xml,
  // then load those two resources into a defaults-free Configuration.
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  Configuration sslConf = new Configuration(false);
  sslConf.addResource("ssl-server.xml");
  sslConf.addResource("ssl-client.xml");

  // Client-side SSL factory used by the test requests.
  clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
  clientSslFactory.init();

  // HTTPS-only server wired to the generated server-side stores.
  server = new HttpServer.Builder()
      .setName("test")
      .addEndpoint(new URI("https://localhost"))
      .setConf(conf)
      .keyPassword(HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.keypassword",
          null))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          HBaseConfiguration.getPassword(sslConf, "ssl.server.keystore.password", null),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
          sslConf.get("ssl.server.truststore.type", "jks")).build();
  server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
  server.start();
  baseUrl = new URL("https://"
      + NetUtils.getHostPortString(server.getConnectorAddress(0)));
  LOG.info("HTTP server started: " + baseUrl);
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:38,代碼來源:TestSSLHttpServer.java

示例8: loadHadoopConf

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
private Configuration loadHadoopConf(File dir) throws IOException {
  // Aggregate whichever of the known Hadoop config files exist under
  // 'dir' into a fresh, defaults-free Configuration.
  Configuration hadoopConf = new Configuration(false);
  for (String name : HADOOP_CONF_FILES) {
    File candidate = new File(dir, name);
    if (!candidate.exists()) {
      continue;  // missing files are simply skipped
    }
    hadoopConf.addResource(new Path(candidate.getAbsolutePath()));
  }
  return hadoopConf;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:11,代碼來源:FileSystemAccessService.java

示例9: setUp

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
@BeforeClass
public static void setUp() throws Exception {
  // Register the dummy servlet filter under test via the filter
  // initializer property.
  Configuration conf = new Configuration();
  conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
          DummyFilterInitializer.class.getName());

  // Start from a clean base directory for the generated keystores.
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);

  // Generate key/trust stores plus ssl-server.xml and ssl-client.xml,
  // then load those two resources into a defaults-free Configuration.
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  Configuration sslConf = new Configuration(false);
  sslConf.addResource("ssl-server.xml");
  sslConf.addResource("ssl-client.xml");

  // Client-side SSL factory used by the test requests.
  clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
  clientSslFactory.init();

  // Server exposing both an HTTP and an HTTPS endpoint, wired to the
  // generated server-side stores.
  server = new HttpServer2.Builder()
          .setName("test")
          .addEndpoint(new URI("http://localhost"))
          .addEndpoint(new URI("https://localhost"))
          .setConf(conf)
          .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
          .keyStore(sslConf.get("ssl.server.keystore.location"),
                  sslConf.get("ssl.server.keystore.password"),
                  sslConf.get("ssl.server.keystore.type", "jks"))
          .trustStore(sslConf.get("ssl.server.truststore.location"),
                  sslConf.get("ssl.server.truststore.password"),
                  sslConf.get("ssl.server.truststore.type", "jks")).build();
  server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
  server.start();
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:36,代碼來源:TestHttpCookieFlag.java

示例10: SLSRunner

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Builds an SLS runner: captures the run parameters, loads the runner
 * configuration from sls-runner.xml, sizes the runner thread pool, and
 * resolves the AM simulator class registered for each AM type.
 *
 * @throws ClassNotFoundException if a configured AM simulator class
 *         cannot be loaded
 */
public SLSRunner(boolean isSLS, String inputTraces[], String nodeFile,
                 String outputDir, Set<String> trackedApps,
                 boolean printsimulation)
        throws IOException, ClassNotFoundException {
  this.isSLS = isSLS;
  // Defensive copy so later caller-side mutation cannot affect the run.
  this.inputTraces = inputTraces.clone();
  this.nodeFile = nodeFile;
  this.trackedApps = trackedApps;
  this.printSimulation = printsimulation;
  metricsOutputDir = outputDir;
  
  nmMap = new HashMap<NodeId, NMSimulator>();
  queueAppNumMap = new HashMap<String, Integer>();
  amMap = new HashMap<String, AMSimulator>();
  amClassMap = new HashMap<String, Class>();
  
  // runner configuration (defaults-free; only sls-runner.xml applies)
  conf = new Configuration(false);
  conf.addResource("sls-runner.xml");
  // runner
  int poolSize = conf.getInt(SLSConfiguration.RUNNER_POOL_SIZE, 
                              SLSConfiguration.RUNNER_POOL_SIZE_DEFAULT); 
  SLSRunner.runner.setQueueSize(poolSize);
  // <AMType, Class> map: every property key prefixed with AM_TYPE maps an
  // AM type name to its simulator implementation class.
  for (Map.Entry e : conf) {
    String key = e.getKey().toString();
    if (key.startsWith(SLSConfiguration.AM_TYPE)) {
      String amType = key.substring(SLSConfiguration.AM_TYPE.length());
      amClassMap.put(amType, Class.forName(conf.get(key)));
    }
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:33,代碼來源:SLSRunner.java

示例11: refresh

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
public void refresh(Configuration conf,
                                        PolicyProvider provider) {
  // The 'hadoop.policy.file' system property may override the default
  // policy file name.
  final String policyFile =
      System.getProperty("hadoop.policy.file", HADOOP_POLICY_FILE);

  // Layer the policy file on top of a copy of the caller's configuration
  // so the original is left untouched, then delegate.
  final Configuration policyConf = new Configuration(conf);
  policyConf.addResource(policyFile);
  refreshWithLoadedConfiguration(policyConf, provider);
}
 
開發者ID:nucypher,項目名稱:hadoop-oss,代碼行數:12,代碼來源:ServiceAuthorizationManager.java

示例12: processGeneralOptions

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Modify configuration according user-specified generic options
 * @param conf Configuration to be modified
 * @param line User-specified generic options
 */
private void processGeneralOptions(Configuration conf,
    CommandLine line) throws IOException {
  // -fs: default filesystem URI.
  if (line.hasOption("fs")) {
    FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
  }

  // -jt: resource manager address; the special value "local" selects the
  // local MapReduce framework.
  if (line.hasOption("jt")) {
    String optionValue = line.getOptionValue("jt");
    if (optionValue.equalsIgnoreCase("local")) {
      conf.set("mapreduce.framework.name", optionValue);
    }

    conf.set("yarn.resourcemanager.address", optionValue, 
        "from -jt command line option");
  }
  // -conf: additional configuration resource files, applied in order.
  if (line.hasOption("conf")) {
    String[] values = line.getOptionValues("conf");
    for(String value : values) {
      conf.addResource(new Path(value));
    }
  }

  // -D key=value: individual property overrides (split on the first '=').
  if (line.hasOption('D')) {
    String[] property = line.getOptionValues('D');
    for(String prop : property) {
      String[] keyval = prop.split("=", 2);
      if (keyval.length == 2) {
        conf.set(keyval[0], keyval[1], "from command line");
      }
    }
  }

  // -libjars: jars to ship with the job; also prepended to the client
  // classpath so locally-referenced classes resolve immediately.
  if (line.hasOption("libjars")) {
    conf.set("tmpjars", 
             validateFiles(line.getOptionValue("libjars"), conf),
             "from -libjars command line option");
    //setting libjars in client classpath
    URL[] libjars = getLibJars(conf);
    if(libjars!=null && libjars.length>0) {
      conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
      Thread.currentThread().setContextClassLoader(
          new URLClassLoader(libjars, 
              Thread.currentThread().getContextClassLoader()));
    }
  }
  // -files / -archives: auxiliary files and archives for the job.
  if (line.hasOption("files")) {
    conf.set("tmpfiles", 
             validateFiles(line.getOptionValue("files"), conf),
             "from -files command line option");
  }
  if (line.hasOption("archives")) {
    conf.set("tmparchives", 
              validateFiles(line.getOptionValue("archives"), conf),
              "from -archives command line option");
  }
  // Record that the generic parser ran, so downstream code can detect
  // when it did not.
  conf.setBoolean("mapreduce.client.genericoptionsparser.used", true);
  
  // tokensFile
  if(line.hasOption("tokenCacheFile")) {
    String fileName = line.getOptionValue("tokenCacheFile");
    // check if the local file exists
    FileSystem localFs = FileSystem.getLocal(conf);
    Path p = localFs.makeQualified(new Path(fileName));
    if (!localFs.exists(p)) {
        throw new FileNotFoundException("File "+fileName+" does not exist.");
    }
    if(LOG.isDebugEnabled()) {
      LOG.debug("setting conf tokensFile: " + fileName);
    }
    // Load the delegation tokens into the current user's credentials and
    // point the job at the binary token file.
    UserGroupInformation.getCurrentUser().addCredentials(
        Credentials.readTokenStorageFile(p, conf));
    conf.set("mapreduce.job.credentials.binary", p.toString(),
             "from -tokenCacheFile command line option");

  }
}
 
開發者ID:nucypher,項目名稱:hadoop-oss,代碼行數:82,代碼來源:GenericOptionsParser.java

示例13: Hdfs2Qiniu

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
public Hdfs2Qiniu(Config uploadCfg, int worker) throws IOException, NoSuchAlgorithmException {
    // Load every HDFS config file from the comma-separated list into a
    // fresh Hadoop configuration, then open the filesystem it describes.
    Configuration cfg = new Configuration();
    for (String xml : uploadCfg.hdfsConfigs.split(",")) {
        cfg.addResource(new Path("file://" + xml.trim()));
    }
    this.hdfsFileSystem = FileSystem.get(cfg);
    this.uploadCfg = uploadCfg;
    this.worker = worker;
    // Qiniu credentials for the upload side.
    this.auth = Auth.create(this.uploadCfg.accessKey, this.uploadCfg.secretKey);
    this.initLogging();
}
 
開發者ID:jemygraw,項目名稱:Hdfs2Qiniu,代碼行數:13,代碼來源:Hdfs2Qiniu.java

示例14: readConfiguration

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
/**
 * Read the {@link Configuration} stored in the byte stream.
 *
 * @param bytes
 *          serialized configuration XML to read from
 * @return A valid configuration
 */
private static Configuration readConfiguration(byte[] bytes)
    throws IOException {
  // Parse the XML straight from memory; Configuration(false) keeps the
  // Hadoop default resources out of the result.
  Configuration conf = new Configuration(false);
  conf.addResource(new ByteArrayInputStream(bytes));
  return conf;
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:15,代碼來源:Constraints.java

示例15: loadConfFile

import org.apache.hadoop.conf.Configuration; //導入方法依賴的package包/類
@Override
public Configuration loadConfFile() throws IOException {
  // Open the job's persisted conf file through a FileContext bound to its
  // URI scheme, and parse it into a defaults-free Configuration; the path
  // string names the resource for error reporting.
  final Path path = getConfFile();
  final FileContext context = FileContext.getFileContext(path.toUri(), conf);
  final Configuration loaded = new Configuration(false);
  loaded.addResource(context.open(path), path.toString());
  return loaded;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:9,代碼來源:JobImpl.java


注:本文中的org.apache.hadoop.conf.Configuration.addResource方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。