Java ConfigurationHelper Class Code Examples

This page collects typical usage examples of the Java class org.apache.sqoop.config.ConfigurationHelper from open-source projects. If you are wondering what ConfigurationHelper is for, how it is used, or what working examples look like, the curated snippets below should help.


The ConfigurationHelper class belongs to the org.apache.sqoop.config package. Twelve code examples of the class are presented below, ordered by popularity.

Example 1: loadPlugins

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
/**
 * Load plugins referenced in sqoop-site.xml or other config (e.g., tools.d/),
 * to allow external tool definitions.
 *
 * @return the Configuration used to load the plugins.
 */
public static Configuration loadPlugins(Configuration conf) {
  conf = loadPluginsFromConfDir(conf);
  List<ToolPlugin> plugins =
      org.apache.sqoop.config.ConfigurationHelper.getInstances(
          conf, TOOL_PLUGINS_KEY, ToolPlugin.class);
  for (ToolPlugin plugin : plugins) {
    LOG.debug("Loading plugin: " + plugin.getClass().getName());
    List<ToolDesc> descriptions = plugin.getTools();
    for (ToolDesc desc : descriptions) {
      LOG.debug("  Adding tool: " + desc.getName()
          + " -> " + desc.getToolClass().getName());
      registerTool(desc);
    }
  }

  return conf;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 24, Source: SqoopTool.java
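
A plugin loaded this way is a ToolPlugin subclass whose fully qualified name appears under the sqoop.tool.plugins configuration key (TOOL_PLUGINS_KEY). The sketch below shows what such a plugin might look like; MyToolPlugin, MyTool, and the tool name are hypothetical, and depending on the Sqoop version ToolPlugin/ToolDesc live in com.cloudera.sqoop.tool or org.apache.sqoop.tool.

import java.util.Collections;
import java.util.List;

import com.cloudera.sqoop.tool.ToolDesc;
import com.cloudera.sqoop.tool.ToolPlugin;

public class MyToolPlugin extends ToolPlugin {
  @Override
  public List<ToolDesc> getTools() {
    // MyTool is a hypothetical SqoopTool subclass; "my-tool" becomes
    // the tool name users invoke from the sqoop command line.
    return Collections.singletonList(
        new ToolDesc("my-tool", MyTool.class, "Runs a custom step"));
  }
}

To activate the plugin, set conf.set("sqoop.tool.plugins", MyToolPlugin.class.getName()) before loadPlugins runs.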

Example 2: setup

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
@Override
protected void setup(Context context)
  throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  helper = new SqoopHCatImportHelper(conf);
  String recordClassName = conf.get(ConfigurationHelper
    .getDbInputClassProperty());
  if (null == recordClassName) {
    throw new IOException("DB Input class name is not set!");
  }
  try {
    Class<?> cls = Class.forName(recordClassName, true,
      Thread.currentThread().getContextClassLoader());
    sqoopRecord = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == sqoopRecord) {
    throw new IOException("Could not instantiate object of type "
      + recordClassName);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 24, Source: NetezzaExternalTableHCatImportMapper.java
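
The property read in setup() is one half of a producer/consumer contract: the job-setup side stores the generated record class under the same key (Example 8 below shows this in context). A minimal job-side sketch, with a placeholder class name:

// Register the generated SqoopRecord subclass under the key the mapper
// reads back in setup(). "com.example.GeneratedRecord" stands in for the
// class Sqoop's code generator emits for the imported table.
Configuration conf = job.getConfiguration();
conf.set(ConfigurationHelper.getDbInputClassProperty(),
    "com.example.GeneratedRecord");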

Example 3: getSplits

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
@Override
public List<InputSplit> getSplits(JobContext job) throws IOException {
  List<InputSplit> splits = new ArrayList<InputSplit>();
  Configuration conf = job.getConfiguration();
  String dsName
      = conf.get(MainframeConfiguration.MAINFRAME_INPUT_DATASET_NAME);
  LOG.info("Datasets to transfer from: " + dsName);
  List<String> datasets = retrieveDatasets(dsName, conf);
  if (datasets.isEmpty()) {
    throw new IOException("No sequential datasets retrieved from " + dsName);
  } else {
    int count = datasets.size();
    int chunks = Math.min(count, ConfigurationHelper.getJobNumMaps(job));
    for (int i = 0; i < chunks; i++) {
      splits.add(new MainframeDatasetInputSplit());
    }

    int j = 0;
    while (j < count) {
      for (InputSplit sp : splits) {
        if (j == count) {
          break;
        }
        ((MainframeDatasetInputSplit)sp).addDataset(datasets.get(j));
        j++;
      }
    }
  }
  return splits;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 31, Source: MainframeDatasetInputFormat.java
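
The while loop above deals datasets onto the splits round-robin, so split sizes never differ by more than one. The following self-contained sketch (with made-up dataset names) reproduces that distribution outside of MapReduce:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RoundRobinDemo {
  public static void main(String[] args) {
    List<String> datasets = Arrays.asList("DS1", "DS2", "DS3", "DS4", "DS5");
    int chunks = Math.min(datasets.size(), 2); // as if getJobNumMaps(job) == 2
    List<List<String>> splits = new ArrayList<List<String>>();
    for (int i = 0; i < chunks; i++) {
      splits.add(new ArrayList<String>());
    }
    // Deal the datasets one at a time across the splits.
    int j = 0;
    while (j < datasets.size()) {
      for (List<String> split : splits) {
        if (j == datasets.size()) {
          break;
        }
        split.add(datasets.get(j++));
      }
    }
    System.out.println(splits); // [[DS1, DS3, DS5], [DS2, DS4]]
  }
}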

Example 4: invokeOutputCommitterForLocalMode

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
public void invokeOutputCommitterForLocalMode(Job job) throws IOException {
  if (ConfigurationHelper.isLocalJobTracker(job.getConfiguration()) && isHadoop1()) {
    // HCatalog 0.11 and earlier have the dedicated class HCatHadoopShims,
    // but it was merged into the Hive shim layer in 0.12+. The following
    // call locates the correct implementation via reflection.
    HadoopShims shims = ShimLoader.getHadoopShims();
    HCatHadoopShims hcatShims = shims.getHCatShim();
    try {
      hcatShims.commitJob(new HCatOutputFormat(), job);
    } catch (Exception e) {
      throw new RuntimeException("Can't explicitly commit job", e);
    }
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 15, Source: SqoopHCatUtilities.java

Example 5: getJobStorage

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
/**
 * Given a storage descriptor, determine the correct JobStorage
 * implementation to use to connect to the storage resource and return an
 * instance of it -- or null if no JobStorage instance is appropriate.
 */
public JobStorage getJobStorage(Map<String, String> descriptor) {
  List<JobStorage> storages = ConfigurationHelper.getInstances(
      conf, AVAILABLE_STORAGES_KEY, JobStorage.class);
  for (JobStorage stor : storages) {
    if (stor.canAccept(descriptor)) {
      return stor;
    }
  }

  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 17, Source: JobStorageFactory.java
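
A hypothetical caller of this factory builds a descriptor map and takes whichever storage accepts it. The key below mirrors the connect-string key Sqoop's HsqldbJobStorage understands, but treat both the key and the URL as illustrative; exception handling is elided:

Map<String, String> descriptor = new TreeMap<String, String>();
descriptor.put("metastore.connect.string",
    "jdbc:hsqldb:hsql://meta-host:16000/sqoop"); // placeholder URL
JobStorage storage = factory.getJobStorage(descriptor);
if (storage != null) {
  storage.open(descriptor);   // throws IOException
  // ... read or create saved jobs, then:
  storage.close();
}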

Example 6: configureNumTasks

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
@Override
protected int configureNumTasks(Job job) throws IOException {
  SqoopOptions options = context.getOptions();
  int numMapTasks = options.getNumMappers();
  if (numMapTasks < 1) {
    numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS;
    LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers.");
  }

  ConfigurationHelper.setJobNumMaps(job, numMapTasks);
  return numMapTasks;
}
 
Developer: unicredit, Project: zSqoop, Lines: 13, Source: PGBulkloadExportJob.java
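
setJobNumMaps here and the getJobNumMaps call in Example 3 are the two ends of one round trip; ConfigurationHelper exists largely to hide the fact that the underlying Hadoop property name changed between Hadoop 1.x and 2.x. A brief sketch (Job.getInstance is the Hadoop 2.x factory method; checked exceptions elided):

Job job = Job.getInstance(new Configuration());
ConfigurationHelper.setJobNumMaps(job, 4);
int numMaps = ConfigurationHelper.getJobNumMaps(job); // returns 4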

Example 7: map

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
public void map(Integer dataSliceId, NullWritable val, Context context)
    throws IOException, InterruptedException {
  conf = context.getConfiguration();
  dbc = new DBConfiguration(conf);
  numMappers = ConfigurationHelper.getConfNumMaps(conf);
  char rd = (char) conf.getInt(DelimiterSet.OUTPUT_RECORD_DELIM_KEY, '\n');
  initNetezzaExternalTableImport(dataSliceId);
  counter = new PerfCounters();
  counter.startClock();
  Text outputRecord = new Text();
  if (extTableThread.isAlive()) {
    try {
      String inputRecord = recordReader.readLine();
      while (inputRecord != null) {
        if (Thread.interrupted()) {
          if (!extTableThread.isAlive()) {
            break;
          }
        }
        outputRecord.set(inputRecord + rd);
        // Maybe we should make the output a String for performance;
        // there is no real benefit in converting it to Text and then
        // back again in our case.
        context.write(outputRecord, NullWritable.get());
        counter.addBytes(1 + inputRecord.length());
        inputRecord = recordReader.readLine();
      }
    } finally {
      recordReader.close();
      extTableThread.join();
      counter.stopClock();
      LOG.info("Transferred " + counter.toString());
      if (extTableThread.hasExceptions()) {
        extTableThread.printException();
        throw new IOException(extTableThread.getException());
      }
    }
  }
}
 
Developer: unicredit, Project: zSqoop, Lines: 40, Source: NetezzaExternalTableImportMapper.java

Example 8: configureInputFormat

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
/**
 * Configure the inputformat to use for the job.
 */
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol) throws ClassNotFoundException,
    IOException {

  ConnManager mgr = getContext().getConnManager();
  String username = options.getUsername();
  if (null == username || username.length() == 0) {
    DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
        options.getConnectString());
  } else {
    DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
        options.getConnectString(), username, options.getPassword());
  }

  String[] colNames = options.getColumns();
  if (null == colNames) {
    colNames = mgr.getColumnNames(tableName);
  }

  String[] sqlColNames = null;
  if (null != colNames) {
    sqlColNames = new String[colNames.length];
    for (int i = 0; i < colNames.length; i++) {
      sqlColNames[i] = mgr.escapeColName(colNames[i]);
    }
  }

  // It's ok if the where clause is null in DBInputFormat.setInput.
  String whereClause = options.getWhereClause();

  // We can't set the class properly in here, because we may not have the
  // jar loaded in this JVM. So we start by calling setInput() with
  // DBWritable and then overriding the string manually.

  // Note that mysqldump also does *not* want a quoted table name.
  DataDrivenDBInputFormat.setInput(job, DBWritable.class, tableName,
      whereClause, mgr.escapeColName(splitByCol), sqlColNames);

  LOG.debug("Using InputFormat: " + inputFormatClass);
  job.setInputFormatClass(getInputFormatClass());

  if (isHCatJob) {
    LOG.debug("Using table class: " + tableClassName);
    job.getConfiguration().set(ConfigurationHelper.getDbInputClassProperty(),
      tableClassName);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 52, Source: NetezzaExternalTableImportJob.java

Example 9: map

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
public void map(Integer dataSliceId, NullWritable val, Context context)
  throws IOException, InterruptedException {
  conf = context.getConfiguration();


  dbc = new DBConfiguration(conf);
  numMappers = ConfigurationHelper.getConfNumMaps(conf);
  char rd = (char) conf.getInt(DelimiterSet.OUTPUT_RECORD_DELIM_KEY, '\n');
  initNetezzaExternalTableImport(dataSliceId);
  counter = new PerfCounters();
  counter.startClock();
  Text outputRecord = new Text();
  if (extTableThread.isAlive()) {
    try {
      String inputRecord = recordReader.readLine();
      while (inputRecord != null) {
        if (Thread.interrupted()) {
          if (!extTableThread.isAlive()) {
            break;
          }
        }
        outputRecord.set(inputRecord + rd);
        // Maybe we should make the output a String for performance;
        // there is no real benefit in converting it to Text and then
        // back again in our case.
        writeRecord(outputRecord, context);
        counter.addBytes(1 + inputRecord.length());
        inputRecord = recordReader.readLine();
      }
    } finally {
      recordReader.close();
      extTableThread.join();
      counter.stopClock();
      LOG.info("Transferred " + counter.toString());
      if (extTableThread.hasExceptions()) {
        extTableThread.printException();
        throw new IOException(extTableThread.getException());
      }
    }
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 42, Source: NetezzaExternalTableImportMapper.java

Example 10: addJars

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
/**
 * Add the Hive and HCatalog jar files to local classpath and dist cache.
 * @throws IOException
 */
public static void addJars(Job job, SqoopOptions options) throws IOException {

  if (ConfigurationHelper.isLocalJobTracker(job.getConfiguration())) {
    LOG.info("Not adding hcatalog jars to distributed cache in local mode");
    return;
  }
  if (options.isSkipDistCache()) {
    LOG.info("Not adding hcatalog jars to distributed cache as requested");
    return;
  }
  Configuration conf = job.getConfiguration();
  String hiveHome = null;
  String hCatHome = null;
  FileSystem fs = FileSystem.getLocal(conf);
  if (options != null) {
    hiveHome = options.getHiveHome();
  }
  if (hiveHome == null) {
    hiveHome = SqoopOptions.getHiveHomeDefault();
  }
  if (options != null) {
    hCatHome = options.getHCatHome();
  }
  if (hCatHome == null) {
    hCatHome = SqoopOptions.getHCatHomeDefault();
  }
  LOG.info("HCatalog job : Hive Home = " + hiveHome);
  LOG.info("HCatalog job:  HCatalog Home = " + hCatHome);

  conf.addResource(hiveHome + HIVESITEXMLPATH);

  // Add these to the 'tmpjars' array, which the MR JobSubmitter
  // will upload to HDFS and put in the DistributedCache libjars.
  List<String> libDirs = new ArrayList<String>();
  libDirs.add(hCatHome + File.separator + HCATSHAREDIR);
  libDirs.add(hCatHome + File.separator + DEFLIBDIR);
  libDirs.add(hiveHome + File.separator + DEFLIBDIR);
  Set<String> localUrls = new HashSet<String>();
  // Add any libjars already specified
  localUrls
    .addAll(conf
      .getStringCollection(
      ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM));
  for (String dir : libDirs) {
    LOG.info("Adding jar files under " + dir + " to distributed cache");
    addDirToCache(new File(dir), fs, localUrls, false);
  }

  // Recursively add all hcatalog storage handler jars
  // The HBase storage handler is getting deprecated post Hive+HCat merge
  String hCatStorageHandlerDir = hCatHome + File.separator
    + "share/hcatalog/storage-handlers";
  LOG.info("Adding jar files under " + hCatStorageHandlerDir
    + " to distributed cache (recursively)");

  addDirToCache(new File(hCatStorageHandlerDir), fs, localUrls, true);

  String tmpjars = conf
    .get(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM);
  StringBuilder sb = new StringBuilder(1024);
  if (null != tmpjars) {
    sb.append(tmpjars);
    sb.append(",");
  }
  sb.append(StringUtils.arrayToString(localUrls.toArray(new String[0])));
  conf.set(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM, sb.toString());
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 72, Source: SqoopHCatUtilities.java
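
The distributed-cache parameter manipulated above boils down to a comma-separated list of file URLs in the job configuration (in Sqoop's ConfigurationConstants, MAPRED_DISTCACHE_CONF_PARAM resolves to "tmpjars"). A stripped-down sketch with placeholder jar paths:

// Append two jars to the distributed-cache list; the MR JobSubmitter
// uploads everything named here and adds it to the task classpath.
Configuration conf = job.getConfiguration();
String existing = conf.get("tmpjars");
String added = "file:///opt/hive/lib/hive-exec.jar,"
    + "file:///opt/hcatalog/share/hcatalog/hcatalog-core.jar";
conf.set("tmpjars", (existing == null) ? added : existing + "," + added);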

Example 11: getRowCountFromHadoop

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
protected long getRowCountFromHadoop(Job job)
  throws IOException, InterruptedException {
  return ConfigurationHelper.getNumMapOutputRecords(job);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 5, Source: JobBase.java

Example 12: parseArguments

import org.apache.sqoop.config.ConfigurationHelper; // import the required package/class
/**
 * Configures a SqoopOptions according to the specified arguments.
 * Reads a set of arguments and uses them to configure a SqoopOptions
 * and its embedded configuration (i.e., through GenericOptionsParser).
 * Stores any unparsed arguments in the extraArguments field.
 *
 * @param args the arguments to parse.
 * @param conf if non-null, set as the configuration for the returned
 * SqoopOptions.
 * @param in a (perhaps partially-configured) SqoopOptions. If null,
 * then a new SqoopOptions will be used. If this has a null configuration
 * and conf is null, then a new Configuration will be inserted in this.
 * @param useGenericOptions if true, will also parse generic Hadoop
 * options into the Configuration.
 * @return a SqoopOptions that is fully configured by a given tool.
 */
public SqoopOptions parseArguments(String [] args,
    Configuration conf, SqoopOptions in, boolean useGenericOptions)
    throws ParseException, SqoopOptions.InvalidOptionsException {
  SqoopOptions out = in;

  if (null == out) {
    out = new SqoopOptions();
  }

  if (null != conf) {
    // User specified a configuration; use it and override any conf
    // that may have been in the SqoopOptions.
    out.setConf(conf);
  } else if (null == out.getConf()) {
    // User did not specify a configuration, but neither did the
    // SqoopOptions. Fabricate a new one.
    out.setConf(new Configuration());
  }

  // This tool is the "active" tool; bind it in the SqoopOptions.
  //TODO(jarcec): Remove the cast when SqoopOptions will be moved
  //              to apache package
  out.setActiveSqoopTool((com.cloudera.sqoop.tool.SqoopTool)this);

  String [] toolArgs = args; // args after generic parser is done.
  if (useGenericOptions) {
    try {
      toolArgs = ConfigurationHelper.parseGenericOptions(
          out.getConf(), args);
    } catch (IOException ioe) {
      ParseException pe = new ParseException(
          "Could not parse generic arguments");
      pe.initCause(ioe);
      throw pe;
    }
  }

  // Parse tool-specific arguments.
  ToolOptions toolOptions = new ToolOptions();
  configureOptions(toolOptions);
  CommandLineParser parser = new SqoopParser();
  CommandLine cmdLine = parser.parse(toolOptions.merge(), toolArgs, true);
  applyOptions(cmdLine, out);
  this.extraArguments = cmdLine.getArgs();
  return out;
}
 
Developer: unicredit, Project: zSqoop, Lines: 63, Source: SqoopTool.java
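
A hypothetical invocation, assuming tool is a concrete SqoopTool such as ImportTool; the generic -D option is consumed into the Configuration before tool-specific parsing begins (checked exceptions elided):

// Connect string, table, and queue name are placeholders.
String[] args = {
    "-D", "mapreduce.job.queuename=etl",
    "--connect", "jdbc:mysql://db-host/sales",
    "--table", "orders"
};
SqoopOptions opts = tool.parseArguments(args, null, null, true);
// opts.getConf().get("mapreduce.job.queuename") -> "etl"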


Note: The org.apache.sqoop.config.ConfigurationHelper examples on this page were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from projects contributed by open-source developers, and copyright remains with the original authors; consult each project's license before using or redistributing the code, and do not republish without permission.