当前位置: 首页>>代码示例>>Java>>正文


Java DefaultStringifier.store方法代码示例

本文整理汇总了Java中org.apache.hadoop.io.DefaultStringifier.store方法的典型用法代码示例。如果您正苦于以下问题:Java DefaultStringifier.store方法的具体用法?Java DefaultStringifier.store怎么用?Java DefaultStringifier.store使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.io.DefaultStringifier的用法示例。


在下文中一共展示了DefaultStringifier.store方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: configureGenericRecordExportInputFormat

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Publishes the column-name -> Java-type mapping for a generic-record
 * (Avro) export into the job configuration, keyed by
 * {@code AvroExportMapper.AVRO_COLUMN_TYPES_MAP}.
 *
 * @param job       the export job whose configuration is updated
 * @param tableName the export target table (null for query/procedure mode)
 * @throws IOException if the mapping cannot be serialized into the config
 */
private void configureGenericRecordExportInputFormat(Job job, String tableName)
    throws IOException {
  ConnManager mgr = context.getConnManager();
  // Column SQL types come either from the stored procedure signature or
  // from the table/query metadata, depending on how the job was invoked.
  Map<String, Integer> sqlTypeByColumn =
      (options.getCall() != null)
          ? mgr.getColumnTypesForProcedure(options.getCall())
          : mgr.getColumnTypes(tableName, options.getSqlQuery());
  boolean userListedColumns = options.getColumns() != null;
  MapWritable javaTypeByColumn = new MapWritable();
  for (Map.Entry<String, Integer> entry : sqlTypeByColumn.entrySet()) {
    String column = entry.getKey();
    if (userListedColumns) {
      // Map the DB-reported name onto the user-specified spelling;
      // columns the user did not request resolve to null and are skipped.
      column = options.getColumnNameCaseInsensitive(column);
    }
    if (column == null) {
      continue;
    }
    javaTypeByColumn.put(
        new Text(column),
        new Text(mgr.toJavaType(tableName, column, entry.getValue())));
  }
  DefaultStringifier.store(job.getConfiguration(), javaTypeByColumn,
      AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
 
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:27,代码来源:JdbcExportJob.java

示例2: checkOutputSpecs

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Validates the job's output configuration before execution: the output
 * query is mandatory, an unsupported batch-size setting triggers a
 * warning, and an explicit query language is sanity-checked. Finally the
 * resolved forest host list is serialized into the configuration under
 * {@code OUTPUT_FOREST_HOST} for the output tasks to read back.
 */
@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
throws IOException {
    // This format cannot run without an output query.
    if (conf.get(OUTPUT_QUERY) == null) {
        throw new IllegalArgumentException(OUTPUT_QUERY +
        " is not specified.");
    }
    // Batch size has no effect here; tell the user it is ignored.
    if (conf.get(BATCH_SIZE) != null) {
        LOG.warn("Config entry for " +
                "\"mapreduce.marklogic.output.batchsize\" is not " +
                "supported for " + this.getClass().getName() +
                " and will be ignored.");
    }
    String lang = conf.get(OUTPUT_QUERY_LANGUAGE);
    if (lang != null) {
        // Rejects unknown query languages up front.
        InternalUtilities.checkQueryLanguage(lang);
    }
    // Persist the forest host list so tasks need not re-query it.
    DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
 
开发者ID:marklogic,项目名称:marklogic-contentpump,代码行数:23,代码来源:KeyValueOutputFormat.java

示例3: configureInputFormat

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Configures the export input format; after the standard setup, Avro
 * data files additionally get a column-name -> Java-type map stored in
 * the job configuration under
 * {@code AvroExportMapper.AVRO_COLUMN_TYPES_MAP}.
 *
 * @throws ClassNotFoundException if the table class cannot be loaded
 * @throws IOException            if configuration serialization fails
 */
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol)
    throws ClassNotFoundException, IOException {

  fileType = getInputFileType();

  super.configureInputFormat(job, tableName, tableClassName, splitByCol);

  // Only Avro data files need the extra type-map setup below.
  if (fileType != FileType.AVRO_DATA_FILE) {
    return;
  }

  LOG.debug("Configuring for Avro export");
  ConnManager mgr = context.getConnManager();
  Map<String, Integer> sqlTypeByColumn =
      mgr.getColumnTypes(tableName, options.getSqlQuery());
  MapWritable javaTypeByColumn = new MapWritable();
  for (Map.Entry<String, Integer> col : sqlTypeByColumn.entrySet()) {
    javaTypeByColumn.put(
        new Text(col.getKey()),
        new Text(mgr.toJavaType(tableName, col.getKey(), col.getValue())));
  }
  DefaultStringifier.store(job.getConfiguration(), javaTypeByColumn,
      AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
 
开发者ID:infinidb,项目名称:sqoop,代码行数:27,代码来源:JdbcExportJob.java

示例4: checkOutputSpecs

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Runs the superclass output validation, then additionally serializes
 * the mimetype lookup map into the configuration under
 * {@code ConfigConstants.CONF_MIMETYPES} so tasks can read it back.
 */
@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
    throws IOException {
    // Common output checks are handled by the base class.
    super.checkOutputSpecs(conf, cs);

    // Publish the mimetype map to the job configuration.
    DefaultStringifier.store(
        conf, getMimetypesMap(), ConfigConstants.CONF_MIMETYPES);
}
 
开发者ID:marklogic,项目名称:marklogic-contentpump,代码行数:10,代码来源:TransformOutputFormat.java

示例5: checkOutputSpecs

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Validates the output configuration: warns that the batch-size setting
 * is ignored by this format, then serializes the resolved forest host
 * list into the configuration under {@code OUTPUT_FOREST_HOST}.
 */
@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
throws IOException {
    // Batch size is meaningless for this format; warn instead of failing.
    if (conf.get(BATCH_SIZE) != null) {
        LOG.warn("Config entry for " +
                "\"mapreduce.marklogic.output.batchsize\" is not " +
                "supported for " + getClass().getName() +
                " and will be ignored.");
    }
    // Persist the forest host list for the output tasks.
    DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
 
开发者ID:marklogic,项目名称:marklogic-contentpump,代码行数:14,代码来源:NodeOutputFormat.java

示例6: checkOutputSpecs

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Validates the output configuration: emits a warning when the
 * unsupported batch-size entry is present, then serializes the resolved
 * forest host list under {@code OUTPUT_FOREST_HOST}.
 */
@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
throws IOException {
    // Batch size is not honored by this format; surface that clearly.
    if (conf.get(BATCH_SIZE) != null) {
        String warning = "Config entry for "
                + "\"mapreduce.marklogic.output.batchsize\" is not "
                + "supported for " + getClass().getName()
                + " and will be ignored.";
        LOG.warn(warning);
    }
    // Make the forest host list available to the output tasks.
    DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
 
开发者ID:marklogic,项目名称:marklogic-contentpump,代码行数:14,代码来源:PropertyOutputFormat.java

示例7: storeToConf

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Stores the given object in the configuration under the given dataKey
 * @param obj the object to store
 * @param conf the configuration to store the object into
 * @param dataKey the key to store the data
 */
/**
 * Serializes {@code obj} into the configuration under {@code dataKey},
 * recording its concrete class under {@code dataKey + "._class"} so the
 * object can later be deserialized without a compile-time type.
 *
 * @param obj the object to store
 * @param conf the configuration to store the object into
 * @param dataKey the key to store the data
 * @throws IOException if the object cannot be stringified
 */
public static<T> void storeToConf(T obj, Configuration conf, String dataKey)
  throws IOException {
  // Remember the runtime class alongside the data for later loading.
  conf.set(dataKey + "._class", obj.getClass().getName());
  DefaultStringifier.store(conf, obj, dataKey);
}
 
开发者ID:jianglibo,项目名称:gora-boot,代码行数:13,代码来源:IOUtils.java

示例8: configureInputFormat

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Configures the export input format. HCatalog jobs delegate entirely to
 * {@code SqoopHCatUtilities}; Avro data files additionally get a
 * column-name -> Java-type map stored in the job configuration under
 * {@code AvroExportMapper.AVRO_COLUMN_TYPES_MAP}.
 *
 * @throws ClassNotFoundException if the table class cannot be loaded
 * @throws IOException            if configuration serialization fails
 */
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol)
    throws ClassNotFoundException, IOException {

  fileType = getInputFileType();

  super.configureInputFormat(job, tableName, tableClassName, splitByCol);

  // HCatalog export has its own dedicated configuration path.
  if (isHCatJob) {
    SqoopHCatUtilities.configureExportInputFormat(options, job,
      context.getConnManager(), tableName, job.getConfiguration());
    return;
  }
  // Only Avro data files need the extra type-map setup below.
  if (fileType != FileType.AVRO_DATA_FILE) {
    return;
  }

  LOG.debug("Configuring for Avro export");
  ConnManager mgr = context.getConnManager();
  // Column SQL types come from the stored procedure when one is used,
  // otherwise from the table/query metadata.
  Map<String, Integer> sqlTypeByColumn =
      (options.getCall() != null)
          ? mgr.getColumnTypesForProcedure(options.getCall())
          : mgr.getColumnTypes(tableName, options.getSqlQueryRich());
  MapWritable javaTypeByColumn = new MapWritable();
  for (Map.Entry<String, Integer> col : sqlTypeByColumn.entrySet()) {
    javaTypeByColumn.put(
        new Text(col.getKey()),
        new Text(mgr.toJavaType(tableName, col.getKey(), col.getValue())));
  }
  DefaultStringifier.store(job.getConfiguration(), javaTypeByColumn,
      AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
 
开发者ID:unicredit,项目名称:zSqoop,代码行数:38,代码来源:JdbcExportJob.java

示例9: storeToConf

import org.apache.hadoop.io.DefaultStringifier; //导入方法依赖的package包/类
/**
 * Stores the given object in the configuration under the given dataKey.
 *
 * @param obj the object to store.
 * @param conf the configuration to store the object into.
 * @param dataKey the key to store the data.
 * @param <T> the given object class type.
 * @throws IOException when failed storing the given data in Hadoop conf.
 */
/**
 * Stores the given object in the configuration under the given dataKey,
 * keeping its concrete class name under {@code dataKey + "._class"} so a
 * later load can reconstruct it without compile-time type information.
 *
 * @param obj the object to store.
 * @param conf the configuration to store the object into.
 * @param dataKey the key to store the data.
 * @param <T> the given object class type.
 * @throws IOException when failed storing the given data in Hadoop conf.
 */
public static<T> void storeToConf(T obj, Configuration conf, String dataKey)
  throws IOException {
  // Record the runtime class next to the serialized payload.
  conf.set(dataKey + "._class", obj.getClass().getName());
  DefaultStringifier.store(conf, obj, dataKey);
}
 
开发者ID:apache,项目名称:gora,代码行数:16,代码来源:IOUtils.java


注:本文中的org.apache.hadoop.io.DefaultStringifier.store方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。