This article collects typical usage examples of the Java method org.apache.hadoop.io.DefaultStringifier.store. If you are wondering what DefaultStringifier.store does, how to call it, and where it is used in practice, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.DefaultStringifier.
The following presents 8 code examples of DefaultStringifier.store, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java examples.
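Before the examples, it may help to see the full store/load round trip in one self-contained sketch. This is an illustration assuming Hadoop's default Writable serialization; the key name "demo.key" is an arbitrary choice for demonstration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Text;

public class StringifierRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // store() serializes the object, Base64-encodes the bytes, and sets
    // the resulting string in the configuration under the given key.
    DefaultStringifier.store(conf, new Text("hello"), "demo.key");
    // load() reads the string back and deserializes it into the
    // requested class.
    Text restored = DefaultStringifier.load(conf, "demo.key", Text.class);
    System.out.println(restored); // prints "hello"
  }
}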
Example 1: configureGenericRecordExportInputFormat
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

private void configureGenericRecordExportInputFormat(Job job, String tableName)
    throws IOException {
  ConnManager connManager = context.getConnManager();
  Map<String, Integer> columnTypeInts;
  if (options.getCall() == null) {
    columnTypeInts = connManager.getColumnTypes(
        tableName,
        options.getSqlQuery());
  } else {
    columnTypeInts = connManager.getColumnTypesForProcedure(
        options.getCall());
  }
  String[] specifiedColumns = options.getColumns();
  MapWritable columnTypes = new MapWritable();
  for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
    String column = e.getKey();
    column = (specifiedColumns == null)
        ? column : options.getColumnNameCaseInsensitive(column);
    if (column != null) {
      Text columnName = new Text(column);
      Text columnType = new Text(
          connManager.toJavaType(tableName, column, e.getValue()));
      columnTypes.put(columnName, columnType);
    }
  }
  DefaultStringifier.store(job.getConfiguration(), columnTypes,
      AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
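The map stored by this method is intended to be read back on the task side with the matching load call. A minimal hedged sketch, assuming it runs inside a mapper's setup(Context) method (an illustration of the pattern, not Sqoop's actual AvroExportMapper code):

// Read the column-types map back using the same key the driver used
// with store().
MapWritable columnTypes = DefaultStringifier.load(
    context.getConfiguration(),
    AvroExportMapper.AVRO_COLUMN_TYPES_MAP,
    MapWritable.class);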
Example 2: checkOutputSpecs
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
    throws IOException {
  // check for required configuration
  if (conf.get(OUTPUT_QUERY) == null) {
    throw new IllegalArgumentException(OUTPUT_QUERY +
        " is not specified.");
  }
  // warn against unsupported configuration
  if (conf.get(BATCH_SIZE) != null) {
    LOG.warn("Config entry for " +
        "\"mapreduce.marklogic.output.batchsize\" is not " +
        "supported for " + this.getClass().getName() +
        " and will be ignored.");
  }
  String queryLanguage = conf.get(OUTPUT_QUERY_LANGUAGE);
  if (queryLanguage != null) {
    InternalUtilities.checkQueryLanguage(queryLanguage);
  }
  // store hosts into config system
  DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
Example 3: configureInputFormat
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol)
    throws ClassNotFoundException, IOException {
  fileType = getInputFileType();
  super.configureInputFormat(job, tableName, tableClassName, splitByCol);
  if (fileType == FileType.AVRO_DATA_FILE) {
    LOG.debug("Configuring for Avro export");
    ConnManager connManager = context.getConnManager();
    Map<String, Integer> columnTypeInts =
        connManager.getColumnTypes(tableName, options.getSqlQuery());
    MapWritable columnTypes = new MapWritable();
    for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
      Text columnName = new Text(e.getKey());
      Text columnText = new Text(
          connManager.toJavaType(tableName, e.getKey(), e.getValue()));
      columnTypes.put(columnName, columnText);
    }
    DefaultStringifier.store(job.getConfiguration(), columnTypes,
        AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
  }
}
Example 4: checkOutputSpecs
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
    throws IOException {
  super.checkOutputSpecs(conf, cs);
  // store mimetypes map into config system
  DefaultStringifier.store(conf, getMimetypesMap(),
      ConfigConstants.CONF_MIMETYPES);
}
Example 5: checkOutputSpecs
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs)
    throws IOException {
  // warn against unsupported configuration
  if (conf.get(BATCH_SIZE) != null) {
    LOG.warn("Config entry for " +
        "\"mapreduce.marklogic.output.batchsize\" is not " +
        "supported for " + this.getClass().getName() +
        " and will be ignored.");
  }
  // store hosts into config system
  DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
Example 6: storeToConf
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

/**
 * Stores the given object in the configuration under the given dataKey.
 * @param obj the object to store
 * @param conf the configuration to store the object into
 * @param dataKey the key to store the data under
 */
public static <T> void storeToConf(T obj, Configuration conf, String dataKey)
    throws IOException {
  String classKey = dataKey + "._class";
  conf.set(classKey, obj.getClass().getName());
  DefaultStringifier.store(conf, obj, dataKey);
}
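The extra "._class" entry records the concrete class name, so a reader can reconstruct the object without knowing its type up front. A hedged companion sketch of such a reader (the loadFromConf helper below is modeled on storeToConf as an assumption, not a confirmed API of the same library):

// Hypothetical counterpart to storeToConf: resolve the stored class from
// the "._class" entry, then deserialize the object stored under dataKey.
@SuppressWarnings("unchecked")
public static <T> T loadFromConf(Configuration conf, String dataKey)
    throws IOException {
  try {
    Class<T> clazz = (Class<T>) Class.forName(conf.get(dataKey + "._class"));
    return DefaultStringifier.load(conf, dataKey, clazz);
  } catch (ClassNotFoundException e) {
    throw new IOException("Could not resolve stored class", e);
  }
}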
Example 7: configureInputFormat
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol)
    throws ClassNotFoundException, IOException {
  fileType = getInputFileType();
  super.configureInputFormat(job, tableName, tableClassName, splitByCol);
  if (isHCatJob) {
    SqoopHCatUtilities.configureExportInputFormat(options, job,
        context.getConnManager(), tableName, job.getConfiguration());
    return;
  } else if (fileType == FileType.AVRO_DATA_FILE) {
    LOG.debug("Configuring for Avro export");
    ConnManager connManager = context.getConnManager();
    Map<String, Integer> columnTypeInts;
    if (options.getCall() == null) {
      columnTypeInts = connManager.getColumnTypes(
          tableName,
          options.getSqlQueryRich());
    } else {
      columnTypeInts = connManager.getColumnTypesForProcedure(
          options.getCall());
    }
    MapWritable columnTypes = new MapWritable();
    for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
      Text columnName = new Text(e.getKey());
      Text columnText = new Text(
          connManager.toJavaType(tableName, e.getKey(), e.getValue()));
      columnTypes.put(columnName, columnText);
    }
    DefaultStringifier.store(job.getConfiguration(), columnTypes,
        AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
  }
}
Example 8: storeToConf
import org.apache.hadoop.io.DefaultStringifier; // import the package/class this method depends on

/**
 * Stores the given object in the configuration under the given dataKey.
 *
 * @param obj the object to store.
 * @param conf the configuration to store the object into.
 * @param dataKey the key to store the data under.
 * @param <T> the type of the given object.
 * @throws IOException if storing the object in the Hadoop configuration fails.
 */
public static <T> void storeToConf(T obj, Configuration conf, String dataKey)
    throws IOException {
  String classKey = dataKey + "._class";
  conf.set(classKey, obj.getClass().getName());
  DefaultStringifier.store(conf, obj, dataKey);
}