

Java DefaultStringifier.load Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.DefaultStringifier.load, drawn from open-source projects. If you are unsure what DefaultStringifier.load does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of org.apache.hadoop.io.DefaultStringifier, the class this method belongs to.


The following presents 11 code examples of the DefaultStringifier.load method, sorted by popularity by default.
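Before turning to the project examples, here is a minimal round-trip sketch of the API contract (the key name "example.version" is invented for illustration): DefaultStringifier.store serializes an object using the framework registered under io.serializations (WritableSerialization by default), Base64-encodes the bytes, and stores the resulting string in the Configuration; DefaultStringifier.load reverses the process, which is what every example below relies on.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Text;

public class DefaultStringifierRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Serialize a Writable and stash it in the configuration under a key.
    // "example.version" is an illustrative key, not one used by any project below.
    DefaultStringifier.store(conf, new Text("9.0-ml"), "example.version");
    // Later (typically in a mapper's or reducer's setup()), restore it by key.
    Text restored = DefaultStringifier.load(conf, "example.version", Text.class);
    System.out.println(restored); // prints 9.0-ml
  }
}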

Example 1: setup

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class<?> cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 32, Source: ParquetExportMapper.java
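For the load in setup() above to succeed, the driver that configured the export job must have stored the same MapWritable under AVRO_COLUMN_TYPES_MAP before submitting it. A sketch of that storing side follows; the key literal and the helper class are illustrative assumptions, and the real Sqoop driver code differs in its details.

import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ColumnTypesSetupSketch {
  // Illustrative stand-in for the Sqoop-internal AVRO_COLUMN_TYPES_MAP constant.
  static final String AVRO_COLUMN_TYPES_MAP = "sqoop.avro.column.types.map";

  /** Stores a column-name to type-name map so that a mapper's setup() can
   *  restore it with DefaultStringifier.load, as in Example 1 above. */
  public static void storeColumnTypes(Configuration conf, Map<String, String> types)
      throws IOException {
    MapWritable columnTypes = new MapWritable();
    for (Map.Entry<String, String> e : types.entrySet()) {
      columnTypes.put(new Text(e.getKey()), new Text(e.getValue()));
    }
    DefaultStringifier.store(conf, columnTypes, AVRO_COLUMN_TYPES_MAP);
  }
}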

Example 2: getForestStatusMap

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
protected LinkedMapWritable getForestStatusMap(Configuration conf)
        throws IOException {
    String forestHost = conf.get(OUTPUT_FOREST_HOST);
    if (forestHost != null) {
        // Restores the object from the configuration.
        LinkedMapWritable fhmap = DefaultStringifier.load(conf, OUTPUT_FOREST_HOST,
            LinkedMapWritable.class);
        // Must be in fast load mode, otherwise we won't reach here.
        String s = conf.get(ASSIGNMENT_POLICY);
        // EXECUTION_MODE must have a value in mlcp;
        // the default is "distributed" in the hadoop connector.
        String mode = conf.get(EXECUTION_MODE, MODE_DISTRIBUTED);
        if (MODE_DISTRIBUTED.equals(mode)) {
            AssignmentPolicy.Kind policy = AssignmentPolicy.Kind.forName(s);
            am.initialize(policy, fhmap, conf.getInt(BATCH_SIZE, 10));
        }
        return fhmap;
    } else {
        throw new IOException("Forest host map not found");
    }
}
 
Developer: marklogic, Project: marklogic-contentpump, Lines: 23, Source: ContentOutputFormat.java

Example 3: getRoleMap

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
protected LinkedMapWritable getRoleMap(TaskAttemptContext context) throws IOException {
    // Restores the object from the configuration.
    Configuration conf = context.getConfiguration();
    LinkedMapWritable fhmap = null;
    if (conf.get(ConfigConstants.CONF_ROLE_MAP) != null) {
        fhmap = DefaultStringifier.load(conf, ConfigConstants.CONF_ROLE_MAP, 
            LinkedMapWritable.class);
    }
    return fhmap;
}
 
Developer: marklogic, Project: marklogic-contentpump, Lines: 11, Source: RDFInputFormat.java

Example 4: getServerVersion

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
protected String getServerVersion(TaskAttemptContext context) throws IOException {
    // Restores the object from the configuration.
    Configuration conf = context.getConfiguration();
    Text version = DefaultStringifier.load(conf, ConfigConstants.CONF_ML_VERSION, 
        Text.class);
    return version.toString();
}
 
Developer: marklogic, Project: marklogic-contentpump, Lines: 8, Source: RDFInputFormat.java

Example 5: getHosts

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
protected TextArrayWritable getHosts(Configuration conf) throws IOException {
    String forestHost = conf.get(OUTPUT_FOREST_HOST);
    if (forestHost != null) {
        // Restores the object from the configuration.
        TextArrayWritable hosts = DefaultStringifier.load(conf,
            OUTPUT_FOREST_HOST, TextArrayWritable.class);
        return hosts;
    } else {
        throw new IOException("Forest host map not found");
    }
}
 
Developer: marklogic, Project: marklogic-contentpump, Lines: 12, Source: MarkLogicOutputFormat.java

Example 6: loadFromConf

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
/**
 * Loads the object stored by the {@link #storeToConf(Object, Configuration, String)}
 * method from the configuration under the given dataKey.
 * @param conf the configuration to read from
 * @param dataKey the key to get the data from
 * @return the stored object
 */
@SuppressWarnings("unchecked")
public static<T> T loadFromConf(Configuration conf, String dataKey)
  throws IOException {
  String classKey = dataKey + "._class";
  String className = conf.get(classKey);
  try {
    T obj = (T) DefaultStringifier.load(conf, dataKey, ClassLoadingUtils.loadClass(className));
    return obj;
  } catch (Exception ex) {
    throw new IOException(ex);
  }
}
 
Developer: jianglibo, Project: gora-boot, Lines: 20, Source: IOUtils.java
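loadFromConf only works if a companion storeToConf previously recorded both the serialized object and its class name. Below is a reconstruction of that counterpart, inferred from the key convention visible above (dataKey + "._class"); it is a sketch, and the actual Gora implementation may differ.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;

public class ConfStoreSketch {
  /** Inferred storing counterpart of loadFromConf above: records the object's
   *  class name under dataKey + "._class", then the serialized object itself
   *  under dataKey. (Sketch only; Gora's real storeToConf may differ.) */
  public static <T> void storeToConf(T obj, Configuration conf, String dataKey)
      throws IOException {
    conf.set(dataKey + "._class", obj.getClass().getName());
    DefaultStringifier.store(conf, obj, dataKey);
  }
}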

Example 7: setup

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {

  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class<?> cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
 
Developer: unicredit, Project: zSqoop, Lines: 34, Source: AvroExportMapper.java

Example 8: loadFromConf

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
/**
 * Loads the object stored by the {@link #storeToConf(Object, Configuration, String)}
 * method from the configuration under the given dataKey.
 *
 * @param conf the configuration to read from.
 * @param dataKey the key to get the data from.
 * @param <T> the given object class type.
 * @return the stored object.
 * @throws IOException when failed retrieving the data given key from Hadoop conf.
 */
@SuppressWarnings("unchecked")
public static<T> T loadFromConf(Configuration conf, String dataKey)
  throws IOException {
  String classKey = dataKey + "._class";
  String className = conf.get(classKey);
  try {
    T obj = (T) DefaultStringifier.load(conf, dataKey, ClassLoadingUtils.loadClass(className));
    return obj;
  } catch (Exception ex) {
    throw new IOException(ex);
  }
}
 
Developer: apache, Project: gora, Lines: 23, Source: IOUtils.java

Example 9: SqoopHCatExportHelper

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
public SqoopHCatExportHelper(Configuration conf, boolean isOdps)
  throws IOException, InterruptedException {
  this.isOdps = isOdps;

  if (!isOdps) {
    colTypesJava =
        DefaultStringifier.load(conf, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_JAVA,
            MapWritable.class);
    colTypesSql =
        DefaultStringifier.load(conf, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_SQL,
            MapWritable.class);
  }
  // Instantiate a copy of the user's class to hold and parse the record.

  String recordClassName = conf.get(
    ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
      + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
      + ") is not set!");
  }

  bigDecimalFormatString = conf.getBoolean(
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);

  debugHCatExportMapper = conf.getBoolean(
    SqoopHCatUtilities.DEBUG_HCAT_EXPORT_MAPPER_PROP, false);
  try {
    Class<?> cls = Class.forName(recordClassName, true,
      Thread.currentThread().getContextClassLoader());
    sqoopRecord = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == sqoopRecord) {
    throw new IOException("Could not instantiate object of type "
      + recordClassName);
  }

  String inputJobInfoStr = conf.get(HCatConstants.HCAT_KEY_JOB_INFO);
  jobInfo =
    (InputJobInfo) HCatUtil.deserialize(inputJobInfoStr);
  HCatSchema tableSchema = jobInfo.getTableInfo().getDataColumns();
  HCatSchema partitionSchema =
    jobInfo.getTableInfo().getPartitionColumns();
  hCatFullTableSchema = new HCatSchema(tableSchema.getFields());
  for (HCatFieldSchema hfs : partitionSchema.getFields()) {
    hCatFullTableSchema.append(hfs);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 53, Source: SqoopHCatExportHelper.java

Example 10: getMimetypesMap

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
/**
 * Initializes the mimetype map if it has not been initialized yet.
 *
 * @return the mimetype map
 * @throws IOException
 */
private LinkedMapWritable getMimetypesMap() throws IOException {
    if (mimetypeMap != null)
        return mimetypeMap;
    String mtmap = conf.get(ConfigConstants.CONF_MIMETYPES);
    if (mtmap != null) {
        mimetypeMap = DefaultStringifier.load(conf,
            ConfigConstants.CONF_MIMETYPES, LinkedMapWritable.class);
        return mimetypeMap;
    }
    String host = conf.get(OUTPUT_HOST);
    Session session = null;
    ResultSequence result = null;
    try {
        ContentSource cs = InternalUtilities.getOutputContentSource(conf,
            host);
        session = cs.newSession();
        AdhocQuery query = session.newAdhocQuery(MIMETYPES_QUERY);
        RequestOptions options = new RequestOptions();
        options.setDefaultXQueryVersion("1.0-ml");
        query.setOptions(options);
        result = session.submitRequest(query);
        if (!result.hasNext())
            throw new IOException(
                "Server-side transform requires MarkLogic 7 or later");
        mimetypeMap = new LinkedMapWritable();
        while (result.hasNext()) {
            String suffs = result.next().asString();
            Text format = new Text(result.next().asString());
            // some extensions are in a space separated string
            for (String s : suffs.split(" ")) {
                Text suff = new Text(s);
                mimetypeMap.put(suff, format);
            }
        }
        return mimetypeMap;
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new IOException(e);
    } finally {
        if (result != null) {
            result.close();
        }
        if (session != null) {
            session.close();
        }
    }
}
 
Developer: marklogic, Project: marklogic-contentpump, Lines: 54, Source: TransformOutputFormat.java

Example 11: setup

import org.apache.hadoop.io.DefaultStringifier; // import the class this method depends on
@Override
protected void setup(Context context)
  throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  colTypesJava = DefaultStringifier.load(conf,
    SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_JAVA, MapWritable.class);
  colTypesSql = DefaultStringifier.load(conf,
    SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_SQL, MapWritable.class);
  // Instantiate a copy of the user's class to hold and parse the record.

  String recordClassName = conf.get(
    ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
      + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
      + ") is not set!");
  }
  debugHCatExportMapper = conf.getBoolean(
    SqoopHCatUtilities.DEBUG_HCAT_EXPORT_MAPPER_PROP, false);
  try {
    Class<?> cls = Class.forName(recordClassName, true,
      Thread.currentThread().getContextClassLoader());
    sqoopRecord = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == sqoopRecord) {
    throw new IOException("Could not instantiate object of type "
      + recordClassName);
  }

  String inputJobInfoStr = conf.get(HCatConstants.HCAT_KEY_JOB_INFO);
  jobInfo =
    (InputJobInfo) HCatUtil.deserialize(inputJobInfoStr);
  HCatSchema tableSchema = jobInfo.getTableInfo().getDataColumns();
  HCatSchema partitionSchema =
    jobInfo.getTableInfo().getPartitionColumns();
  hCatFullTableSchema = new HCatSchema(tableSchema.getFields());
  for (HCatFieldSchema hfs : partitionSchema.getFields()) {
    hCatFullTableSchema.append(hfs);
  }
  hCatSchemaFields = hCatFullTableSchema.getFields();
}
 
Developer: unicredit, Project: zSqoop, Lines: 49, Source: SqoopHCatExportMapper.java


Note: The org.apache.hadoop.io.DefaultStringifier.load method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please follow the corresponding project's License. Do not reproduce without permission.