當前位置: 首頁>>代碼示例>>Java>>正文


Java TypeInfo.getTypeName方法代碼示例

本文整理匯總了Java中org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.getTypeName方法的典型用法代碼示例。如果您正苦於以下問題:Java TypeInfo.getTypeName方法的具體用法?Java TypeInfo.getTypeName怎麼用?Java TypeInfo.getTypeName使用的例子?那麼, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.hive.serde2.typeinfo.TypeInfo的用法示例。


在下文中一共展示了TypeInfo.getTypeName方法的7個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。

示例1: initialize

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
  // Resolve Hive-version-specific SerDe parameters through the shim layer.
  serdeParams = ShimsLoader.getHiveShims()
      .getSerDeParametersShim(conf, tbl, getClass().getName());
  String mappingSpec = tbl.getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING);

  // Fail fast on any Hive column type that has no DynamoDB counterpart.
  for (TypeInfo columnType : serdeParams.getColumnTypes()) {
    String typeName = columnType.getTypeName();
    if (HiveDynamoDBTypeFactory.getTypeObjectFromHiveType(typeName) == null) {
      throw new SerDeException("Unsupported type: " + typeName);
    }
  }

  log.info("Provided column mapping: " + mappingSpec);
  columnMappings = Maps.newHashMap();
  if (!Strings.isNullOrEmpty(mappingSpec)) {
    columnMappings = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(mappingSpec);
  }
  // Any column without an explicit mapping falls back to a default one.
  addDefaultColumnMappings(serdeParams.getColumnNames());

  log.info("Final column mapping: " + columnMappings);
  objectInspector = new DynamoDBObjectInspector(
      serdeParams.getColumnNames(), serdeParams.getColumnTypes(), columnMappings);
}
 
開發者ID:awslabs,項目名稱:emr-dynamodb-connector,代碼行數:24,代碼來源:DynamoDBExportSerDe.java

示例2: convertToIndexRSchema

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Maps a Hive column list (names + TypeInfos) onto an IndexR {@code SegmentSchema},
 * marking columns whose lower-cased name appears in {@code indexColumns} as indexed.
 *
 * @throws IOException if a column's Hive type has no IndexR SQLType equivalent
 */
private static SegmentSchema convertToIndexRSchema(List<String> columnNames,
                                                   List<TypeInfo> columnTypes,
                                                   Set<String> indexColumns) throws IOException {
    List<ColumnSchema> schemas = new ArrayList<ColumnSchema>();
    for (int idx = 0; idx < columnNames.size(); idx++) {
        String name = columnNames.get(idx);
        TypeInfo hiveType = columnTypes.get(idx);

        SQLType sqlType;
        if (TypeInfoFactory.intTypeInfo.equals(hiveType)) {
            sqlType = SQLType.INT;
        } else if (TypeInfoFactory.longTypeInfo.equals(hiveType)) {
            sqlType = SQLType.BIGINT;
        } else if (TypeInfoFactory.floatTypeInfo.equals(hiveType)) {
            sqlType = SQLType.FLOAT;
        } else if (TypeInfoFactory.doubleTypeInfo.equals(hiveType)) {
            sqlType = SQLType.DOUBLE;
        } else if (TypeInfoFactory.stringTypeInfo.equals(hiveType)) {
            sqlType = SQLType.VARCHAR;
        } else if (TypeInfoFactory.dateTypeInfo.equals(hiveType)) {
            sqlType = SQLType.DATE;
        } else if (TypeInfoFactory.timestampTypeInfo.equals(hiveType)) {
            sqlType = SQLType.DATETIME;
        } else {
            throw new IOException("can't recognize this type [" + hiveType.getTypeName() + "]");
        }

        schemas.add(new ColumnSchema(name, sqlType, indexColumns.contains(name.toLowerCase())));
    }
    return new SegmentSchema(schemas);
}
 
開發者ID:shunfei,項目名稱:indexr,代碼行數:33,代碼來源:IndexROutputFormat.java

示例3: HiveType

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Wraps a Hive {@link TypeInfo}, caching its canonical type name alongside it.
 *
 * @param typeInfo the Hive type descriptor; must not be null
 * @throws NullPointerException if {@code typeInfo} is null
 */
private HiveType(TypeInfo typeInfo)
{
    requireNonNull(typeInfo, "typeInfo is null");
    this.hiveTypeName = typeInfo.getTypeName();
    this.typeInfo = typeInfo;
}
 
開發者ID:y-lan,項目名稱:presto,代碼行數:7,代碼來源:HiveType.java

示例4: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job       Hadoop configuration
 * @param tbl       table properties
 * @param serdeName name of this SerDe, used in error messages
 * @throws org.apache.hadoop.hive.serde2.SerDeException if the number of Hive
 *         columns disagrees with cassandra.columns.mapping, or a whole column
 *         family ("name:") is not mapped to a map&lt;string,?&gt; Hive type
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
        throws SerDeException {
    cassandraKeyspace = parseCassandraKeyspace(tbl);
    cassandraColumnFamily = parseCassandraColumnFamily(tbl);
    cassandraColumnNames = parseOrCreateColumnMapping(tbl);

    cassandraColumnNamesText = new ArrayList<Text>();
    for (String columnName : cassandraColumnNames) {
        cassandraColumnNamesText.add(new Text(columnName));
    }

    serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

    validatorType = parseOrCreateValidatorType(tbl);

    setTableMapping();

    if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
        throw new SerDeException(serdeName + ": columns has "
                + serdeParams.getColumnNames().size()
                + " elements while cassandra.columns.mapping has "
                + cassandraColumnNames.size() + " elements"
                + " (counting the key if implicit)");
    }

    // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
    for (int i = 0; i < cassandraColumnNames.size(); i++) {
        String cassandraColName = cassandraColumnNames.get(i);
        if (cassandraColName.endsWith(":")) {
            TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
            // BUGFIX: the original compared the type-name Strings with !=
            // (reference identity), which only works if both happen to be
            // interned; compare with equals() instead.
            if ((typeInfo.getCategory() != Category.MAP)
                    || !Constants.STRING_TYPE_NAME.equals(
                            ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {

                throw new SerDeException(
                        serdeName + ": Cassandra column family '"
                        + cassandraColName
                        + "' should be mapped to map<string,?> but is mapped to "
                        + typeInfo.getTypeName());
            }
        }
    }
}
 
開發者ID:2013Commons,項目名稱:hive-cassandra,代碼行數:55,代碼來源:CqlSerDe.java

示例5: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job       Hadoop configuration
 * @param tbl       table properties
 * @param serdeName name of this SerDe, used in error messages
 * @throws SerDeException if the number of Hive columns disagrees with
 *         cassandra.columns.mapping, or a whole column family ("name:") is
 *         not mapped to a map&lt;string,?&gt; Hive type
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
        throws SerDeException {
    cassandraKeyspace = parseCassandraKeyspace(tbl);
    cassandraColumnFamily = parseCassandraColumnFamily(tbl);
    cassandraColumnNames = parseOrCreateColumnMapping(tbl);

    cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
    for (String columnName : cassandraColumnNames) {
        // NOTE(review): getBytes() uses the platform default charset;
        // presumably column names are ASCII — confirm, or pass an explicit charset.
        cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
    }

    iKey = cassandraColumnNames.indexOf(CassandraColumnSerDe.CASSANDRA_KEY_COLUMN);

    serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

    validatorType = parseOrCreateValidatorType(tbl);

    setTableMapping();

    if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
        throw new SerDeException(serdeName + ": columns has "
                + serdeParams.getColumnNames().size()
                + " elements while cassandra.columns.mapping has "
                + cassandraColumnNames.size() + " elements"
                + " (counting the key if implicit)");
    }

    // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
    for (int i = 0; i < cassandraColumnNames.size(); i++) {
        String cassandraColName = cassandraColumnNames.get(i);
        if (cassandraColName.endsWith(":")) {
            TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
            // BUGFIX: the original compared the type-name Strings with !=
            // (reference identity), which only works if both happen to be
            // interned; compare with equals() instead.
            if ((typeInfo.getCategory() != Category.MAP)
                    || !Constants.STRING_TYPE_NAME.equals(
                            ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {

                throw new SerDeException(
                        serdeName + ": Cassandra column family '"
                        + cassandraColName
                        + "' should be mapped to map<string,?> but is mapped to "
                        + typeInfo.getTypeName());
            }
        }
    }
}
 
開發者ID:2013Commons,項目名稱:hive-cassandra,代碼行數:56,代碼來源:CassandraColumnSerDe.java

示例6: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Initialize the cassandra serialization and deserialization parameters from table properties and configuration.
 *
 * @param job       Hadoop configuration
 * @param tbl       table properties
 * @param serdeName name of this SerDe, used in error messages
 * @throws SerDeException if the number of Hive columns disagrees with
 *         cassandra.columns.mapping, or a whole column family ("name:") is
 *         not mapped to a map&lt;string,?&gt; Hive type
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraColumnFamily = getCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
  for (String columnName : cassandraColumnNames) {
    // NOTE(review): getBytes() uses the platform default charset;
    // presumably column names are ASCII — confirm, or pass an explicit charset.
    cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
  }

  iKey = cassandraColumnNames.indexOf(AbstractColumnSerDe.CASSANDRA_KEY_COLUMN);

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

  validatorType = parseOrCreateValidatorType(tbl);

  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has " +
        serdeParams.getColumnNames().size() +
        " elements while cassandra.columns.mapping has " +
        cassandraColumnNames.size() + " elements" +
        " (counting the key if implicit)");
  }

  // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      // BUGFIX: the original compared the type-name Strings with !=
      // (reference identity), which only works if both happen to be
      // interned; compare with equals() instead.
      if ((typeInfo.getCategory() != Category.MAP) ||
          !Constants.STRING_TYPE_NAME.equals(
              ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {

        throw new SerDeException(
            serdeName + ": Cassandra column family '"
                + cassandraColName
                + "' should be mapped to map<string,?> but is mapped to "
                + typeInfo.getTypeName());
      }
    }
  }
}
 
開發者ID:dvasilen,項目名稱:Hive-Cassandra,代碼行數:54,代碼來源:CassandraColumnSerDe.java

示例7: getFieldSchemaFromTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; //導入方法依賴的package包/類
/**
 * Convert a {@link TypeInfo} into a metastore {@code FieldSchema} with the
 * given field name; the schema comment is a fixed provenance marker.
 */
public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName,
    TypeInfo typeInfo) {
  String typeName = typeInfo.getTypeName();
  return new FieldSchema(
      fieldName, typeName, "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo");
}
 
開發者ID:facebookarchive,項目名稱:swift-hive-metastore,代碼行數:9,代碼來源:MetaStoreUtils.java


注:本文中的org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.getTypeName方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。