This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.getTypeName. If you are unsure what TypeInfo.getTypeName does, how to call it, or what it looks like in practice, the curated examples below may help. You can also explore further usage examples of its containing class, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.
The following presents 7 code examples of the TypeInfo.getTypeName method, sorted by popularity by default.
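Before the examples, here is a minimal, self-contained sketch of the method itself: getTypeName() returns the canonical Hive type string for a TypeInfo. The demo class name is ours; the Hive APIs used are real.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeNameDemo {
  public static void main(String[] args) {
    // Parse a Hive type string into a TypeInfo, then read back its canonical name.
    TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    System.out.println(t.getTypeName()); // prints: map<string,int>
  }
}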
Example 1: initialize
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
  serdeParams = ShimsLoader.getHiveShims()
      .getSerDeParametersShim(conf, tbl, getClass().getName());

  String specifiedColumnMapping = tbl.getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING);

  for (TypeInfo type : serdeParams.getColumnTypes()) {
    if (HiveDynamoDBTypeFactory.getTypeObjectFromHiveType(type.getTypeName()) == null) {
      throw new SerDeException("Unsupported type: " + type.getTypeName());
    }
  }

  log.info("Provided column mapping: " + specifiedColumnMapping);
  columnMappings = Maps.newHashMap();
  if (!Strings.isNullOrEmpty(specifiedColumnMapping)) {
    columnMappings = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(specifiedColumnMapping);
  }
  addDefaultColumnMappings(serdeParams.getColumnNames());

  log.info("Final column mapping: " + columnMappings);
  objectInspector = new DynamoDBObjectInspector(serdeParams.getColumnNames(),
      serdeParams.getColumnTypes(), columnMappings);
}
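The validation loop above rejects any Hive column whose type the connector cannot map to DynamoDB. A standalone sketch of the same pattern follows; the SUPPORTED set here is hypothetical (the real connector consults HiveDynamoDBTypeFactory), and the class name is ours.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeValidationDemo {
  // Hypothetical supported set; the real connector asks HiveDynamoDBTypeFactory instead.
  private static final Set<String> SUPPORTED =
      new HashSet<String>(Arrays.asList("string", "bigint", "double", "map<string,string>"));

  static void validate(List<TypeInfo> columnTypes) throws SerDeException {
    for (TypeInfo type : columnTypes) {
      if (!SUPPORTED.contains(type.getTypeName())) {
        throw new SerDeException("Unsupported type: " + type.getTypeName());
      }
    }
  }

  public static void main(String[] args) throws SerDeException {
    // "string" and "bigint" are in the supported set, so this passes.
    validate(Arrays.<TypeInfo>asList(
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.longTypeInfo));
  }
}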
Example 2: convertToIndexRSchema
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private static SegmentSchema convertToIndexRSchema(List<String> columnNames,
                                                   List<TypeInfo> columnTypes,
                                                   Set<String> indexColumns) throws IOException {
  List<ColumnSchema> schemas = new ArrayList<ColumnSchema>();
  for (int i = 0; i < columnNames.size(); i++) {
    String currentColumn = columnNames.get(i);
    TypeInfo currentType = columnTypes.get(i);
    SQLType convertedType = null;
    if (currentType.equals(TypeInfoFactory.intTypeInfo)) {
      convertedType = SQLType.INT;
    } else if (currentType.equals(TypeInfoFactory.longTypeInfo)) {
      convertedType = SQLType.BIGINT;
    } else if (currentType.equals(TypeInfoFactory.floatTypeInfo)) {
      convertedType = SQLType.FLOAT;
    } else if (currentType.equals(TypeInfoFactory.doubleTypeInfo)) {
      convertedType = SQLType.DOUBLE;
    } else if (currentType.equals(TypeInfoFactory.stringTypeInfo)) {
      convertedType = SQLType.VARCHAR;
    } else if (currentType.equals(TypeInfoFactory.dateTypeInfo)) {
      convertedType = SQLType.DATE;
    } else if (currentType.equals(TypeInfoFactory.timestampTypeInfo)) {
      convertedType = SQLType.DATETIME;
    } else {
      throw new IOException("can't recognize this type [" + currentType.getTypeName() + "]");
    }
    boolean isIndexed = indexColumns.contains(currentColumn.toLowerCase());
    schemas.add(new ColumnSchema(currentColumn, convertedType, isIndexed));
  }
  return new SegmentSchema(schemas);
}
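The equals-based dispatch above works because Hive primitive TypeInfo instances compare equal by type name, so a type parsed from a schema string matches the corresponding TypeInfoFactory constant. A quick check (the demo class name is ours):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeEqualityDemo {
  public static void main(String[] args) {
    // A column type parsed from a table schema string...
    TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString("bigint");
    // ...compares equal to the factory constant, so equals-based dispatch is safe.
    System.out.println(parsed.equals(TypeInfoFactory.longTypeInfo)); // true
    System.out.println(parsed.getTypeName());                        // bigint
  }
}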
Example 3: HiveType
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private HiveType(TypeInfo typeInfo)
{
    requireNonNull(typeInfo, "typeInfo is null");
    this.hiveTypeName = typeInfo.getTypeName();
    this.typeInfo = typeInfo;
}
Example 4: initCassandraSerDeParameters
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws org.apache.hadoop.hive.serde2.SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraKeyspace = parseCassandraKeyspace(tbl);
  cassandraColumnFamily = parseCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesText = new ArrayList<Text>();
  for (String columnName : cassandraColumnNames) {
    cassandraColumnNamesText.add(new Text(columnName));
  }

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
  validatorType = parseOrCreateValidatorType(tbl);
  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has "
        + serdeParams.getColumnNames().size()
        + " elements while cassandra.columns.mapping has "
        + cassandraColumnNames.size() + " elements"
        + " (counting the key if implicit)");
  }

  // We can only make sure that "StandardColumn:" is mapped to MAP<String,?>.
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      if ((typeInfo.getCategory() != Category.MAP)
          // Compare the key type name by value, not by reference.
          || !Constants.STRING_TYPE_NAME.equals(
              ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {
        throw new SerDeException(
            serdeName + ": Cassandra column family '" + cassandraColName
            + "' should be mapped to map<string,?> but is mapped to "
            + typeInfo.getTypeName());
      }
    }
  }
}
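The map<string,?> check above relies on drilling into a MapTypeInfo once the category is known to be MAP. A small standalone illustration of that pattern (the demo class name is ours):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class MapKeyDemo {
  public static void main(String[] args) {
    TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    if (t.getCategory() == Category.MAP) {
      // Safe to downcast after checking the category.
      MapTypeInfo m = (MapTypeInfo) t;
      System.out.println(m.getMapKeyTypeInfo().getTypeName());   // string
      System.out.println(m.getMapValueTypeInfo().getTypeName()); // int
    }
  }
}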
Example 5: initCassandraSerDeParameters
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraKeyspace = parseCassandraKeyspace(tbl);
  cassandraColumnFamily = parseCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
  for (String columnName : cassandraColumnNames) {
    cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
  }

  iKey = cassandraColumnNames.indexOf(CassandraColumnSerDe.CASSANDRA_KEY_COLUMN);

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
  validatorType = parseOrCreateValidatorType(tbl);
  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has "
        + serdeParams.getColumnNames().size()
        + " elements while cassandra.columns.mapping has "
        + cassandraColumnNames.size() + " elements"
        + " (counting the key if implicit)");
  }

  // We can only make sure that "StandardColumn:" is mapped to MAP<String,?>.
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      if ((typeInfo.getCategory() != Category.MAP)
          // Compare the key type name by value, not by reference.
          || !Constants.STRING_TYPE_NAME.equals(
              ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {
        throw new SerDeException(
            serdeName + ": Cassandra column family '" + cassandraColName
            + "' should be mapped to map<string,?> but is mapped to "
            + typeInfo.getTypeName());
      }
    }
  }
}
Example 6: initCassandraSerDeParameters
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Initialize the cassandra serialization and deserialization parameters from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraColumnFamily = getCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
  for (String columnName : cassandraColumnNames) {
    cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
  }

  iKey = cassandraColumnNames.indexOf(AbstractColumnSerDe.CASSANDRA_KEY_COLUMN);

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
  validatorType = parseOrCreateValidatorType(tbl);
  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has "
        + serdeParams.getColumnNames().size()
        + " elements while cassandra.columns.mapping has "
        + cassandraColumnNames.size() + " elements"
        + " (counting the key if implicit)");
  }

  // We can only make sure that "StandardColumn:" is mapped to MAP<String,?>.
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      if ((typeInfo.getCategory() != Category.MAP)
          // Compare the key type name by value, not by reference.
          || !Constants.STRING_TYPE_NAME.equals(
              ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {
        throw new SerDeException(
            serdeName + ": Cassandra column family '" + cassandraColName
            + "' should be mapped to map<string,?> but is mapped to "
            + typeInfo.getTypeName());
      }
    }
  }
}
Example 7: getFieldSchemaFromTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Convert TypeInfo to FieldSchema.
 */
public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName,
    TypeInfo typeInfo) {
  return new FieldSchema(fieldName, typeInfo.getTypeName(),
      "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo");
}