

Java HCatFieldSchema.getType Method Code Examples

This article collects typical usage examples of the Java method org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getType, gathered from open-source projects. If you are unsure what HCatFieldSchema.getType does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of org.apache.hive.hcatalog.data.schema.HCatFieldSchema itself.


The following shows 9 code examples of the HCatFieldSchema.getType method, ordered by popularity.
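Before the examples, a minimal sketch of what the method returns and how it is typically consumed: HCatFieldSchema.getType() yields an HCatFieldSchema.Type enum constant, which callers switch on to treat primitive and nested fields differently. The class name SchemaInspector below is illustrative, not part of the HCatalog API.

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class SchemaInspector {
  // Print a one-line description per field, dispatching on getType().
  public static void describe(HCatSchema schema) throws HCatException {
    for (HCatFieldSchema field : schema.getFields()) {
      switch (field.getType()) {
        case STRUCT:
          System.out.println(field.getName() + " -> struct: " + field.getStructSubSchema());
          break;
        case ARRAY:
          System.out.println(field.getName() + " -> array of: " + field.getArrayElementSchema());
          break;
        case MAP:
          System.out.println(field.getName() + " -> map, key type: " + field.getMapKeyType());
          break;
        default:
          System.out.println(field.getName() + " -> " + field.getTypeString());
      }
    }
  }
}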

Example 1: convertClobType

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  String s = cr.isExternal() ? cr.toString() : cr.getData();

  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 19, Source: SqoopHCatImportHelper.java
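The VARCHAR and CHAR branches above matter because Hive's typed wrappers enforce the declared column length. A standalone, hedged sketch of that behavior (not part of the original helper):

import org.apache.hadoop.hive.common.type.HiveVarchar;

public class VarcharDemo {
  public static void main(String[] args) {
    // HiveVarchar enforces the declared maximum length by truncating.
    HiveVarchar vc = new HiveVarchar("hello world", 5);
    System.out.println(vc.getValue()); // expected: "hello"
  }
}

HiveChar behaves analogously, except that values are padded to the declared length.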

Example 2: getResourceSchemaFromFieldSchema

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private static ResourceFieldSchema getResourceSchemaFromFieldSchema(HCatFieldSchema hfs)
  throws IOException {
  ResourceFieldSchema rfSchema;
  // if we are dealing with a bag or tuple column - need to worry about subschema
  if (hfs.getType() == Type.STRUCT) {
    rfSchema = new ResourceFieldSchema()
      .setName(hfs.getName())
      .setDescription(hfs.getComment())
      .setType(getPigType(hfs))
      .setSchema(getTupleSubSchema(hfs));
  } else if (hfs.getType() == Type.ARRAY) {
    rfSchema = new ResourceFieldSchema()
      .setName(hfs.getName())
      .setDescription(hfs.getComment())
      .setType(getPigType(hfs))
      .setSchema(getBagSubSchema(hfs));
  } else {
    rfSchema = new ResourceFieldSchema()
      .setName(hfs.getName())
      .setDescription(hfs.getComment())
      .setType(getPigType(hfs))
      .setSchema(null); // no munging inner-schemas
  }
  return rfSchema;
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 26, Source: PigHCatUtil.java

Example 3: convertToSqoopRecord

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
public SqoopRecord convertToSqoopRecord(HCatRecord hcr)
  throws IOException {
  Text key = new Text();
  for (Map.Entry<String, Object> e : sqoopRecord.getFieldMap().entrySet()) {
    String colName = e.getKey();
    String hfn = colName.toLowerCase();
    key.set(hfn);
    Object hCatVal = hcr.get(hfn, hCatFullTableSchema);
    if (!isOdps) {
      String javaColType = colTypesJava.get(key).toString();
      int sqlType = ((IntWritable) colTypesSql.get(key)).get();
      HCatFieldSchema field = hCatFullTableSchema.get(hfn);
      HCatFieldSchema.Type fieldType = field.getType();
      String hCatTypeString = field.getTypeString();
      Object sqlVal = convertToSqoop(hCatVal, fieldType, javaColType, hCatTypeString);
      if (debugHCatExportMapper) {
        LOG.debug("hCatVal " + hCatVal + " of type "
            + (hCatVal == null ? null : hCatVal.getClass().getName()) + ",sqlVal " + sqlVal
            + " of type " + (sqlVal == null ? null : sqlVal.getClass().getName()) + ",java type "
            + javaColType + ", sql type = " + SqoopHCatUtilities.sqlTypeString(sqlType));
      }
      sqoopRecord.setField(colName, sqlVal);
    } else {
      sqoopRecord.setField(colName, hCatVal == null ? null : hCatVal.toString());
    }
  }
  return sqoopRecord;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 29, Source: SqoopHCatExportHelper.java

Example 4: convertStringTypes

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 29, Source: SqoopHCatImportHelper.java
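The DECIMAL branch parses through java.math.BigDecimal before wrapping in HiveDecimal. A hedged standalone sketch of that conversion (illustrative only):

import java.math.BigDecimal;
import java.math.MathContext;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalDemo {
  public static void main(String[] args) {
    // DECIMAL128 bounds the parsing precision before handing the value to Hive.
    BigDecimal bd = new BigDecimal("123.4500", MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    System.out.println(hd); // expected: 123.45 (HiveDecimal trims trailing zeros)
  }
}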

Example 5: convertBooleanTypes

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  Boolean b = (Boolean) val;
  if (hfsType == HCatFieldSchema.Type.BOOLEAN) {
    return b;
  } else if (hfsType == HCatFieldSchema.Type.TINYINT) {
    return (byte) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.SMALLINT) {
    return (short) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.INT) {
    return (int) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
    return (long) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.FLOAT) {
    return (float) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.DOUBLE) {
    return (double) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.STRING) {
    return val.toString();
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(val.toString(), vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
    return hChar;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 31, Source: SqoopHCatImportHelper.java

Example 6: getFieldType

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private TypeInformation getFieldType(HCatFieldSchema fieldSchema) {
  switch (fieldSchema.getType()) {
    case INT:
      return BasicTypeInfo.INT_TYPE_INFO;
    case TINYINT:
      return BasicTypeInfo.BYTE_TYPE_INFO;
    case SMALLINT:
      return BasicTypeInfo.SHORT_TYPE_INFO;
    case BIGINT:
      return BasicTypeInfo.LONG_TYPE_INFO;
    case BOOLEAN:
      return BasicTypeInfo.BOOLEAN_TYPE_INFO;
    case FLOAT:
      return BasicTypeInfo.FLOAT_TYPE_INFO;
    case DOUBLE:
      return BasicTypeInfo.DOUBLE_TYPE_INFO;
    case STRING:
      return BasicTypeInfo.STRING_TYPE_INFO;
    case BINARY:
      return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
    case ARRAY:
      return new GenericTypeInfo(List.class);
    case MAP:
      return new GenericTypeInfo(Map.class);
    case STRUCT:
      return new GenericTypeInfo(List.class);
    default:
      throw new IllegalArgumentException(
          "Unknown data type \"" + fieldSchema.getType() + "\" encountered.");
  }
}
 
Developer: axbaretto, Project: flink, Lines: 32, Source: HCatInputFormatBase.java
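For context, a mapping like this is typically applied to every field of the table schema to announce the produced types. A hedged sketch of such a caller (buildFieldTypes is illustrative, not part of the Flink API):

import java.util.List;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

// Illustrative helper: derive one TypeInformation per schema field, in order.
private TypeInformation[] buildFieldTypes(HCatSchema schema) {
  List<HCatFieldSchema> fields = schema.getFields();
  TypeInformation[] fieldTypes = new TypeInformation[fields.size()];
  for (int i = 0; i < fields.size(); i++) {
    fieldTypes[i] = getFieldType(fields.get(i));
  }
  return fieldTypes;
}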

Example 7: validateHcatFieldFollowsPigRules

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private static void validateHcatFieldFollowsPigRules(HCatFieldSchema hcatField)
    throws PigException {
  try {
    Type hType = hcatField.getType();
    switch (hType) {
    case BOOLEAN:
      if (!pigHasBooleanSupport) {
        throw new PigException("Incompatible type found in HCat table schema: "
            + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
      }
      break;
    case ARRAY:
      validateHCatSchemaFollowsPigRules(hcatField.getArrayElementSchema());
      break;
    case STRUCT:
      validateHCatSchemaFollowsPigRules(hcatField.getStructSubSchema());
      break;
    case MAP:
      // key is only string
      if (hcatField.getMapKeyType() != Type.STRING) {
        LOG.info("Converting non-String key of map " + hcatField.getName() + " from "
          + hcatField.getMapKeyType() + " to String.");
      }
      validateHCatSchemaFollowsPigRules(hcatField.getMapValueSchema());
      break;
    }
  } catch (HCatException e) {
    throw new PigException("Incompatible type found in hcat table schema: " + hcatField,
        PigHCatUtil.PIG_EXCEPTION_CODE, e);
  }
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 32, Source: PigHCatUtil.java

Example 8: toHCat

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
private Object toHCat(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (val == null) {
    return null;
  }

  Object retVal = null;

  if (val instanceof Number) {
    retVal = convertNumberTypes(val, hfs);
  } else if (val instanceof Boolean) {
    retVal = convertBooleanTypes(val, hfs);
  } else if (val instanceof String) {
    retVal = convertStringTypes(val, hfs);
  } else if (val instanceof java.util.Date) {
    retVal = converDateTypes(val, hfs);
  } else if (val instanceof BytesWritable) {
    if (hfsType == HCatFieldSchema.Type.BINARY) {
      BytesWritable bw = (BytesWritable) val;
      retVal = bw.getBytes();
    }
  } else if (val instanceof BlobRef) {
    if (hfsType == HCatFieldSchema.Type.BINARY) {
      BlobRef br = (BlobRef) val;
      byte[] bytes = br.isExternal() ? br.toString().getBytes() : br
        .getData();
      retVal = bytes;
    }
  } else if (val instanceof ClobRef) {
    retVal = convertClobType(val, hfs);
  } else {
    throw new UnsupportedOperationException("Objects of type "
      + val.getClass().getName() + " are not supported");
  }
  if (retVal == null) {
    LOG.error("Unable to convert [" + val
      + "]  of type " + val.getClass().getName()
      + " to HCatalog type " + hfs.getTypeString());
  }
  return retVal;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 42, Source: SqoopHCatImportHelper.java
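One caveat worth flagging about the BINARY branch (an observation about the Hadoop API, not a fix present in the original project): BytesWritable.getBytes() returns the backing array, which may be longer than the logical content, so trailing padding bytes can leak into the binary column. A hedged variant using copyBytes():

import org.apache.hadoop.io.BytesWritable;

public class BinaryUtil {
  // Sketch: copy exactly getLength() bytes instead of returning
  // the (possibly padded) backing array from getBytes().
  public static byte[] toBinary(BytesWritable bw) {
    return bw.copyBytes();
  }
}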

Example 9: getBagSubSchema

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; // import the package/class the method depends on
protected static ResourceSchema getBagSubSchema(HCatFieldSchema hfs)
    throws IOException {
  // there are two cases - array<Type> and array<struct<...>>
  // in either case the element type of the array is represented in a
  // tuple field schema in the bag's field schema - the second case (struct)
  // more naturally translates to the tuple - in the first case (array<Type>)
  // we simulate the tuple by putting the single field in a tuple

  Properties props = UDFContext.getUDFContext().getClientSystemProps();
  String innerTupleName = HCatConstants.HCAT_PIG_INNER_TUPLE_NAME_DEFAULT;
  if (props != null && props.containsKey(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME)) {
    innerTupleName = props.getProperty(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME)
      .replaceAll("FIELDNAME", hfs.getName());
  }
  String innerFieldName = HCatConstants.HCAT_PIG_INNER_FIELD_NAME_DEFAULT;
  if (props != null && props.containsKey(HCatConstants.HCAT_PIG_INNER_FIELD_NAME)) {
    innerFieldName = props.getProperty(HCatConstants.HCAT_PIG_INNER_FIELD_NAME)
      .replaceAll("FIELDNAME", hfs.getName());
  }

  ResourceFieldSchema[] bagSubFieldSchemas = new ResourceFieldSchema[1];
  bagSubFieldSchemas[0] = new ResourceFieldSchema().setName(innerTupleName)
    .setDescription("The tuple in the bag")
    .setType(DataType.TUPLE);
  HCatFieldSchema arrayElementFieldSchema = hfs.getArrayElementSchema().get(0);
  if (arrayElementFieldSchema.getType() == Type.STRUCT) {
    bagSubFieldSchemas[0].setSchema(getTupleSubSchema(arrayElementFieldSchema));
  } else if (arrayElementFieldSchema.getType() == Type.ARRAY) {
    ResourceSchema s = new ResourceSchema();
    List<ResourceFieldSchema> lrfs = Arrays.asList(
        getResourceSchemaFromFieldSchema(arrayElementFieldSchema));
    s.setFields(lrfs.toArray(new ResourceFieldSchema[lrfs.size()]));
    bagSubFieldSchemas[0].setSchema(s);
  } else {
    ResourceFieldSchema[] innerTupleFieldSchemas = new ResourceFieldSchema[1];
    innerTupleFieldSchemas[0] = new ResourceFieldSchema().setName(innerFieldName)
      .setDescription("The inner field in the tuple in the bag")
      .setType(getPigType(arrayElementFieldSchema))
      .setSchema(null); // the element type is not a tuple - so no subschema
    bagSubFieldSchemas[0].setSchema(
        new ResourceSchema().setFields(innerTupleFieldSchemas));
  }
  return new ResourceSchema().setFields(bagSubFieldSchemas);
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 46, Source: PigHCatUtil.java
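To make the two cases in the method's opening comment concrete, here is a hedged sketch of building both array shapes. The HCatFieldSchema constructor overloads used are one plausible way to assemble these schemas, and the printed type strings are expectations rather than verified output:

import java.util.Arrays;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class BagSchemaDemo {
  public static void main(String[] args) throws Exception {
    // Case 1: array<int> -- the lone element gets wrapped in a synthetic tuple.
    HCatSchema intElement = new HCatSchema(Arrays.asList(
        new HCatFieldSchema(null, HCatFieldSchema.Type.INT, null)));
    HCatFieldSchema intArray = new HCatFieldSchema(
        "scores", HCatFieldSchema.Type.ARRAY, intElement, null);

    // Case 2: array<struct<a:int,b:string>> -- the struct maps to the tuple directly.
    HCatSchema structFields = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("a", HCatFieldSchema.Type.INT, null),
        new HCatFieldSchema("b", HCatFieldSchema.Type.STRING, null)));
    HCatFieldSchema structElement = new HCatFieldSchema(
        null, HCatFieldSchema.Type.STRUCT, structFields, null);
    HCatFieldSchema structArray = new HCatFieldSchema(
        "points", HCatFieldSchema.Type.ARRAY,
        new HCatSchema(Arrays.asList(structElement)), null);

    System.out.println(intArray.getTypeString());    // expected: array<int>
    System.out.println(structArray.getTypeString()); // expected: array<struct<a:int,b:string>>
  }
}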


Note: The org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getType examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and copyright remains with those authors; for redistribution and use, consult the License of each corresponding project. Do not repost without permission.