Java HCatSchema Class Code Examples

This page collects typical usage examples of the Java class org.apache.hive.hcatalog.data.schema.HCatSchema. If you are wondering what the HCatSchema class is for, or how to use it in practice, the curated examples below should help.


The HCatSchema class belongs to the org.apache.hive.hcatalog.data.schema package. The sections below present 14 representative code examples of the class, ordered roughly by popularity.
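
Before the project snippets, here is a minimal, self-contained sketch of building and querying an HCatSchema directly. The column names and comments are invented for illustration; the constructors and accessors are the stock HCatalog ones (the three-argument HCatFieldSchema constructor used here is the classic Type-based one, deprecated in newer Hive releases in favor of the PrimitiveTypeInfo variant).

import java.util.ArrayList;
import java.util.List;

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatSchemaSketch {
  public static void main(String[] args) throws Exception {
    // Assemble a two-column schema by hand.
    List<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "row id"));
    cols.add(new HCatFieldSchema("msg", HCatFieldSchema.Type.STRING, "payload"));
    HCatSchema schema = new HCatSchema(cols);

    // Fields can be looked up by position or by name.
    System.out.println(schema.size());                     // 2
    System.out.println(schema.get("msg").getTypeString()); // string
    System.out.println(schema.getPosition("id"));          // 0
  }
}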

Example 1: getGeneralSchemaFromHCatFieldSchema

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public static IField getGeneralSchemaFromHCatFieldSchema( final HCatFieldSchema hCatFieldSchema ) throws IOException{
  if( hCatFieldSchema.getCategory() == HCatFieldSchema.Category.ARRAY ){
    HCatSchema arrayElementSchema = hCatFieldSchema.getArrayElementSchema();
    return new ArrayContainerField( hCatFieldSchema.getName() , getGeneralSchemaFromHCatFieldSchema( arrayElementSchema.get(0) ) );
  }
  else if( hCatFieldSchema.getCategory() == HCatFieldSchema.Category.MAP ){
    HCatSchema mapValueElementSchema = hCatFieldSchema.getMapValueSchema();
    return new MapContainerField( hCatFieldSchema.getName() , getGeneralSchemaFromHCatFieldSchema( mapValueElementSchema.get(0) ) );
  }
  else if( hCatFieldSchema.getCategory() == HCatFieldSchema.Category.STRUCT ){
    HCatSchema structSchema = hCatFieldSchema.getStructSubSchema();
    StructContainerField field = new StructContainerField( hCatFieldSchema.getName() );
    for( int i = 0 ; i < structSchema.size() ; i++ ){
      field.set( getGeneralSchemaFromHCatFieldSchema( structSchema.get(i) ) );
    }
    return field;
  }
  else if( hCatFieldSchema.getCategory() == HCatFieldSchema.Category.PRIMITIVE ){
    TypeInfo typeInfo = hCatFieldSchema.getTypeInfo();
    return HiveSchemaFactory.getGeneralSchema( hCatFieldSchema.getName() , typeInfo );
  }
  else{
    throw new IOException( "Unknown HCatalog field type : " + hCatFieldSchema.toString() );
  }
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 26, Source: HCatalogSchemaFactory.java
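
As a usage sketch for the factory method above: the snippet below builds an array<string> field with the stock HCatalog constructors and hands it over. The column name is invented, and the IField import is a guess at the dataplatform-schema-lib package layout.

import java.io.IOException;
import java.util.Collections;

import jp.co.yahoo.dataplatform.schema.design.IField; // package assumed; verify against the project
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class GeneralSchemaUsage {
  public static IField arrayExample() throws IOException {
    // Sub-schema describing the array's element type (element fields carry no name).
    HCatSchema elementSchema = new HCatSchema(Collections.singletonList(
        new HCatFieldSchema(null, HCatFieldSchema.Type.STRING, null)));

    // An array<string> column named "tags" (name invented for illustration).
    HCatFieldSchema arrayCol =
        new HCatFieldSchema("tags", HCatFieldSchema.Type.ARRAY, elementSchema, null);

    return HCatalogSchemaFactory.getGeneralSchemaFromHCatFieldSchema(arrayCol);
  }
}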

Example 2: generateHCatRecords

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
private List<HCatRecord> generateHCatRecords(int numRecords,
  HCatSchema hCatTblSchema, ColumnGenerator... extraCols) throws Exception {
  List<HCatRecord> records = new ArrayList<HCatRecord>();
  List<HCatFieldSchema> hCatTblCols = hCatTblSchema.getFields();
  int size = hCatTblCols.size();
  for (int i = 0; i < numRecords; ++i) {
    DefaultHCatRecord record = new DefaultHCatRecord(size);
    record.set(hCatTblCols.get(0).getName(), hCatTblSchema, i);
    record.set(hCatTblCols.get(1).getName(), hCatTblSchema, "textfield" + i);
    int idx = 0;
    for (int j = 0; j < extraCols.length; ++j) {
      if (extraCols[j].getKeyType() == KeyType.STATIC_KEY) {
        continue;
      }
      record.set(hCatTblCols.get(idx + 2).getName(), hCatTblSchema,
        extraCols[j].getHCatValue(i));
      ++idx;
    }

    records.add(record);
  }
  return records;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 24, Source: HCatalogTestUtils.java
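
Note how the helper sets values by column name and lets the schema resolve positions. A minimal, self-contained sketch of that pattern, with an invented two-column schema:

import java.util.Arrays;

import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class RecordByNameSketch {
  public static void main(String[] args) throws Exception {
    HCatSchema schema = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("id", HCatFieldSchema.Type.INT, null),
        new HCatFieldSchema("txt", HCatFieldSchema.Type.STRING, null)));

    DefaultHCatRecord record = new DefaultHCatRecord(schema.size());
    // set(fieldName, schema, value) resolves the position through the schema.
    record.set("id", schema, 42);
    record.set("txt", schema, "textfield42");

    System.out.println(record.get("txt", schema)); // textfield42
  }
}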

Example 3: readObject

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
	this.fieldNames = new String[in.readInt()];
	for (int i = 0; i < this.fieldNames.length; i++) {
		this.fieldNames[i] = in.readUTF();
	}

	// Restore the Hadoop Configuration that the writing side serialized.
	Configuration configuration = new Configuration();
	configuration.readFields(in);

	if (this.configuration == null) {
		this.configuration = configuration;
	}

	this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
	// The output schema travels inside the job configuration as a serialized string.
	this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: HCatInputFormatBase.java
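
For context, the writing side presumably mirrors this by serializing the schema into the same configuration key via HCatUtil.serialize before writing the configuration out. A hedged sketch of that counterpart (the excerpt does not show the project's actual writeObject):

import java.io.IOException;
import java.io.ObjectOutputStream;
import org.apache.hive.hcatalog.common.HCatUtil;

private void writeObject(ObjectOutputStream out) throws IOException {
	out.writeInt(this.fieldNames.length);
	for (String fieldName : this.fieldNames) {
		out.writeUTF(fieldName);
	}
	// HCatUtil.serialize renders the schema as a String that fits in the conf.
	this.configuration.set("mapreduce.lib.hcat.output.schema",
			HCatUtil.serialize(this.outputSchema));
	this.configuration.write(out);
}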

Example 4: extractPartInfo

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
private static PartInfo extractPartInfo(HCatSchema schema, StorageDescriptor sd,
                    Map<String, String> parameters, Configuration conf,
                    InputJobInfo inputJobInfo) throws IOException {

  StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);

  Properties hcatProperties = new Properties();
  HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);

  // copy the properties from storageHandler to jobProperties
  Map<String, String> jobProperties =
      HCatRSUtil.getInputJobProperties(storageHandler, inputJobInfo);

  for (String key : parameters.keySet()) {
    hcatProperties.put(key, parameters.get(key));
  }
  // FIXME
  // Bloating partinfo with inputJobInfo is not good
  return new PartInfo(schema, storageHandler, sd.getLocation(),
    hcatProperties, jobProperties, inputJobInfo.getTableInfo());
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 22, Source: InitializeInput.java

Example 5: HCatTableInfo

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
/**
 * Initializes a new HCatTableInfo instance to be used with
 * {@link org.apache.hive.hcatalog.mapreduce.HCatInputFormat} for reading data from
 * a table. When working with Hadoop security, pass the Kerberos principal name of
 * the server, else null. The principal name should be of the form:
 * <servicename>/_HOST@<realm>, e.g. "hcat/_HOST@<realm>".
 * The special string _HOST will be replaced automatically with the correct host name.
 * @param databaseName the database name
 * @param tableName the table name
 * @param dataColumns schema of the columns which contain data
 * @param partitionColumns schema of the partition columns
 * @param storerInfo information about the storage descriptor
 * @param table the Hive metastore table class
 */
HCatTableInfo(
  String databaseName,
  String tableName,
  HCatSchema dataColumns,
  HCatSchema partitionColumns,
  StorerInfo storerInfo,
  Table table) {
  this.databaseName = (databaseName == null) ?
      MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
  this.tableName = tableName;
  this.dataColumns = dataColumns;
  this.table = table;
  this.storerInfo = storerInfo;
  this.partitionColumns = partitionColumns;
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 30, Source: HCatTableInfo.java

Example 6: getHCatSchema

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
HCatSchema getHCatSchema(List<RequiredField> fields, String signature,
                         Class<?> classForUDFCLookup) throws IOException {
  if (fields == null) {
    return null;
  }

  Properties props = UDFContext.getUDFContext().getUDFProperties(
    classForUDFCLookup, new String[]{signature});
  HCatSchema hcatTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);

  ArrayList<HCatFieldSchema> fcols = new ArrayList<HCatFieldSchema>();
  for (RequiredField rf : fields) {
    fcols.add(hcatTableSchema.getFields().get(rf.getIndex()));
  }
  return new HCatSchema(fcols);
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 17, Source: PigHCatUtil.java
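
The method above prunes the cached table schema down to the required columns. The same projection can be written generically; a small sketch (the method name is invented):

import java.util.ArrayList;
import java.util.List;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

// Mirrors getHCatSchema(): pick the given positions out of a wider table schema.
static HCatSchema project(HCatSchema tableSchema, int... positions) throws HCatException {
  List<HCatFieldSchema> projected = new ArrayList<HCatFieldSchema>();
  for (int pos : positions) {
    projected.add(tableSchema.get(pos));
  }
  return new HCatSchema(projected);
}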

Example 7: getSchema

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
  HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get()
    .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
  Table table = phutil.getTable(location,
    hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
    PigHCatUtil.getHCatServerPrincipal(job),

    // Pass job to initialize metastore conf overrides for embedded metastore case
    // (hive.metastore.uris = "").
    job);
  HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
  try {
    PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
  } catch (IOException e) {
    throw new PigException(
      "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
        + ";[Table schema was " + hcatTableSchema.toString() + "]"
      , PigHCatUtil.PIG_EXCEPTION_CODE, e);
  }
  storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
  outputSchema = hcatTableSchema;
  return PigHCatUtil.getResourceSchema(hcatTableSchema);
}
 
Developer: cloudera, Project: RecordServiceClient, Lines: 25, Source: HCatRSLoader.java

Example 8: mapper_emits_values_by_reading_schema_from_hcatalog

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
@Test
public void mapper_emits_values_by_reading_schema_from_hcatalog() throws Exception {
    HCatSchema hCatSchema = buildTestSchema();
    provider.setSchema(hCatSchema);

    // setup hcatrecords. We care most about 15 and 48
    List<DefaultHCatRecord> hCatRecords = new ArrayList<DefaultHCatRecord>();
    for (String[] rec : readFile("src/test/resources/sample.txt")) {
        DefaultHCatRecord hcr = new DefaultHCatRecord(rec.length);
        for (int i = 0; i < rec.length; i++) {
            hcr.set(i, rec[i]);
        }
        hCatRecords.add(hcr);
        mapDriver.withInput(new LongWritable(), hcr);
    }

    mapDriver.withOutput(new Text("CA"), new Text("69"));
    mapDriver.withOutput(new Text("TX"), new Text("14.38"));
    mapDriver.withOutput(new Text("MI"), new Text("13.53"));
    mapDriver.withOutput(new Text("CA"), new Text("18.3"));
    mapDriver.withOutput(new Text("IL"), new Text("16.06"));
    mapDriver.runTest();
}
 
Developer: mmiklavc, Project: hadoop-testing, Lines: 24, Source: CMSTopStateTest.java

Example 9: parse

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
@Override
public List<Object[]> parse(File file, HCatSchema schema, List<String> names) {
  try {
    List<String> lines = Files.readAllLines(file.toPath(), charset);

    if (this.hasHeader) {
      lines = lines.subList(1, lines.size());
    }

    List<Object[]> records = new ArrayList<>(lines.size());
    for (String line : lines) {
      records.add(parseRow(line, names.size()));
    }
    return records;
  } catch (IOException e) {
    throw new RuntimeException("Error while reading file", e);
  }
}
 
Developer: klarna, Project: HiveRunner, Lines: 19, Source: TsvFileParser.java

Example 10: HCatalogMapParser

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public HCatalogMapParser( final Map<Object,Object> record , final HCatSchema schema ) throws IOException{
  this.schema = schema;
  this.record = record;

  childSchema = schema.get(0);
  childConverter = HCatalogPrimitiveConverterFactory.get( childSchema );
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 8, Source: HCatalogMapParser.java

Example 11: HCatalogStructParser

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public HCatalogStructParser( final List<Object> record , final HCatSchema schema ) throws IOException{
  this.record = record;

  fieldIndexMap = new HashMap<String,Integer>();
  converterList = new ArrayList<IHCatalogPrimitiveConverter>();
  schemaList = new ArrayList<HCatFieldSchema>();

  for( int i = 0 ; i < schema.size() ; i++ ){
    HCatFieldSchema fieldSchema = schema.get(i);
    fieldIndexMap.put( fieldSchema.getName() , Integer.valueOf(i) );
    converterList.add( HCatalogPrimitiveConverterFactory.get( fieldSchema ) );
    schemaList.add( schema.get(i) );
  }
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 15, Source: HCatalogStructParser.java

Example 12: HCatalogRootParser

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public HCatalogRootParser( final HCatRecord record , final HCatSchema schema ) throws IOException{
  this.record = record;

  fieldIndexMap = new HashMap<String,Integer>();
  converterList = new ArrayList<IHCatalogPrimitiveConverter>();
  schemaList = new ArrayList<HCatFieldSchema>();

  for( int i = 0 ; i < schema.size() ; i++ ){
    HCatFieldSchema fieldSchema = schema.get(i);
    fieldIndexMap.put( fieldSchema.getName() , Integer.valueOf(i) );
    converterList.add( HCatalogPrimitiveConverterFactory.get( fieldSchema ) );
    schemaList.add( schema.get(i) );
  }
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 15, Source: HCatalogRootParser.java

Example 13: HCatalogArrayParser

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public HCatalogArrayParser( final List<Object> record , final HCatSchema schema ) throws IOException{
  this.schema = schema;
  this.record = record;

  childSchema = schema.get(0);
  childConverter = HCatalogPrimitiveConverterFactory.get( childSchema );
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 8, Source: HCatalogArrayParser.java

Example 14: getGeneralSchema

import org.apache.hive.hcatalog.data.schema.HCatSchema; // import the required package/class
public static IField getGeneralSchema( final HCatSchema hCatSchema ) throws IOException{
  StructContainerField schema = new StructContainerField( "hcat_schema" );
  for( int i = 0 ; i < hCatSchema.size() ; i++ ){
    schema.set( getGeneralSchemaFromHCatFieldSchema( hCatSchema.get( i ) ) );
  }
  return schema;
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 8, Source: HCatalogSchemaFactory.java
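
Tying the two factory methods together: a schema assembled with the stock HCatalog constructors converts wholesale into the library's general representation. A hedged end-to-end sketch (column names invented; HCatalogSchemaFactory from dataplatform-schema-lib is assumed to be on the classpath):

import java.util.Arrays;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class GetGeneralSchemaSketch {
  public static void main(String[] args) throws Exception {
    HCatSchema hCatSchema = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("id", HCatFieldSchema.Type.INT, null),
        new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, null)));

    // Wraps every column in a StructContainerField named "hcat_schema" (see Example 14).
    System.out.println(HCatalogSchemaFactory.getGeneralSchema(hCatSchema));
  }
}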


Note: the org.apache.hive.hcatalog.data.schema.HCatSchema class examples on this page were compiled by 纯净天空 from open-source code and documentation hosted on GitHub, MSDocs, and similar platforms. Each snippet remains under its original project's license; consult that license before using or redistributing the code, and do not republish this collection without permission.