

Java StorageDescriptor.getCols Method Code Examples

This article collects and summarizes typical usage examples of the Java method org.apache.hadoop.hive.metastore.api.StorageDescriptor.getCols. If you are wondering what StorageDescriptor.getCols does, how to call it, or what it looks like in practice, the curated examples below may help. You can also explore further usage examples for the enclosing class, org.apache.hadoop.hive.metastore.api.StorageDescriptor.


The following shows 4 code examples of the StorageDescriptor.getCols method, sorted by popularity by default.
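Before turning to the project-level examples, here is a minimal, self-contained sketch of the basic pattern: getCols() returns the list of FieldSchema column definitions previously attached to a StorageDescriptor via setCols(). The class name and column definitions below are illustrative assumptions, not code taken from any of the projects cited later.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

public class GetColsSketch {
  public static void main(String[] args) {
    // Build a storage descriptor and attach a column list (names/types are illustrative).
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Arrays.asList(
        new FieldSchema("id", "bigint", "primary key"),
        new FieldSchema("name", "string", "display name")));

    // getCols returns the column list previously set on the descriptor.
    List<FieldSchema> cols = sd.getCols();
    for (FieldSchema col : cols) {
      System.out.println(col.getName() + " : " + col.getType());
    }
  }
}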

Example 1: StorageDescriptorWrapper

import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the class this method belongs to
public StorageDescriptorWrapper(StorageDescriptor sd) {
  this.sd = sd;
  // Wrap each column returned by getCols() in a FieldSchemaWrapper.
  this.cols = Lists.newArrayList();
  for (FieldSchema f : sd.getCols()) {
    this.cols.add(new FieldSchemaWrapper(f));
  }
  this.location = sd.getLocation();
  this.inputFormat = sd.getInputFormat();
  this.outputFormat = sd.getOutputFormat();
  this.compressed = sd.isCompressed();
  this.numBuckets = sd.getNumBuckets();
  this.serDeInfo = new SerDeInfoWrapper(sd.getSerdeInfo());
//  this.bucketCols = sd.getBucketCols();
  // Wrap the sort columns as well.
  this.sortCols = Lists.newArrayList();
  for (Order o : sd.getSortCols()) {
    this.sortCols.add(new OrderWrapper(o));
  }
  this.parameters = sd.getParameters();
}
 
Developer: skhalifa, Project: QDrill, Lines of code: 20, Source file: HiveTable.java

Example 2: renameColumn

import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the class this method belongs to
@Override
public void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
{
    if (!allowRenameColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Renaming columns is disabled in this Hive catalog");
    }

    HiveTableHandle hiveTableHandle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    HiveColumnHandle sourceHandle = checkType(source, HiveColumnHandle.class, "columnHandle");
    Optional<Table> tableMetadata = metastore.getTable(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName());
    if (!tableMetadata.isPresent()) {
        throw new TableNotFoundException(hiveTableHandle.getSchemaTableName());
    }
    Table table = tableMetadata.get();
    StorageDescriptor sd = table.getSd();
    ImmutableList.Builder<FieldSchema> columns = ImmutableList.builder();
    for (FieldSchema fieldSchema : sd.getCols()) {
        if (fieldSchema.getName().equals(sourceHandle.getName())) {
            columns.add(new FieldSchema(target, fieldSchema.getType(), fieldSchema.getComment()));
        }
        else {
            columns.add(fieldSchema);
        }
    }
    sd.setCols(columns.build());
    table.setSd(sd);
    metastore.alterTable(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName(), table);
}
 
Developer: y-lan, Project: presto, Lines of code: 29, Source file: HiveMetadata.java
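Example 2 illustrates the common read-modify-write pattern around getCols: read the existing column list, rebuild it with the desired change, write it back with setCols, and alter the table in the metastore. As a rough standalone sketch of the same pattern using HiveMetaStoreClient directly (the database, table, and column names are placeholder assumptions, not values from the Presto code above):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class RenameColumnSketch {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());

    // Placeholder identifiers; replace with a real database, table, and column.
    Table table = client.getTable("testdb", "testtable");
    StorageDescriptor sd = table.getSd();

    // Rebuild the column list, substituting the renamed field.
    List<FieldSchema> renamed = new ArrayList<>();
    for (FieldSchema col : sd.getCols()) {
      if (col.getName().equals("old_name")) {
        renamed.add(new FieldSchema("new_name", col.getType(), col.getComment()));
      } else {
        renamed.add(col);
      }
    }
    sd.setCols(renamed);

    // Persist the modified descriptor back to the metastore.
    client.alter_table("testdb", "testtable", table);
    client.close();
  }
}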

Example 3: testGetTableAvro

import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the class this method belongs to
@Test
public void testGetTableAvro() {
  final String databaseName = "testdb";
  final String tableName = "testtable";

  HiveTable.Builder builder = new HiveTable.Builder();

  builder.withDbName(databaseName).withTableName(tableName);

  State serdeProps = new State();
  serdeProps.setProp("avro.schema.literal", "{\"type\": \"record\", \"name\": \"TestEvent\","
      + " \"namespace\": \"test.namespace\", \"fields\": [{\"name\":\"a\"," + " \"type\": \"int\"}]}");
  builder.withSerdeProps(serdeProps);

  HiveTable hiveTable = builder.build();
  hiveTable.setInputFormat(AvroContainerInputFormat.class.getName());
  hiveTable.setOutputFormat(AvroContainerOutputFormat.class.getName());
  hiveTable.setSerDeType(AvroSerDe.class.getName());

  Table table = HiveMetaStoreUtils.getTable(hiveTable);
  Assert.assertEquals(table.getDbName(), databaseName);
  Assert.assertEquals(table.getTableName(), tableName);

  StorageDescriptor sd = table.getSd();
  Assert.assertEquals(sd.getInputFormat(), AvroContainerInputFormat.class.getName());
  Assert.assertEquals(sd.getOutputFormat(), AvroContainerOutputFormat.class.getName());
  Assert.assertNotNull(sd.getSerdeInfo());
  Assert.assertEquals(sd.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());

  List<FieldSchema> fields = sd.getCols();
  Assert.assertTrue(fields != null && fields.size() == 1);
  FieldSchema fieldA = fields.get(0);
  Assert.assertEquals(fieldA.getName(), "a");
  Assert.assertEquals(fieldA.getType(), "int");
}
 
Developer: apache, Project: incubator-gobblin, Lines of code: 36, Source file: HiveMetaStoreUtilsTest.java

Example 4: testGetTableAvroInvalidSchema

import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the class this method belongs to
@Test
public void testGetTableAvroInvalidSchema() {
  final String databaseName = "testdb";
  final String tableName = "testtable";

  HiveTable.Builder builder = new HiveTable.Builder();

  builder.withDbName(databaseName).withTableName(tableName);

  State serdeProps = new State();
  serdeProps.setProp("avro.schema.literal", "invalid schema");
  builder.withSerdeProps(serdeProps);

  HiveTable hiveTable = builder.build();
  hiveTable.setInputFormat(AvroContainerInputFormat.class.getName());
  hiveTable.setOutputFormat(AvroContainerOutputFormat.class.getName());
  hiveTable.setSerDeType(AvroSerDe.class.getName());

  Table table = HiveMetaStoreUtils.getTable(hiveTable);
  Assert.assertEquals(table.getDbName(), databaseName);
  Assert.assertEquals(table.getTableName(), tableName);

  StorageDescriptor sd = table.getSd();
  Assert.assertEquals(sd.getInputFormat(), AvroContainerInputFormat.class.getName());
  Assert.assertEquals(sd.getOutputFormat(), AvroContainerOutputFormat.class.getName());
  Assert.assertNotNull(sd.getSerdeInfo());
  Assert.assertEquals(sd.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());

  List<FieldSchema> fields = sd.getCols();
  Assert.assertTrue(fields != null && fields.size() == 0);
}
 
Developer: apache, Project: incubator-gobblin, Lines of code: 32, Source file: HiveMetaStoreUtilsTest.java
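Taken together, Examples 3 and 4 show how getCols reflects the Avro schema handling in HiveMetaStoreUtils.getTable: with a valid avro.schema.literal the resulting StorageDescriptor exposes the declared fields through getCols, while an invalid literal yields an empty column list rather than an exception.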


Note: the org.apache.hadoop.hive.metastore.api.StorageDescriptor.getCols examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright of the source code; when redistributing or using the code, please follow the license of the corresponding project. Do not reproduce this article without permission.