本文整理汇总了Java中org.apache.hadoop.hive.metastore.api.StorageDescriptor.getCols方法的典型用法代码示例。如果您正苦于以下问题:Java StorageDescriptor.getCols方法的具体用法?Java StorageDescriptor.getCols怎么用?Java StorageDescriptor.getCols使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.metastore.api.StorageDescriptor的用法示例。
在下文中一共展示了StorageDescriptor.getCols方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: StorageDescriptorWrapper
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; //导入方法依赖的package包/类
/**
 * Wraps a Hive {@link StorageDescriptor}, mirroring its scalar storage
 * metadata and eagerly copying the column and sort-column lists into
 * wrapper objects.
 *
 * @param sd the storage descriptor to wrap; retained by reference
 */
public StorageDescriptorWrapper(StorageDescriptor sd) {
    this.sd = sd;
    this.location = sd.getLocation();
    this.inputFormat = sd.getInputFormat();
    this.outputFormat = sd.getOutputFormat();
    this.compressed = sd.isCompressed();
    this.numBuckets = sd.getNumBuckets();
    this.serDeInfo = new SerDeInfoWrapper(sd.getSerdeInfo());
    this.parameters = sd.getParameters();
    // Wrap each column schema individually.
    this.cols = Lists.newArrayList();
    for (FieldSchema column : sd.getCols()) {
        cols.add(new FieldSchemaWrapper(column));
    }
    // Wrap each sort-order entry individually.
    this.sortCols = Lists.newArrayList();
    for (Order order : sd.getSortCols()) {
        sortCols.add(new OrderWrapper(order));
    }
}
示例2: renameColumn
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; //导入方法依赖的package包/类
@Override
public void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
{
    // Column renames must be explicitly enabled for this catalog.
    if (!allowRenameColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Renaming columns is disabled in this Hive catalog");
    }
    HiveTableHandle handle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    HiveColumnHandle sourceColumn = checkType(source, HiveColumnHandle.class, "columnHandle");
    Optional<Table> existing = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!existing.isPresent()) {
        throw new TableNotFoundException(handle.getSchemaTableName());
    }
    Table table = existing.get();
    StorageDescriptor descriptor = table.getSd();
    // Rebuild the column list, substituting the renamed column in place
    // so the original ordering is preserved.
    ImmutableList.Builder<FieldSchema> renamed = ImmutableList.builder();
    for (FieldSchema column : descriptor.getCols()) {
        boolean isSource = column.getName().equals(sourceColumn.getName());
        renamed.add(isSource
                ? new FieldSchema(target, column.getType(), column.getComment())
                : column);
    }
    descriptor.setCols(renamed.build());
    table.setSd(descriptor);
    // Persist the modified table definition back to the metastore.
    metastore.alterTable(handle.getSchemaName(), handle.getTableName(), table);
}
示例3: testGetTableAvro
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; //导入方法依赖的package包/类
@Test
public void testGetTableAvro() {
    final String databaseName = "testdb";
    final String tableName = "testtable";
    // Build a HiveTable carrying a valid Avro schema literal with a single
    // int field named "a".
    HiveTable.Builder builder = new HiveTable.Builder();
    builder.withDbName(databaseName).withTableName(tableName);
    State serdeProps = new State();
    serdeProps.setProp("avro.schema.literal", "{\"type\": \"record\", \"name\": \"TestEvent\","
        + " \"namespace\": \"test.namespace\", \"fields\": [{\"name\":\"a\"," + " \"type\": \"int\"}]}");
    builder.withSerdeProps(serdeProps);
    HiveTable hiveTable = builder.build();
    hiveTable.setInputFormat(AvroContainerInputFormat.class.getName());
    hiveTable.setOutputFormat(AvroContainerOutputFormat.class.getName());
    hiveTable.setSerDeType(AvroSerDe.class.getName());
    // Convert to a metastore Table and verify the identity and storage fields
    // carried over.
    Table table = HiveMetaStoreUtils.getTable(hiveTable);
    Assert.assertEquals(table.getDbName(), databaseName);
    Assert.assertEquals(table.getTableName(), tableName);
    StorageDescriptor sd = table.getSd();
    Assert.assertEquals(sd.getInputFormat(), AvroContainerInputFormat.class.getName());
    Assert.assertEquals(sd.getOutputFormat(), AvroContainerOutputFormat.class.getName());
    Assert.assertNotNull(sd.getSerdeInfo());
    Assert.assertEquals(sd.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());
    // The Avro schema should have been parsed into exactly one column.
    // Separate assertions (instead of a compound assertTrue) so a failure
    // reports which condition broke and with what values.
    List<FieldSchema> fields = sd.getCols();
    Assert.assertNotNull(fields);
    Assert.assertEquals(fields.size(), 1);
    FieldSchema fieldA = fields.get(0);
    Assert.assertEquals(fieldA.getName(), "a");
    Assert.assertEquals(fieldA.getType(), "int");
}
示例4: testGetTableAvroInvalidSchema
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; //导入方法依赖的package包/类
@Test
public void testGetTableAvroInvalidSchema() {
    final String databaseName = "testdb";
    final String tableName = "testtable";
    // Build a HiveTable whose Avro schema literal is not parseable; the
    // conversion should still succeed but yield no columns.
    HiveTable.Builder builder = new HiveTable.Builder();
    builder.withDbName(databaseName).withTableName(tableName);
    State serdeProps = new State();
    serdeProps.setProp("avro.schema.literal", "invalid schema");
    builder.withSerdeProps(serdeProps);
    HiveTable hiveTable = builder.build();
    hiveTable.setInputFormat(AvroContainerInputFormat.class.getName());
    hiveTable.setOutputFormat(AvroContainerOutputFormat.class.getName());
    hiveTable.setSerDeType(AvroSerDe.class.getName());
    // Convert to a metastore Table and verify the identity and storage fields
    // are unaffected by the bad schema.
    Table table = HiveMetaStoreUtils.getTable(hiveTable);
    Assert.assertEquals(table.getDbName(), databaseName);
    Assert.assertEquals(table.getTableName(), tableName);
    StorageDescriptor sd = table.getSd();
    Assert.assertEquals(sd.getInputFormat(), AvroContainerInputFormat.class.getName());
    Assert.assertEquals(sd.getOutputFormat(), AvroContainerOutputFormat.class.getName());
    Assert.assertNotNull(sd.getSerdeInfo());
    Assert.assertEquals(sd.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());
    // An unparseable schema must produce an empty (non-null) column list.
    // Separate assertions (instead of a compound assertTrue) so a failure
    // reports which condition broke and with what values.
    List<FieldSchema> fields = sd.getCols();
    Assert.assertNotNull(fields);
    Assert.assertTrue(fields.isEmpty());
}