

Java Table.setTableName Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.metastore.api.Table.setTableName. If you are wondering how Table.setTableName is used in practice, or are looking for concrete examples of it, the curated code samples below may help. You can also browse further usage examples of its enclosing class, org.apache.hadoop.hive.metastore.api.Table.


Fifteen code examples of the Table.setTableName method are shown below, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system recommend better Java code examples.
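All of the examples that follow share the same basic pattern: build a Thrift Table object, call setDbName and setTableName, attach a StorageDescriptor, and register the table through a HiveMetaStoreClient. The minimal sketch below shows that pattern in isolation; the metastore URI, database name (demo_db), table name (demo_table), and location are hypothetical placeholders, and it assumes a reachable Hive metastore.

import java.util.Arrays;
import java.util.HashMap;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class SetTableNameSketch {

  public static void main(String[] args) throws Exception {
    // Hypothetical metastore endpoint; point this at a real metastore Thrift URI.
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");

    Table table = new Table();
    table.setDbName("demo_db");         // hypothetical database
    table.setTableName("demo_table");   // the method this article documents
    table.setTableType(TableType.EXTERNAL_TABLE.name());
    table.putToParameters("EXTERNAL", "TRUE");

    // A table needs a StorageDescriptor describing its columns and location.
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Arrays.asList(new FieldSchema("id", "int", null)));
    sd.setLocation("file:///tmp/demo_table");
    sd.setParameters(new HashMap<String, String>());
    sd.setSerdeInfo(new SerDeInfo());
    table.setSd(sd);

    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      client.createTable(table);
    } finally {
      client.close();
    }
  }
}

The full examples below extend this same pattern with partition keys, input/output formats, and column statistics.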

Example 1: createPartitionedTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
static Table createPartitionedTable(HiveMetaStoreClient metaStoreClient, String database, String table, File location)
  throws Exception {

  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  hiveTable.setPartitionKeys(PARTITION_COLUMNS);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  return hiveTable;
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 24, Source: TestUtils.java

Example 2: newTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
private Table newTable() {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  table.setTableType(TableType.EXTERNAL_TABLE.name());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation(tableLocation);
  table.setSd(sd);

  HashMap<String, String> parameters = new HashMap<>();
  parameters.put(StatsSetupConst.ROW_COUNT, "1");
  table.setParameters(parameters);

  table.setPartitionKeys(PARTITIONS);
  return table;
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 18, Source: ReplicaTest.java

Example 3: createUnpartitionedTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
static Table createUnpartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    File location)
  throws TException {
  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  return hiveTable;
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 25, Source: TestUtils.java

Example 4: createUnpartitionedTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
private Table createUnpartitionedTable(String databaseName, String tableName) throws Exception {
  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);
  table.setSd(new StorageDescriptor());
  table.getSd().setCols(Arrays.asList(new FieldSchema("id", "int", null), new FieldSchema("name", "string", null)));
  table.getSd().setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
  table.getSd().setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  table.getSd().setSerdeInfo(new SerDeInfo());
  table.getSd().getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
  HiveMetaStoreClient client = server.newClient();
  client.createTable(table);
  client.close();
  return table;
}
 
Developer ID: HotelsDotCom, Project: beeju, Lines: 16, Source: HiveServer2JUnitRuleTest.java

Example 5: get_table_req

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Test
public void get_table_req() throws MetaException, NoSuchObjectException, TException {
  Table table = new Table();
  table.setDbName(DB_P);
  table.setTableName("table");
  GetTableRequest request = new GetTableRequest(table.getDbName(), table.getTableName());
  GetTableResult response = new GetTableResult(table);
  when(primaryClient.get_table_req(request)).thenReturn(response);
  when(primaryMapping.transformInboundGetTableRequest(request)).thenReturn(request);
  when(primaryMapping.transformOutboundGetTableResult(response)).thenReturn(response);
  GetTableResult result = handler.get_table_req(request);
  assertThat(result.getTable().getDbName(), is(DB_P));
  assertThat(result.getTable().getTableName(), is("table"));
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 15, Source: FederatedHMSHandlerTest.java

Example 6: transformOutboundGetTablesResult

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Test
public void transformOutboundGetTablesResult() throws Exception {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  GetTablesResult result = new GetTablesResult();
  result.setTables(Arrays.asList(table));
  GetTablesResult transformedResult = databaseMapping.transformOutboundGetTablesResult(result);
  assertThat(transformedResult, is(sameInstance(result)));
  assertThat(transformedResult.getTables().size(), is(1));
  assertThat(transformedResult.getTables().get(0), is(sameInstance(result.getTables().get(0))));
  assertThat(transformedResult.getTables().get(0).getDbName(), is(OUT_DB_NAME));
  assertThat(transformedResult.getTables().get(0).getTableName(), is(TABLE_NAME));
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 15, Source: DatabaseMappingImplTest.java

Example 7: createTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
private void createTable(File sourceTableUri) throws Exception {
  File partitionEurope = new File(sourceTableUri, "local_date=2000-01-01");
  File partitionUk = new File(partitionEurope, "local_hour=0");
  File dataFileUk = new File(partitionUk, PART_00000);
  FileUtils.writeStringToFile(dataFileUk, "1\tadam\tlondon\n2\tsusan\tglasgow\n");

  File partitionAsia = new File(sourceTableUri, "local_date=2000-01-02");
  File partitionChina = new File(partitionAsia, "local_hour=0");
  File dataFileChina = new File(partitionChina, PART_00000);
  String data = "1\tchun\tbeijing\n2\tshanghai\tmilan\n";
  FileUtils.writeStringToFile(dataFileChina, data);

  HiveMetaStoreClient sourceClient = sourceCatalog.client();

  Table source = new Table();
  source.setDbName(DATABASE);
  source.setTableName(TABLE);
  source.setTableType(TableType.EXTERNAL_TABLE.name());
  source.setParameters(new HashMap<String, String>());

  List<FieldSchema> partitionColumns = Arrays.asList(new FieldSchema("local_date", "string", ""),
      new FieldSchema("local_hour", "string", ""));
  source.setPartitionKeys(partitionColumns);

  List<FieldSchema> dataColumns = Arrays.asList(new FieldSchema("id", "bigint", ""),
      new FieldSchema("name", "string", ""), new FieldSchema("city", "tinyint", ""));

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(dataColumns);
  sd.setLocation(sourceTableUri.toURI().toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setSerdeInfo(new SerDeInfo());

  source.setSd(sd);

  sourceClient.createTable(source);
  LOG.info(">>>> Partitions added: {}",
      +sourceClient.add_partitions(Arrays.asList(newPartition(sd, Arrays.asList("2000-01-01", "0"), partitionUk),
          newPartition(sd, Arrays.asList("2000-01-02", "0"), partitionChina))));
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 41, Source: FilterToolIntegrationTest.java

Example 8: before

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Before
public void before() throws TException, IOException {
  Table table = new Table();
  table.setDbName(DATABASE);
  table.setTableName("source_" + TABLE);
  table.setTableType(TableType.EXTERNAL_TABLE.name());
  table.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Arrays.asList(new FieldSchema("col1", "string", null)));
  sd.setSerdeInfo(new SerDeInfo());
  table.setSd(sd);

  hive.client().createTable(table);
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 16, Source: CircusTrainTest.java

Example 9: setupTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Before
public void setupTable() {
  sourceTable = new Table();
  sourceTable.setDbName(DB_NAME);
  sourceTable.setTableName(TABLE_NAME);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation(TABLE_LOCATION);
  sourceTable.setSd(sd);
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 11, Source: HdfsSnapshotLocationManagerTest.java

Example 10: newTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
public static Table newTable(String name, String dbName, List<FieldSchema> partitionKeys, StorageDescriptor sd) {
  Table table = new Table();
  table.setTableName(name);
  table.setDbName(dbName);
  table.setSd(sd);
  table.setPartitionKeys(partitionKeys);
  table.setTableType(TableType.EXTERNAL_TABLE.name());
  table.setParameters(new HashMap<String, String>());
  return table;
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 11, Source: HiveEntityFactory.java

Example 11: createUnpartitionedTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
public static Table createUnpartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    URI location)
  throws TException {
  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(TextInputFormat.class.getName());
  sd.setOutputFormat(TextOutputFormat.class.getName());
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.OpenCSVSerde");

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, database, table);
  ColumnStatisticsData statsData = new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1L, 2L));
  ColumnStatisticsObj cso1 = new ColumnStatisticsObj("id", "bigint", statsData);
  List<ColumnStatisticsObj> statsObj = Collections.singletonList(cso1);
  metaStoreClient.updateTableColumnStatistics(new ColumnStatistics(statsDesc, statsObj));

  return hiveTable;
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 34, Source: TestUtils.java

Example 12: createPartitionedTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
public static Table createPartitionedTable(
    HiveMetaStoreClient metaStoreClient,
    String database,
    String table,
    URI location)
  throws Exception {

  Table hiveTable = new Table();
  hiveTable.setDbName(database);
  hiveTable.setTableName(table);
  hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
  hiveTable.putToParameters("EXTERNAL", "TRUE");

  hiveTable.setPartitionKeys(PARTITION_COLUMNS);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(DATA_COLUMNS);
  sd.setLocation(location.toString());
  sd.setParameters(new HashMap<String, String>());
  sd.setInputFormat(TextInputFormat.class.getName());
  sd.setOutputFormat(TextOutputFormat.class.getName());
  sd.setSerdeInfo(new SerDeInfo());
  sd.getSerdeInfo().setSerializationLib("org.apache.hadoop.hive.serde2.OpenCSVSerde");

  hiveTable.setSd(sd);

  metaStoreClient.createTable(hiveTable);

  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, database, table);
  ColumnStatisticsData statsData = new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1L, 2L));
  ColumnStatisticsObj cso1 = new ColumnStatisticsObj("id", "bigint", statsData);
  List<ColumnStatisticsObj> statsObj = Collections.singletonList(cso1);
  metaStoreClient.updateTableColumnStatistics(new ColumnStatistics(statsDesc, statsObj));

  return hiveTable;
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 37, Source: TestUtils.java

Example 13: transformOutboundGetTableResult

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Test
public void transformOutboundGetTableResult() throws Exception {
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  GetTableResult result = new GetTableResult();
  result.setTable(table);
  GetTableResult transformedResult = databaseMapping.transformOutboundGetTableResult(result);
  assertThat(transformedResult, is(sameInstance(result)));
  assertThat(transformedResult.getTable(), is(sameInstance(result.getTable())));
  assertThat(transformedResult.getTable().getDbName(), is(OUT_DB_NAME));
  assertThat(transformedResult.getTable().getTableName(), is(TABLE_NAME));
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 14, Source: DatabaseMappingImplTest.java

Example 14: newTable

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
private static Table newTable(String databaseName, String tableName, String location) {
  Table table = new Table();

  table.setDbName(databaseName);
  table.setTableName(tableName);
  table.setParameters(new HashMap<String, String>());
  table.setPartitionKeys(Arrays.asList(new FieldSchema("a", "string", null)));

  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation(location);
  table.setSd(sd);

  return table;
}
 
Developer ID: HotelsDotCom, Project: circus-train, Lines: 15, Source: HiveDifferencesTest.java

Example 15: get_table_objects_by_name_req

import org.apache.hadoop.hive.metastore.api.Table; // import the package/class the method depends on
@Test
public void get_table_objects_by_name_req()
  throws MetaException, InvalidOperationException, UnknownDBException, TException {
  Table table0 = new Table();
  table0.setDbName(DB_P);
  table0.setTableName("table0");
  Table table1 = new Table();
  table1.setDbName(DB_P);
  table1.setTableName("table1");
  GetTablesRequest request = new GetTablesRequest(DB_P);
  request.setTblNames(Arrays.asList(table0.getTableName(), table1.getTableName()));
  GetTablesResult response = new GetTablesResult(Arrays.asList(table0, table1));
  when(primaryClient.get_table_objects_by_name_req(request)).thenReturn(response);
  when(primaryMapping.transformInboundGetTablesRequest(request)).thenReturn(request);
  when(primaryMapping.transformOutboundGetTablesResult(response)).thenReturn(response);
  GetTablesResult result = handler.get_table_objects_by_name_req(request);
  assertThat(result.getTables().size(), is(2));
  assertThat(result.getTables().get(0).getDbName(), is(DB_P));
  assertThat(result.getTables().get(0).getTableName(), is("table0"));
  assertThat(result.getTables().get(1).getDbName(), is(DB_P));
  assertThat(result.getTables().get(1).getTableName(), is("table1"));
}
 
Developer ID: HotelsDotCom, Project: waggle-dance, Lines: 23, Source: FederatedHMSHandlerTest.java


Note: the org.apache.hadoop.hive.metastore.api.Table.setTableName examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors. Please consult each project's license before distributing or using the code, and do not reproduce this article without permission.