

Java Table.setFields Method Code Examples

This article collects typical code examples of the Java method org.apache.hadoop.hive.ql.metadata.Table.setFields. If you are asking yourself how Table.setFields works, how to call it, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.hive.ql.metadata.Table.


Ten code examples of Table.setFields are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code samples.
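Before the examples, here is a minimal, self-contained sketch of the basic call pattern: construct a Table, describe its columns as FieldSchema objects (name, type, comment), and hand the list to setFields. The database, table, and column names below are hypothetical placeholders, not taken from any of the projects cited in this article.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Table;

public class SetFieldsSketch {
  public static void main(String[] args) {
    // Hypothetical database and table names, for illustration only.
    Table table = new Table("demo_db", "demo_table");

    // Each FieldSchema carries a column name, a Hive type, and a comment.
    List<FieldSchema> columns = new ArrayList<>();
    columns.add(new FieldSchema("id", "int", "primary identifier"));
    columns.add(new FieldSchema("name", "string", "display name"));

    // setFields replaces the table's column list with the given schema.
    table.setFields(columns);
  }
}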

Example 1: constructAvroTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 21, Source: AvroHiveUtil.java

Example 2: constructParquetTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the Copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 20, Source: ParquetHiveUtil.java

Example 3: constructAvroTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
 
Developer: qubole, Project: streamx, Lines: 21, Source: AvroHiveUtil.java

Example 4: constructParquetTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the Copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
 
Developer: qubole, Project: streamx, Lines: 20, Source: ParquetHiveUtil.java

Example 5: createTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
/**
 * Creates the table.
 *
 * @param db     the db
 * @param table  the table
 * @param udb    the udb
 * @param utable the utable
 * @param setCustomSerde whether to set custom serde or not
 * @param columnMapping column mapping for the table
 *
 * @throws Exception the exception
 */
void createTable(HiveConf conf, String db, String table, String udb, String utable, boolean setCustomSerde,
  Map<String, String> columnMapping) throws Exception {
  Table tbl1 = new Table(db, table);
  if (setCustomSerde) {
    tbl1.setSerializationLib("DatabaseJarSerde");
  }
  if (StringUtils.isNotBlank(udb)) {
    tbl1.setProperty(LensConfConstants.NATIVE_DB_NAME, udb);
  }
  if (StringUtils.isNotBlank(utable)) {
    tbl1.setProperty(LensConfConstants.NATIVE_TABLE_NAME, utable);
  }
  if (columnMapping != null && !columnMapping.isEmpty()) {
    tbl1.setProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING, StringUtils.join(columnMapping.entrySet(), ","));
    log.info("columnMapping property:{}", tbl1.getProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING));
  }

  List<FieldSchema> columns = new ArrayList<FieldSchema>();
  columns.add(new FieldSchema("id", "int", "col1"));
  columns.add(new FieldSchema("name", "string", "col2"));
  tbl1.setFields(columns);

  Hive.get(conf).createTable(tbl1);
  System.out.println("Created table " + table);
}
 
Developer: apache, Project: lens, Lines: 38, Source: TestColumnarSQLRewriter.java

Example 6: alterSchema

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
@Override
public void alterSchema(String database, String tableName, Schema schema) {
  Table table = hiveMetaStore.getTable(database, tableName);
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  hiveMetaStore.alterTable(table);
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 8, Source: ParquetHiveUtil.java

Example 7: createTestTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
private Table createTestTable(String databaseName, String tableName) throws HiveException {
    Table table = new Table(databaseName, tableName);
    table.setInputFormatClass(TextInputFormat.class);
    table.setFields(new ArrayList<FieldSchema>() {{
        add(new FieldSchema("col1", "string", "comment1"));
    }});
    table.setTableType(TableType.EXTERNAL_TABLE);
    table.setDataLocation(new Path("somehdfspath"));
    return table;
}
 
Developer: apache, Project: incubator-atlas, Lines: 12, Source: HiveMetaStoreBridgeTest.java

Example 8: getHiveTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
@Override
public Table getHiveTable(HiveConf conf) {
  Table table = new Table(conf.get(LensConfConstants.STATISTICS_DATABASE_KEY,
    LensConfConstants.DEFAULT_STATISTICS_DATABASE), this.getClass().getSimpleName());
  LinkedList<FieldSchema> colList = new LinkedList<FieldSchema>();
  colList.add(new FieldSchema("handle", "string", "Query Handle"));
  colList.add(new FieldSchema("userQuery", "string", "User Query before rewrite"));
  colList.add(new FieldSchema("submitter", "string", "submitter"));
  colList.add(new FieldSchema("clusterUser", "string", "Cluster User which will do all operations on hdfs"));
  colList.add(new FieldSchema("sessionId", "string", "Lens Session which ran the query"));
  colList.add(new FieldSchema("submissionTime", "bigint", "Time which query was submitted"));
  colList.add(new FieldSchema("startTime", "bigint", "Timestamp which query was Started"));
  colList.add(new FieldSchema("endTime", "bigint", "Timestamp which query was finished"));
  colList.add(new FieldSchema("result", "string", "path to result of query"));
  colList.add(new FieldSchema("cause", "string", "failure/eror cause if any"));
  colList.add(new FieldSchema("status", "map<string,string>", "status object of the query"));
  colList.add(new FieldSchema("driverStats", "map<string,string>", "driver statistics of the query"));
  table.setFields(colList);
  LinkedList<FieldSchema> partCols = new LinkedList<FieldSchema>();
  partCols.add(new FieldSchema("dt", "string", "partCol"));
  table.setPartCols(partCols);
  table.setSerializationLib(JSonSerde.class.getName());
  try {
    table.setInputFormatClass(TextInputFormat.class.getName());
  } catch (HiveException e) {
    log.error("Encountered hive exception.", e);
  }
  return table;
}
 
Developer: apache, Project: lens, Lines: 30, Source: QueryExecutionStatistics.java

Example 9: createTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
/**
 * Creates the table.
 *
 * @param db             the db
 * @param table          the table
 * @param udb            the udb
 * @param utable         the utable
 * @param setCustomSerde whether to set custom serde or not
 * @param columnMapping  column mapping for the table
 * @throws Exception the exception
 */
void createTable(
  HiveConf conf, String db, String table, String udb, String utable, boolean setCustomSerde,
  Map<String, String> columnMapping) throws Exception {
  Table tbl1 = new Table(db, table);

  if (StringUtils.isNotBlank(udb)) {
    tbl1.setProperty(LensConfConstants.NATIVE_DB_NAME, udb);
  }
  if (StringUtils.isNotBlank(utable)) {
    tbl1.setProperty(LensConfConstants.NATIVE_TABLE_NAME, utable);
  }
  if (columnMapping != null && !columnMapping.isEmpty()) {
    tbl1.setProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING, StringUtils.join(columnMapping.entrySet(), ","));
    log.info("columnMapping property:{}", tbl1.getProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING));
  }

  List<FieldSchema> columns = new ArrayList<FieldSchema>();
  columns.add(new FieldSchema("id", "int", "col1"));
  columns.add(new FieldSchema("name", "string", "col2"));
  columns.add(new FieldSchema("dollars_sold", "double", "col3"));
  columns.add(new FieldSchema("units_sold", "int", "col4"));

  tbl1.setFields(columns);

  Hive.get(conf).createTable(tbl1);
  System.out.println("Created table " + table);
}
 
Developer: apache, Project: lens, Lines: 39, Source: TestDruidSQLRewriter.java

Example 10: createHiveTable

import org.apache.hadoop.hive.ql.metadata.Table; // import the class/package the method depends on
/**
 * Creates the hive table.
 *
 * @param db      the db
 * @param table   the table
 * @param columns the columns
 * @throws Exception the exception
 */
void createHiveTable(String db, String table, List<FieldSchema> columns) throws Exception {
  Table tbl1 = new Table(db, table);
  tbl1.setFields(columns);

  Hive.get().createTable(tbl1);
  System.out.println("Created table : " + table);
}
 
Developer: apache, Project: lens, Lines: 16, Source: TestColumnarSQLRewriter.java


Note: The org.apache.hadoop.hive.ql.metadata.Table.setFields examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult each project's License before redistributing or reusing the code, and do not republish without permission.