本文整理汇总了Java中org.apache.hadoop.hive.metastore.api.Partition.setParameters方法的典型用法代码示例。如果您正苦于以下问题:Java Partition.setParameters方法的具体用法?Java Partition.setParameters怎么用?Java Partition.setParameters使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.metastore.api.Partition
的用法示例。
在下文中一共展示了Partition.setParameters方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
private Partition newPartition(String... values) {
  // Test fixture: a partition of DB_NAME.TABLE_NAME whose location is derived
  // from the table location plus the standard partition-name encoding of `values`.
  Partition result = new Partition();
  result.setDbName(DB_NAME);
  result.setTableName(TABLE_NAME);
  result.setValues(Arrays.asList(values));

  StorageDescriptor descriptor = new StorageDescriptor();
  descriptor.setCols(FIELDS);
  descriptor.setLocation(new Path(tableLocation, partitionName(values)).toUri().toString());
  result.setSd(descriptor);

  // Seed basic stats so the metastore treats the partition as non-empty.
  Map<String, String> stats = new HashMap<>();
  stats.put(StatsSetupConst.ROW_COUNT, "1");
  result.setParameters(stats);
  return result;
}
示例2: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
public static Partition newPartition(Table table, String... partitionValues) {
  // Builds a partition for the given table that reuses the table's own
  // storage descriptor and starts with an empty parameter map.
  Partition p = new Partition();
  p.setDbName(table.getDbName());
  p.setTableName(table.getTableName());
  p.setSd(table.getSd());
  p.setValues(Arrays.asList(partitionValues));
  p.setParameters(new HashMap<String, String>());
  return p;
}
示例3: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
private Partition newPartition(
    String database,
    String table,
    StorageDescriptor tableStorageDescriptor,
    List<String> values,
    File location,
    String sourceTable,
    String sourceLocation,
    boolean addChecksum) {
  // Copy the table's storage descriptor so this partition can carry its own location.
  StorageDescriptor sd = new StorageDescriptor(tableStorageDescriptor);
  sd.setLocation(location.toURI().toString());

  // Assemble the Circus Train replication metadata up front; only the entries
  // whose inputs were supplied are recorded.
  Map<String, String> parameters = new HashMap<>();
  if (sourceTable != null) {
    parameters.put(CircusTrainTableParameter.SOURCE_TABLE.parameterName(), sourceTable);
  }
  if (sourceLocation != null) {
    parameters.put(CircusTrainTableParameter.SOURCE_LOCATION.parameterName(), sourceLocation);
  }
  if (addChecksum) {
    // The fixture uses the directory name as a stand-in checksum value.
    parameters.put(CircusTrainTableParameter.PARTITION_CHECKSUM.parameterName(), location.getName());
  }

  Partition partition = new Partition();
  partition.setDbName(database);
  partition.setTableName(table);
  partition.setValues(values);
  partition.setSd(sd);
  partition.setParameters(parameters);
  return partition;
}
示例4: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
private static Partition newPartition(String databaseName, String tableName, String location) {
  // Minimal single-value ("01") partition: only the storage location is populated,
  // and the parameter map is present but empty.
  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation(location);

  Partition partition = new Partition();
  partition.setDbName(databaseName);
  partition.setTableName(tableName);
  partition.setValues(Arrays.asList("01"));
  partition.setParameters(new HashMap<String, String>());
  partition.setSd(sd);
  return partition;
}
示例5: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
public static Partition newPartition(String database, String tableName, String partitionValue) {
  // Fully populated fixture partition: privileges, skew info, serde, and a
  // sample custom parameter are all set so serialization round-trips can be tested.

  // Grant the "read" user an (empty) privilege entry.
  Map<String, List<PrivilegeGrantInfo>> grantsByUser = new HashMap<>();
  grantsByUser.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privilegeSet = new PrincipalPrivilegeSet();
  privilegeSet.setUserPrivileges(grantsByUser);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(COLS);
  sd.setInputFormat(INPUT_FORMAT);
  sd.setOutputFormat(OUTPUT_FORMAT);
  sd.setSerdeInfo(new SerDeInfo(SERDE_INFO_NAME, SERIALIZATION_LIB, new HashMap<String, String>()));
  sd.setSkewedInfo(new SkewedInfo());
  sd.setParameters(new HashMap<String, String>());
  // Location mirrors the conventional <db>/<table>/<partition>/ layout.
  sd.setLocation(DATABASE + "/" + tableName + "/" + partitionValue + "/");

  Map<String, String> partitionParameters = new HashMap<>();
  partitionParameters.put("com.company.parameter", "abc");

  Partition partition = new Partition();
  partition.setDbName(database);
  partition.setTableName(tableName);
  partition.setCreateTime(CREATE_TIME);
  partition.setValues(ImmutableList.of(partitionValue));
  partition.setPrivileges(privilegeSet);
  partition.setSd(sd);
  partition.setParameters(partitionParameters);
  return partition;
}
示例6: init
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
@Before
public void init() {
  // Fixture setup: build a fully populated partition ("database"."table", value "part")
  // so each test starts from the same known state.

  // Privileges: one (empty) grant for the "read" user.
  Map<String, List<PrivilegeGrantInfo>> grantsByUser = new HashMap<>();
  grantsByUser.put("read", ImmutableList.of(new PrivilegeGrantInfo()));
  PrincipalPrivilegeSet privilegeSet = new PrincipalPrivilegeSet();
  privilegeSet.setUserPrivileges(grantsByUser);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Arrays.asList(new FieldSchema("a", "int", null)));
  sd.setInputFormat("input_format");
  sd.setOutputFormat("output_format");
  sd.setSerdeInfo(new SerDeInfo("serde", "lib", new HashMap<String, String>()));
  sd.setSkewedInfo(new SkewedInfo());
  sd.setParameters(new HashMap<String, String>());
  sd.setLocation("database/table/part/");

  Map<String, String> partitionParameters = new HashMap<>();
  partitionParameters.put("com.company.parameter", "abc");

  partition = new Partition();
  partition.setDbName("database");
  partition.setTableName("table");
  partition.setValues(ImmutableList.of("part"));
  partition.setPrivileges(privilegeSet);
  partition.setSd(sd);
  partition.setParameters(partitionParameters);
}
示例7: newPartition
import org.apache.hadoop.hive.metastore.api.Partition; //导入方法依赖的package包/类
public static Partition newPartition() {
  // Bare-bones partition: empty parameter maps on both the partition itself
  // and the storage descriptor's SerDe, nothing else populated.
  SerDeInfo serDeInfo = new SerDeInfo();
  serDeInfo.setParameters(new HashMap<String, String>());

  StorageDescriptor sd = new StorageDescriptor();
  sd.setSerdeInfo(serDeInfo);

  Partition result = new Partition();
  result.setSd(sd);
  result.setParameters(new HashMap<String, String>());
  return result;
}