This article collects typical usage examples of the Java method org.apache.hadoop.hive.metastore.api.StorageDescriptor.getLocation. If you have been wondering what StorageDescriptor.getLocation does and how to use it in practice, the curated examples below should help. You can also read further about the enclosing class, org.apache.hadoop.hive.metastore.api.StorageDescriptor.
The following presents 8 code examples of StorageDescriptor.getLocation, sorted by popularity by default.
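Before the examples, a minimal sketch of where a StorageDescriptor typically comes from may be useful: it hangs off a metastore Table (or Partition) object, and getLocation() returns the URI of the underlying data. This is an illustrative sketch only; the database name, table name, and printed location are placeholders, not values taken from the examples below.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class GetLocationSketch {
  public static void main(String[] args) throws Exception {
    // Connect to the metastore configured by the hive-site.xml on the classpath.
    HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
    Table table = client.getTable("default", "my_table"); // placeholder names
    StorageDescriptor sd = table.getSd();
    // The storage URI, e.g. hdfs://namenode:8020/warehouse/my_table
    String location = sd.getLocation();
    System.out.println(location);
    client.close();
  }
}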
Example 1: StorageDescriptorWrapper
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
public StorageDescriptorWrapper(StorageDescriptor sd) {
  this.sd = sd;
  this.cols = Lists.newArrayList();
  for (FieldSchema f : sd.getCols()) {
    this.cols.add(new FieldSchemaWrapper(f));
  }
  this.location = sd.getLocation();
  this.inputFormat = sd.getInputFormat();
  this.outputFormat = sd.getOutputFormat();
  this.compressed = sd.isCompressed();
  this.numBuckets = sd.getNumBuckets();
  this.serDeInfo = new SerDeInfoWrapper(sd.getSerdeInfo());
  // this.bucketCols = sd.getBucketCols();
  this.sortCols = Lists.newArrayList();
  for (Order o : sd.getSortCols()) {
    this.sortCols.add(new OrderWrapper(o));
  }
  this.parameters = sd.getParameters();
}
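Note that this variant iterates sd.getCols() and sd.getSortCols() without null checks; Example 2 below is an otherwise similar wrapper constructor that guards both collections before iterating.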
Example 2: StorageDescriptorWrapper
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
public StorageDescriptorWrapper(StorageDescriptor storageDescriptor) {
  sd = storageDescriptor;
  location = storageDescriptor.getLocation();
  inputFormat = storageDescriptor.getInputFormat();
  outputFormat = storageDescriptor.getOutputFormat();
  compressed = storageDescriptor.isCompressed();
  numBuckets = storageDescriptor.getNumBuckets();
  serDeInfo = new SerDeInfoWrapper(storageDescriptor.getSerdeInfo());
  if (sd.getSortCols() != null) {
    sortCols = Lists.newArrayList();
    for (Order order : sd.getSortCols()) {
      sortCols.add(new OrderWrapper(order));
    }
  }
  parameters = storageDescriptor.getParameters();
  if (sd.getCols() != null) {
    this.columns = Lists.newArrayList();
    for (FieldSchema fieldSchema : sd.getCols()) {
      this.columns.add(new FieldSchemaWrapper(fieldSchema));
    }
  }
}
Example 3: extractPartInfo
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
private static PartInfo extractPartInfo(HCatSchema schema, StorageDescriptor sd,
    Map<String, String> parameters, Configuration conf,
    InputJobInfo inputJobInfo) throws IOException {
  StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);
  Properties hcatProperties = new Properties();
  HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
  // copy the properties from storageHandler to jobProperties
  Map<String, String> jobProperties =
      HCatRSUtil.getInputJobProperties(storageHandler, inputJobInfo);
  for (String key : parameters.keySet()) {
    hcatProperties.put(key, parameters.get(key));
  }
  // FIXME
  // Bloating partinfo with inputJobInfo is not good
  return new PartInfo(schema, storageHandler, sd.getLocation(),
      hcatProperties, jobProperties, inputJobInfo.getTableInfo());
}
Example 4: extractPartInfo
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
/**
 * Extract partition info.
 *
 * @param schema Table schema
 * @param sd Storage descriptor
 * @param parameters Parameters
 * @param conf Configuration
 * @param inputJobInfo Input job info
 * @return Partition info
 * @throws IOException if the storage handler cannot be obtained
 */
private static PartInfo extractPartInfo(
    HCatSchema schema, StorageDescriptor sd, Map<String, String> parameters,
    Configuration conf, InputJobInfo inputJobInfo) throws IOException {
  StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);
  Properties hcatProperties = new Properties();
  HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf,
      storerInfo);
  // Copy the properties from storageHandler to jobProperties
  Map<String, String> jobProperties =
      HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);
  for (Map.Entry<String, String> param : parameters.entrySet()) {
    hcatProperties.put(param.getKey(), param.getValue());
  }
  return new PartInfo(schema, storageHandler, sd.getLocation(),
      hcatProperties, jobProperties, inputJobInfo.getTableInfo());
}
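Besides the Javadoc, this version differs from Example 3 in iterating parameters.entrySet() instead of looking each key up again inside a keySet() loop, which saves one map lookup per entry.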
Example 5: splitInput
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
private void splitInput(final Properties properties, final StorageDescriptor sd, final Partition partition)
    throws ReflectiveOperationException, IOException {
  final JobConf job = new JobConf();
  for (final Object obj : properties.keySet()) {
    job.set((String) obj, (String) properties.get(obj));
  }
  for (final Map.Entry<String, String> entry : hiveReadEntry.hiveConfigOverride.entrySet()) {
    job.set(entry.getKey(), entry.getValue());
  }
  InputFormat<?, ?> format = (InputFormat<?, ?>)
      Class.forName(sd.getInputFormat()).getConstructor().newInstance();
  job.setInputFormat(format.getClass());
  final Path path = new Path(sd.getLocation());
  final FileSystem fs = path.getFileSystem(job);
  if (fs.exists(path)) {
    FileInputFormat.addInputPath(job, path);
    format = job.getInputFormat();
    for (final InputSplit split : format.getSplits(job, 1)) {
      inputSplits.add(split);
      partitionMap.put(split, partition);
    }
  }
  final String numRowsProp = properties.getProperty("numRows");
  logger.trace("HiveScan num rows property = {}", numRowsProp);
  if (numRowsProp != null) {
    final long numRows = Long.parseLong(numRowsProp);
    // Starting from Hive 0.13, this property is set to -1 when no statistics are available.
    // It is important to note that the value returned by Hive may not be up to date.
    if (numRows > 0) {
      rowCount += numRows;
    }
  }
}
Example 6: addInputPath
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
private static boolean addInputPath(StorageDescriptor sd, JobConf job) throws IOException {
  final Path path = new Path(sd.getLocation());
  final FileSystem fs = FileSystemWrapper.get(path, job);
  if (fs.exists(path)) {
    FileInputFormat.addInputPath(job, path);
    return true;
  }
  return false;
}
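Since the method reports whether the path actually existed, a caller can detect the case where no input was registered at all. A hypothetical usage sketch (the job and partitions variables are assumptions, not part of the original example):

boolean hasInput = false;
for (Partition partition : partitions) {
  hasInput |= addInputPath(partition.getSd(), job); // register each partition location that exists
}
if (!hasInput) {
  // none of the partition locations exist on the file system; the scan can short-circuit
}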
Example 7: getSdLocation
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
private String getSdLocation(StorageDescriptor sd) {
  if (sd == null) {
    return "";
  } else {
    return sd.getLocation();
  }
}
Example 8: locationOnS3
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
private boolean locationOnS3(StorageDescriptor sd) {
  String location = sd.getLocation();
  return location != null && (location.startsWith("s3n") || location.startsWith("s3a"));
}