This article collects typical usage examples of the Java method org.apache.hadoop.hive.metastore.api.StorageDescriptor.getInputFormat. If you are wondering what StorageDescriptor.getInputFormat does, or how and where to use it, the curated code examples below should help. You can also read further about the method's enclosing class, org.apache.hadoop.hive.metastore.api.StorageDescriptor.
Six code examples of the StorageDescriptor.getInputFormat method are shown below, ordered by popularity.
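Before the examples, here is a minimal sketch of where a StorageDescriptor typically comes from and how getInputFormat is called on it. The database and table names are placeholders, and the sketch assumes a reachable metastore configured through hive-site.xml:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class InputFormatLookup {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(); // reads hive-site.xml from the classpath
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      // "default" and "my_table" are placeholder names
      Table table = client.getTable("default", "my_table");
      StorageDescriptor sd = table.getSd();
      // May be null for StorageHandler-based tables (e.g. HBase-backed)
      String inputFormat = sd.getInputFormat();
      System.out.println("InputFormat: " + inputFormat);
    } finally {
      client.close();
    }
  }
}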
Example 1: StorageDescriptorWrapper
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
public StorageDescriptorWrapper(StorageDescriptor sd) {
  this.sd = sd;
  this.cols = Lists.newArrayList();
  for (FieldSchema f : sd.getCols()) {
    this.cols.add(new FieldSchemaWrapper(f));
  }
  this.location = sd.getLocation();
  this.inputFormat = sd.getInputFormat();
  this.outputFormat = sd.getOutputFormat();
  this.compressed = sd.isCompressed();
  this.numBuckets = sd.getNumBuckets();
  this.serDeInfo = new SerDeInfoWrapper(sd.getSerdeInfo());
  // this.bucketCols = sd.getBucketCols();
  this.sortCols = Lists.newArrayList();
  for (Order o : sd.getSortCols()) {
    this.sortCols.add(new OrderWrapper(o));
  }
  this.parameters = sd.getParameters();
}
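The excerpt above assigns fields that the snippet does not show. A plausible sketch of how the enclosing wrapper class might declare them follows; the field names mirror the constructor, but the declarations themselves are an assumption, not the actual source:

// Hypothetical field declarations matching the constructor's assignments.
private final StorageDescriptor sd;      // the wrapped metastore descriptor
private List<FieldSchemaWrapper> cols;   // column schemas
private String location;                 // filesystem location of the data
private String inputFormat;              // InputFormat class name, from sd.getInputFormat()
private String outputFormat;             // OutputFormat class name
private boolean compressed;
private int numBuckets;
private SerDeInfoWrapper serDeInfo;
private List<OrderWrapper> sortCols;
private Map<String, String> parameters;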
Example 2: getInputFormatFromSD
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
/**
 * Get the input format from the given {@link StorageDescriptor}.
 * @param properties table properties to add to the {@link JobConf}
 * @param hiveReadEntry read entry that provides the Hive table
 * @param sd storage descriptor of the partition or table being read
 * @param hiveConf Hive configuration used to build the {@link JobConf}
 * @return {@link InputFormat} class, or null if a failure has occurred. The failure is logged as a warning.
 */
private Class<? extends InputFormat<?, ?>> getInputFormatFromSD(final Properties properties,
    final HiveReadEntry hiveReadEntry, final StorageDescriptor sd, final HiveConf hiveConf) {
  final Table hiveTable = hiveReadEntry.getTable();
  try {
    final String inputFormatName = sd.getInputFormat();
    if (!Strings.isNullOrEmpty(inputFormatName)) {
      return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
    }
    final JobConf job = new JobConf(hiveConf);
    HiveUtilities.addConfToJob(job, properties);
    return HiveUtilities.getInputFormatClass(job, sd, hiveTable);
  } catch (final Exception e) {
    logger.warn("Failed to get InputFormat class from Hive table '{}.{}'. StorageDescriptor [{}]",
        hiveTable.getDbName(), hiveTable.getTableName(), sd.toString(), e);
    return null;
  }
}
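The pattern above — resolve by class name when one is present, otherwise delegate, and return null on any failure — can be isolated into a standalone helper. The sketch below is illustrative only; the class and method names are hypothetical:

import org.apache.hadoop.mapred.InputFormat;

public final class InputFormatResolver {
  private InputFormatResolver() {}

  // Resolve an InputFormat class by name, returning null instead of
  // propagating ClassNotFoundException, mirroring the warn-and-return-null
  // contract of the example above.
  @SuppressWarnings("unchecked")
  public static Class<? extends InputFormat<?, ?>> resolve(String inputFormatName) {
    if (inputFormatName == null || inputFormatName.isEmpty()) {
      return null; // nothing to resolve; callers fall back to the StorageHandler path
    }
    try {
      return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
    } catch (ClassNotFoundException e) {
      return null;
    }
  }
}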
Example 3: getInputFormatClass
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
/**
 * Utility method that gets the table or partition {@link InputFormat} class. It first
 * tries to get the class name from the given StorageDescriptor object. If the descriptor does not
 * contain it, the method tries the StorageHandler class set in the table properties. If neither is
 * found, an exception is thrown.
 * @param job {@link JobConf} instance, needed in case the table is a StorageHandler-based table.
 * @param sd {@link StorageDescriptor} instance of the partition being read, or of the table for non-partitioned tables.
 * @param table Table object
 * @throws Exception
 */
public static Class<? extends InputFormat<?, ?>> getInputFormatClass(final JobConf job, final StorageDescriptor sd,
    final Table table) throws Exception {
  final String inputFormatName = sd.getInputFormat();
  if (Strings.isNullOrEmpty(inputFormatName)) {
    final String storageHandlerClass = table.getParameters().get(META_TABLE_STORAGE);
    if (Strings.isNullOrEmpty(storageHandlerClass)) {
      throw new ExecutionSetupException("Unable to get Hive table InputFormat class. There is neither " +
          "InputFormat class explicitly specified nor StorageHandler class");
    }
    final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass);
    return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
  } else {
    return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
  }
}
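A hypothetical caller of this utility, resolving the format for one partition of a table. Partition and Table are the metastore API types; the surrounding class and method names are placeholders, and the sketch assumes the HiveUtilities class from Example 3 is on the classpath:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;

public class PartitionFormatResolver {
  public static Class<? extends InputFormat<?, ?>> resolveForPartition(
      HiveConf hiveConf, Table table, Partition partition) throws Exception {
    JobConf job = new JobConf(hiveConf);
    // The partition's descriptor is checked first; the StorageHandler
    // fallback happens inside getInputFormatClass.
    return HiveUtilities.getInputFormatClass(job, partition.getSd(), table);
  }
}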
Example 4: StorageDescriptorWrapper
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
public StorageDescriptorWrapper(StorageDescriptor storageDescriptor) {
  sd = storageDescriptor;
  location = storageDescriptor.getLocation();
  inputFormat = storageDescriptor.getInputFormat();
  outputFormat = storageDescriptor.getOutputFormat();
  compressed = storageDescriptor.isCompressed();
  numBuckets = storageDescriptor.getNumBuckets();
  serDeInfo = new SerDeInfoWrapper(storageDescriptor.getSerdeInfo());
  if (sd.getSortCols() != null) {
    sortCols = Lists.newArrayList();
    for (Order order : sd.getSortCols()) {
      sortCols.add(new OrderWrapper(order));
    }
  }
  parameters = storageDescriptor.getParameters();
  if (sd.getCols() != null) {
    this.columns = Lists.newArrayList();
    for (FieldSchema fieldSchema : sd.getCols()) {
      this.columns.add(new FieldSchemaWrapper(fieldSchema));
    }
  }
}
Example 5: extractStorerInfo
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
static StorerInfo extractStorerInfo(
    StorageDescriptor sd, Map<String, String> properties) throws IOException {
  Properties hcatProperties = new Properties();
  for (String key : properties.keySet()) {
    hcatProperties.put(key, properties.get(key));
  }
  // also populate with StorageDescriptor -> SerDe parameters
  for (Map.Entry<String, String> param :
      sd.getSerdeInfo().getParameters().entrySet()) {
    hcatProperties.put(param.getKey(), param.getValue());
  }
  // TODO: this is the only difference between this file and the one in Hive - think
  // about how to avoid the copying.
  // Need to hard-code the SerDe class here, as views are not typically supported by Pig
  return new StorerInfo(
      sd.getInputFormat(), sd.getOutputFormat(),
      "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
      properties.get(org.apache.hadoop.hive.metastore.
          api.hive_metastoreConstants.META_TABLE_STORAGE),
      hcatProperties);
}
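A brief, hypothetical usage of the helper above, assuming a metastore Table has already been fetched (for instance with the client from the intro sketch); the accessor names follow HCatalog's StorerInfo:

// "table" is a metastore Table fetched elsewhere, e.g. via HiveMetaStoreClient.getTable(...)
StorerInfo storer = extractStorerInfo(table.getSd(), table.getParameters());
// getIfClass()/getOfClass() return the input/output format class names
System.out.println(storer.getIfClass() + " -> " + storer.getOfClass());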
Example 6: getDataFormat
import org.apache.hadoop.hive.metastore.api.StorageDescriptor; // import the package/class the method depends on
public static String getDataFormat(StorageDescriptor descriptor) {
  Preconditions.checkNotNull(descriptor);
  String serde = descriptor.getSerdeInfo().getSerializationLib();
  String inputFormat = descriptor.getInputFormat();
  if (LazySimpleSerDe.class.getName().equals(serde)) {
    if (TextInputFormat.class.getName().equals(inputFormat)) {
      return BuiltinStorages.TEXT;
    } else if (SequenceFileInputFormat.class.getName().equals(inputFormat)) {
      return BuiltinStorages.SEQUENCE_FILE;
    } else {
      throw new TajoRuntimeException(new UnknownDataFormatException(inputFormat));
    }
  } else if (LazyBinarySerDe.class.getName().equals(serde)) {
    if (SequenceFileInputFormat.class.getName().equals(inputFormat)) {
      return BuiltinStorages.SEQUENCE_FILE;
    } else {
      throw new TajoRuntimeException(new UnknownDataFormatException(inputFormat));
    }
  } else if (LazyBinaryColumnarSerDe.class.getName().equals(serde) || ColumnarSerDe.class.getName().equals(serde)) {
    if (RCFileInputFormat.class.getName().equals(inputFormat)) {
      return BuiltinStorages.RCFILE;
    } else {
      throw new TajoRuntimeException(new UnknownDataFormatException(inputFormat));
    }
  } else if (ParquetHiveSerDe.class.getName().equals(serde)) {
    return BuiltinStorages.PARQUET;
  } else if (AvroSerDe.class.getName().equals(serde)) {
    return BuiltinStorages.AVRO;
  } else if (OrcSerde.class.getName().equals(serde)) {
    return BuiltinStorages.ORC;
  } else if (RegexSerDe.class.getName().equals(serde)) {
    return BuiltinStorages.REGEX;
  } else {
    throw new TajoRuntimeException(new UnknownDataFormatException(inputFormat));
  }
}
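To see the mapping in action, the minimal sketch below builds a descriptor for a plain text table by hand and classifies it; it assumes the getDataFormat helper above is in scope:

import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

public class DataFormatDemo {
  public static void main(String[] args) {
    // The class names below are the standard Hive defaults for text storage.
    SerDeInfo serde = new SerDeInfo();
    serde.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    StorageDescriptor sd = new StorageDescriptor();
    sd.setSerdeInfo(serde);
    sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
    // LazySimpleSerDe + TextInputFormat maps to BuiltinStorages.TEXT
    System.out.println(getDataFormat(sd));
  }
}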