本文整理汇总了Java中org.apache.hadoop.hive.serde2.SerDeStats类的典型用法代码示例。如果您正苦于以下问题:Java SerDeStats类的具体用法?Java SerDeStats怎么用?Java SerDeStats使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
SerDeStats类属于org.apache.hadoop.hive.serde2包,在下文中一共展示了SerDeStats类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Reports per-operation SerDe statistics; only the row-count delta is populated.
 */
@Override
public SerDeStats getStats() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("<<<<<<<<<< getStats >>>>>>>>>>");
  }
  final SerDeStats result = new SerDeStats();
  result.setRowCount(rowCountDelta);
  // Raw-data-size delta is deliberately left unset: computing it would require
  // locating the row being updated or deleted, which is impractical here.
  return result;
}
示例2: initialize
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Initializes this SerDe from the table properties: parses the column names
 * and types, builds the row object inspector, and resets the statistics.
 *
 * @param conf Hadoop configuration (unused here)
 * @param tbl  table properties carrying {@link IOConstants#COLUMNS} and
 *             {@link IOConstants#COLUMNS_TYPES}
 * @throws SerDeException declared by the SerDe contract
 */
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
  final String namesProp = tbl.getProperty(IOConstants.COLUMNS);
  final String typesProp = tbl.getProperty(IOConstants.COLUMNS_TYPES);
  columnNames = Strings.isEmpty(namesProp)
      ? new ArrayList<String>()
      : Arrays.asList(namesProp.split(","));
  // When no type list is supplied, default every declared column to "string".
  columnTypes = Strings.isEmpty(typesProp)
      ? TypeInfoUtils.getTypeInfosFromTypeString(StringUtils.repeat("string", ":", columnNames.size()))
      : TypeInfoUtils.getTypeInfosFromTypeString(typesProp);
  if (columnNames.size() != columnTypes.size()) {
    throw new IllegalArgumentException("IndexRHiveSerde initialization failed. Number of column " +
        "name and column type differs. columnNames = " + columnNames + ", columnTypes = " +
        columnTypes);
  }
  final TypeInfo rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
  this.objInspector = new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo);
  stats = new SerDeStats();
  serdeSize = 0;
}
示例3: initialize
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Initializes this SerDe from the table properties: parses column names and
 * types, builds the row {@code ObjectInspector}, and resets statistics state.
 *
 * @param conf Hadoop configuration (unused here)
 * @param tbl  table properties carrying {@link IOConstants#COLUMNS} and
 *             {@link IOConstants#COLUMNS_TYPES}
 * @throws SerDeException declared by the SerDe contract
 * @throws IllegalArgumentException if the column-name and column-type lists differ in size
 */
@Override
public final void initialize(final Configuration conf, final Properties tbl) throws SerDeException {
  final TypeInfo rowTypeInfo;
  final List<String> columnNames;
  final List<TypeInfo> columnTypes;
  // Get column names and sort order
  final String columnNameProperty = tbl.getProperty(IOConstants.COLUMNS);
  final String columnTypeProperty = tbl.getProperty(IOConstants.COLUMNS_TYPES);
  // Properties.getProperty returns null when the key is absent; the original
  // code would NPE on .length() in that case. Treat null the same as empty.
  if (columnNameProperty == null || columnNameProperty.isEmpty()) {
    columnNames = new ArrayList<String>();
  } else {
    columnNames = Arrays.asList(columnNameProperty.split(","));
  }
  if (columnTypeProperty == null || columnTypeProperty.isEmpty()) {
    columnTypes = new ArrayList<TypeInfo>();
  } else {
    columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
  }
  if (columnNames.size() != columnTypes.size()) {
    throw new IllegalArgumentException("ParquetHiveSerde initialization failed. Number of column " +
        "name and column type differs. columnNames = " + columnNames + ", columnTypes = " +
        columnTypes);
  }
  // Create row related objects
  rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
  this.objInspector = new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo);
  // Stats part
  stats = new SerDeStats();
  serializedSize = 0;
  deserializedSize = 0;
  status = LAST_OPERATION.UNKNOWN;
}
示例4: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Returns statistics for the most recent operation: raw data size reflects
 * the last serialize or deserialize, whichever ran last.
 */
@Override
public SerDeStats getSerDeStats() {
  // A serialize or deserialize must have happened before stats are queried.
  assert (status != LAST_OPERATION.UNKNOWN);
  final long rawSize = (status == LAST_OPERATION.SERIALIZE) ? serializedSize : deserializedSize;
  stats.setRawDataSize(rawSize);
  return stats;
}
示例5: OrcRecordWriter
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Creates a record writer that will emit ORC data to {@code path} on {@code fs}.
 *
 * @param fs              target file system
 * @param path            output file path
 * @param conf            Hadoop configuration passed through to the writer
 * @param stripeSize      ORC stripe size in bytes
 * @param compress        compression codec name; must match a {@code CompressionKind}
 *                        constant, otherwise {@code valueOf} throws IllegalArgumentException
 * @param compressionSize compression buffer size in bytes
 * @param rowIndexStride  number of rows between row-index entries
 */
OrcRecordWriter(FileSystem fs, Path path, Configuration conf,
long stripeSize, String compress,
int compressionSize, int rowIndexStride) {
this.fs = fs;
this.path = path;
this.conf = conf;
this.stripeSize = stripeSize;
// Parse the codec name eagerly so a bad name fails at construction time.
this.compress = CompressionKind.valueOf(compress);
this.compressionSize = compressionSize;
this.rowIndexStride = rowIndexStride;
this.stats = new SerDeStats();
}
示例6: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Unimplemented: this SerDe does not track statistics and always returns null.
 */
//@Override
public SerDeStats getSerDeStats() {
  if (Log.isDebugEnabled()) {
    SpliceLogUtils.trace(Log, "serdeStats");
  }
  return null;
}
示例7: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not tracked by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats(){
return null;
}
示例8: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not tracked by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
return null;
}
示例9: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not supported by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// no support for statistics
return null;
}
示例10: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not supported by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// not supported
return null;
}
示例11: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not tracked by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// Returning null signals "no stats available" to the Hive framework.
return null;
}
示例12: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not supported by this SerDe; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// no support for statistics
return null;
}
示例13: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
@Override
public SerDeStats getSerDeStats() {
stats.setRawDataSize(serdeSize);
return stats;
}
示例14: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not implemented; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// TODO: can compute serialize stats but not deserialized ones
return null;
}
示例15: getSerDeStats
import org.apache.hadoop.hive.serde2.SerDeStats; //导入依赖的package包/类
/**
 * Statistics are not implemented; callers must handle a null result.
 */
@Override
public SerDeStats getSerDeStats() {
// TODO How to implement this?
return null;
}