本文整理汇总了Java中org.apache.kylin.cube.kv.AbstractRowKeyEncoder.createInstance方法的典型用法代码示例。如果您正苦于以下问题:Java AbstractRowKeyEncoder.createInstance方法的具体用法?Java AbstractRowKeyEncoder.createInstance怎么用?Java AbstractRowKeyEncoder.createInstance使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.kylin.cube.kv.AbstractRowKeyEncoder
的用法示例。
在下文中一共展示了AbstractRowKeyEncoder.createInstance方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: call
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
@Override
public Tuple2<ByteArray, Object[]> call(Row row) throws Exception {
    // Lazily initialize the (per-executor) cubing state on first use, guarded by
    // double-checked locking on the enclosing class.
    // NOTE(review): for the double-checked locking to be safe under the Java Memory
    // Model, the 'initialized' flag must be declared volatile — the declaration is
    // not visible here, so confirm it in the field list.
    if (!initialized) {
        synchronized (SparkCubingByLayer.class) {
            if (!initialized) {
                // Load cube metadata from HDFS and resolve the segment being built.
                KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
                CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                CubeDesc cubeDesc = cubeInstance.getDescriptor();
                CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
                CubeJoinedFlatTableEnrich interDesc = new CubeJoinedFlatTableEnrich(
                        EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
                // The base cuboid contains all dimensions; its encoder builds the row key.
                long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
                Cuboid baseCuboid = Cuboid.findForMandatory(cubeDesc, baseCuboidId);
                baseCuboidBuilder = new BaseCuboidBuilder(kConfig, cubeDesc, cubeSegment, interDesc,
                        AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid),
                        MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
                initialized = true;
            }
        }
    }
    // Encode one flat-table row into (rowkey, measure values) for the base cuboid.
    String[] rowArray = rowToArray(row);
    baseCuboidBuilder.resetAggrs();
    byte[] rowKey = baseCuboidBuilder.buildKey(rowArray);
    Object[] result = baseCuboidBuilder.buildValueObjects(rowArray);
    return new Tuple2<>(new ByteArray(rowKey), result);
}
示例2: initVariables
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
/**
 * Prepares the per-cuboid encoding state: a row-key encoder for the given
 * cuboid, its reusable key buffer, and the bit set marking measure columns.
 */
private void initVariables(Long cuboidId) {
    Cuboid cuboid = Cuboid.findForMandatory(cubeDesc, cuboidId);
    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, cuboid);
    keyBuf = rowKeyEncoder.createBuf();
    // Each set bit in the cuboid id is one dimension column; measures follow.
    dimensions = Long.bitCount(cuboidId);
    measureColumns = new ImmutableBitSet(dimensions, dimensions + measureCount);
}
示例3: BaseCuboidBuilder
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
/**
 * Builds base-cuboid rows: wires up the row-key encoder, measure codec,
 * measure ingesters and dictionaries for the given cube segment.
 */
public BaseCuboidBuilder(KylinConfig kylinConfig, CubeDesc cubeDesc, CubeSegment cubeSegment, CubeJoinedFlatTableEnrich intermediateTableDesc) {
    this.kylinConfig = kylinConfig;
    this.cubeDesc = cubeDesc;
    this.cubeSegment = cubeSegment;
    this.intermediateTableDesc = intermediateTableDesc;
    // init() must run first; the encoder below relies on baseCuboid being set.
    init();
    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid);
    // Measure handling: codec for serialization, ingesters for raw-value intake.
    measureCodec = new BufferedMeasureCodec(cubeDesc.getMeasures());
    aggrIngesters = MeasureIngester.create(cubeDesc.getMeasures());
    dictionaryMap = cubeSegment.buildDictionaryMap();
}
示例4: createKey
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
/**
 * Encodes the record's primary-key columns into a row key for the given cuboid.
 * The encoder and its buffer are cached and rebuilt only when the cuboid changes,
 * so the returned array is reused across calls — callers must copy if they keep it.
 */
private byte[] createKey(Long cuboidId, GTRecord record) {
    boolean encoderStale = rowKeyEncoder == null || rowKeyEncoder.getCuboidID() != cuboidId;
    if (encoderStale) {
        Cuboid cuboid = Cuboid.findForMandatory(cubeDesc, cuboidId);
        rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, cuboid);
        keybuf = rowKeyEncoder.createBuf();
    }
    rowKeyEncoder.encode(record, record.getInfo().getPrimaryKey(), keybuf);
    return keybuf;
}
示例5: setup
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    // Resolve the cube and the NEW segment named in the job configuration.
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME);
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    metadataManager = MetadataManager.getInstance(config);
    cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);
    cubeDesc = cube.getDescriptor();
    factTableDesc = metadataManager.getTableDesc(cubeDesc.getFactTable());

    // The base cuboid (all dimensions) drives row-key encoding.
    long cuboidId = Cuboid.getBaseCuboidId(cubeDesc);
    baseCuboid = Cuboid.findById(cubeDesc, cuboidId);
    // intermediateTableDesc = new
    // JoinedFlatTableDesc(cube.getDescriptor());
    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid);

    // Measure codec/buffers sized from the cube's measure and row-key definitions.
    measureCodec = new MeasureCodec(cubeDesc.getMeasures());
    measures = new Object[cubeDesc.getMeasures().size()];
    int rowKeyColumnCount = cubeDesc.getRowkey().getRowKeyColumns().length;
    keyBytesBuf = new byte[rowKeyColumnCount][];
    bytesSplitter = new BytesSplitter(factTableDesc.getColumns().length, 4096);
    // Hive represents NULL as the two-byte sequence "\N".
    nullValue = new byte[] { (byte) '\\', (byte) 'N' };
    prepareJoins();
    prepareMetrics();
}
示例6: setup
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME);
    intermediateTableRowDelimiter = context.getConfiguration().get(BatchConstants.CFG_CUBE_INTERMEDIATE_TABLE_ROW_DELIMITER, Character.toString(BatchConstants.INTERMEDIATE_TABLE_ROW_DELIMITER));

    // The row delimiter must encode to exactly one byte.
    byte[] delimiterBytes = Bytes.toBytes(intermediateTableRowDelimiter);
    if (delimiterBytes.length > 1) {
        throw new RuntimeException("Expected delimiter byte length is 1, but got " + delimiterBytes.length);
    }
    byteRowDelimiter = delimiterBytes[0];

    // Resolve cube metadata and the NEW segment being built.
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);

    // Base cuboid (all dimensions) drives row-key encoding.
    long cuboidId = Cuboid.getBaseCuboidId(cubeDesc);
    baseCuboid = Cuboid.findById(cubeDesc, cuboidId);
    intermediateTableDesc = new CubeJoinedFlatTableDesc(cube.getDescriptor(), cubeSegment);
    bytesSplitter = new BytesSplitter(200, 4096);
    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid);

    // Measure codec and per-row buffers.
    measureCodec = new MeasureCodec(cubeDesc.getMeasures());
    measures = new Object[cubeDesc.getMeasures().size()];
    int rowKeyColumnCount = cubeDesc.getRowkey().getRowKeyColumns().length;
    keyBytesBuf = new byte[rowKeyColumnCount][];
    initNullBytes();
}
示例7: init
import org.apache.kylin.cube.kv.AbstractRowKeyEncoder; //导入方法依赖的package包/类
/**
 * Derives the HBase scan range for an AND-combined set of dimension ranges:
 * builds the start key from each range's begin values, the (inclusive) stop key
 * from the end values, and fuzzy-match keys from the equality values.
 */
private void init(Collection<ColumnValueRange> andDimensionRanges) {
    int size = andDimensionRanges.size();
    Map<TblColRef, String> startValues = Maps.newHashMapWithExpectedSize(size);
    Map<TblColRef, String> stopValues = Maps.newHashMapWithExpectedSize(size);
    Map<TblColRef, Set<String>> fuzzyValues = Maps.newHashMapWithExpectedSize(size);
    // Loop-invariant: resolve the partition date column once, not per range.
    TblColRef partitionDateColumnRef = cubeSeg.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
    for (ColumnValueRange dimRange : andDimensionRanges) {
        TblColRef column = dimRange.getColumn();
        startValues.put(column, dimRange.getBeginValue());
        stopValues.put(column, dimRange.getEndValue());
        fuzzyValues.put(column, dimRange.getEqualValues());
        // Ranges on the partition date column additionally bound the segment scan.
        if (column.equals(partitionDateColumnRef)) {
            initPartitionRange(dimRange);
        }
    }
    AbstractRowKeyEncoder encoder = AbstractRowKeyEncoder.createInstance(cubeSeg, cuboid);
    // Missing columns are padded low for the start key, high for the stop key.
    encoder.setBlankByte(RowConstants.ROWKEY_LOWER_BYTE);
    this.startKey = encoder.encode(startValues);
    encoder.setBlankByte(RowConstants.ROWKEY_UPPER_BYTE);
    // In order to make stopRow inclusive add a trailing 0 byte. #See
    // Scan.setStopRow(byte [] stopRow)
    this.stopKey = Bytes.add(encoder.encode(stopValues), ZERO_TAIL_BYTES);
    // restore encoder defaults for later reuse (note
    // AbstractRowKeyEncoder.createInstance() caches instances)
    encoder.setBlankByte(AbstractRowKeyEncoder.DEFAULT_BLANK_BYTE);
    // always fuzzy match cuboid ID to lock on the selected cuboid
    this.fuzzyKeys = buildFuzzyKeys(fuzzyValues);
}