

Java ExecutableConstants Class Code Examples

This article collects typical usage examples of the Java class org.apache.kylin.job.constant.ExecutableConstants. If you are wondering what the ExecutableConstants class is used for, how to use it, or want to see it in real code, the curated examples below should help.


The ExecutableConstants class belongs to the org.apache.kylin.job.constant package. Fifteen code examples of the class are shown below, sorted by popularity by default.
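Before diving into the examples, here is a minimal sketch of the shape of the class, limited to the constants the examples below actually reference: ExecutableConstants is essentially a holder of public static final String values that serve as job-info map keys (e.g. MR_JOB_ID, HDFS_BYTES_WRITTEN) and as human-readable step names for cubing and optimization jobs. The constant names are taken from the examples; the String values shown are illustrative placeholders, not necessarily the exact definitions in the Kylin source.

// Minimal sketch of ExecutableConstants; constant names come from the examples below,
// while the String values are illustrative placeholders rather than Kylin's actual values.
package org.apache.kylin.job.constant;

public final class ExecutableConstants {

    private ExecutableConstants() {
    }

    // keys written into a job step's info map
    public static final String MR_JOB_ID = "mr_job_id";
    public static final String HDFS_BYTES_WRITTEN = "hdfs_bytes_written";

    // display names assigned to cubing/optimization job steps
    public static final String STEP_NAME_FACT_DISTINCT_COLUMNS = "Extract Fact Table Distinct Columns";
    public static final String STEP_NAME_BUILD_DICTIONARY = "Build Dimension Dictionary";
    public static final String STEP_NAME_BUILD_BASE_CUBOID = "Build Base Cuboid";
    public static final String STEP_NAME_BUILD_N_D_CUBOID = "Build N-Dimension Cuboid";
    public static final String STEP_NAME_BUILD_IN_MEM_CUBE = "Build Cube In-Mem";
    public static final String STEP_NAME_CALCULATE_STATS_FROM_BASE_CUBOID = "Calculate Statistics from Base Cuboid";
    public static final String STEP_NAME_FILTER_RECOMMEND_CUBOID_DATA_FOR_OPTIMIZATION = "Filter Recommended Cuboid Data";
    public static final String STEP_NAME_UPDATE_OLD_CUBOID_SHARD = "Update Old Cuboid Shard";
    public static final String STEP_NAME_GET_CUBOID_KEY_DISTRIBUTION = "Calculate HTable Region Splits";
    public static final String STEP_NAME_MERGE_CUBOID = "Merge Cuboid Data";
    public static final String STEP_NAME_CONVERT_CUBOID_TO_HFILE = "Convert Cuboid Data to HFile";
}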

Example 1: createFlatHiveTable

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getInitStatement());
    hiveCmdBuilder.addStatementWithRedistributeBy(getCreateTableStatement());
    final String cmd = hiveCmdBuilder.toString();

    stepLogger.log("Create and distribute table, cmd: ");
    stepLogger.log(cmd);

    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    Map<String, String> info = stepLogger.getInfo();

    //get the flat Hive table size
    Matcher matcher = HDFS_LOCATION.matcher(cmd);
    if (matcher.find()) {
        String hiveFlatTableHdfsUrl = matcher.group(1);
        long size = getFileSize(hiveFlatTableHdfsUrl);
        info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, "" + size);
        logger.info("HDFS_Bytes_Writen: " + size);
    }
    getManager().addJobInfo(getId(), info);
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
 
Developer ID: apache, Project: kylin, Lines of code: 27, Source file: CreateFlatHiveTableStep.java

Example 2: extractJobLog

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private void extractJobLog(String taskId, File destDir, boolean onlyFail) throws Exception {
    final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
    FileUtils.forceMkdir(destDir);
    if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
        String applicationId = jobInfo.get(ExecutableConstants.MR_JOB_ID).replace("job", "application");
        if (!onlyFail || !isYarnAppSucc(applicationId)) {
            File destFile = new File(destDir, applicationId + ".log");
            String yarnCmd = "yarn logs -applicationId " + applicationId + " > " + destFile.getAbsolutePath();
            logger.debug(yarnCmd);
            try {
                kylinConfig.getCliCommandExecutor().execute(yarnCmd);
            } catch (Exception ex) {
                logger.warn("Failed to get yarn logs. ", ex);
            }
        }
    }
}
 
Developer ID: apache, Project: kylin, Lines of code: 18, Source file: JobDiagnosisInfoCLI.java

Example 3: addInMemCubingSteps

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
protected void addInMemCubingSteps(final CubingJob result, String jobId, String cuboidRootPath) {
    // base cuboid job
    MapReduceExecutable cubeStep = new MapReduceExecutable();

    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd, JobEngineConfig.IN_MEM_JOB_CONF_SUFFIX);

    cubeStep.setName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE);

    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, cuboidRootPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Cube_Builder_" + seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);

    cubeStep.setMapReduceParams(cmd.toString());
    cubeStep.setMapReduceJobClass(getInMemCuboidJob());
    result.addTask(cubeStep);
}
 
Developer ID: apache, Project: kylin, Lines of code: 20, Source file: BatchCubingJobBuilder2.java

Example 4: createBaseCuboidStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private MapReduceExecutable createBaseCuboidStep(String cuboidOutputPath, String jobId) {
    // base cuboid job
    MapReduceExecutable baseCuboidStep = new MapReduceExecutable();

    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd);

    baseCuboidStep.setName(ExecutableConstants.STEP_NAME_BUILD_BASE_CUBOID);

    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, "FLAT_TABLE"); // marks flat table input
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, cuboidOutputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Base_Cuboid_Builder_" + seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "0");
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);

    baseCuboidStep.setMapReduceParams(cmd.toString());
    baseCuboidStep.setMapReduceJobClass(getBaseCuboidJob());
    //        baseCuboidStep.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES);
    return baseCuboidStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 23, Source file: BatchCubingJobBuilder2.java

Example 5: createNDimensionCuboidStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private MapReduceExecutable createNDimensionCuboidStep(String parentPath, String outputPath, int level, String jobId) {
    // ND cuboid job
    MapReduceExecutable ndCuboidStep = new MapReduceExecutable();

    ndCuboidStep.setName(ExecutableConstants.STEP_NAME_BUILD_N_D_CUBOID + " : level " + level);
    StringBuilder cmd = new StringBuilder();

    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, parentPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
    appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "" + level);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);

    ndCuboidStep.setMapReduceParams(cmd.toString());
    ndCuboidStep.setMapReduceJobClass(getNDCuboidJob());
    return ndCuboidStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 21, Source file: BatchCubingJobBuilder2.java

Example 6: createFactDistinctColumnsStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createFactDistinctColumnsStep(String jobId) {
    MapReduceExecutable result = new MapReduceExecutable();
    result.setName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
    result.setMapReduceJobClass(FactDistinctColumnsJob.class);
    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getFactDistinctColumnsPath(jobId));
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_OUTPUT, getStatisticsPath(jobId));
    appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_SAMPLING_PERCENT, String.valueOf(config.getConfig().getCubingInMemSamplingPercent()));
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Fact_Distinct_Columns_" + seg.getRealization().getName() + "_Step");
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
    result.setMapReduceParams(cmd.toString());
    result.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES);
    return result;
}
 
Developer ID: apache, Project: kylin, Lines of code: 18, Source file: JobBuilderSupport.java

Example 7: createCalculateStatsFromBaseCuboid

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createCalculateStatsFromBaseCuboid(String inputPath, String outputPath,
        CuboidModeEnum cuboidMode) {
    MapReduceExecutable result = new MapReduceExecutable();
    result.setName(ExecutableConstants.STEP_NAME_CALCULATE_STATS_FROM_BASE_CUBOID);
    result.setMapReduceJobClass(CalculateStatsFromBaseCuboidJob.class);
    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_SAMPLING_PERCENT,
            String.valueOf(config.getConfig().getCubingInMemSamplingPercent()));
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
            "Calculate_Stats_For_Segment_" + seg.getRealization().getName() + "_Step");
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBOID_MODE, cuboidMode.toString());

    result.setMapReduceParams(cmd.toString());
    return result;
}
 
Developer ID: apache, Project: kylin, Lines of code: 21, Source file: JobBuilderSupport.java

Example 8: createFilterRecommendCuboidDataStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createFilterRecommendCuboidDataStep(String inputPath, String outputPath) {
    MapReduceExecutable result = new MapReduceExecutable();
    result.setName(ExecutableConstants.STEP_NAME_FILTER_RECOMMEND_CUBOID_DATA_FOR_OPTIMIZATION);

    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
            "Kylin_Filter_Recommend_Cuboid_Data_" + seg.getRealization().getName());

    result.setMapReduceParams(cmd.toString());
    result.setMapReduceJobClass(FilterRecommendCuboidDataJob.class);
    return result;
}
 
Developer ID: apache, Project: kylin, Lines of code: 18, Source file: BatchOptimizeJobBuilder2.java

Example 9: createUpdateShardForOldCuboidDataStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private MapReduceExecutable createUpdateShardForOldCuboidDataStep(String inputPath, String outputPath) {
    MapReduceExecutable result = new MapReduceExecutable();
    result.setName(ExecutableConstants.STEP_NAME_UPDATE_OLD_CUBOID_SHARD);

    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
            "Kylin_Update_Old_Cuboid_Shard_" + seg.getRealization().getName());

    result.setMapReduceParams(cmd.toString());
    result.setMapReduceJobClass(UpdateOldCuboidShardJob.class);
    return result;
}
 
Developer ID: apache, Project: kylin, Lines of code: 18, Source file: BatchOptimizeJobBuilder2.java

Example 10: createNDimensionCuboidStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private MapReduceExecutable createNDimensionCuboidStep(String parentPath, String outputPath, int level,
        String jobId, CuboidModeEnum cuboidMode) {
    // ND cuboid job
    MapReduceExecutable ndCuboidStep = new MapReduceExecutable();

    ndCuboidStep.setName(ExecutableConstants.STEP_NAME_BUILD_N_D_CUBOID + " : level " + level);
    StringBuilder cmd = new StringBuilder();

    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, parentPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
            "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
    appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "" + level);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBOID_MODE, cuboidMode.toString());

    ndCuboidStep.setMapReduceParams(cmd.toString());
    ndCuboidStep.setMapReduceJobClass(getNDCuboidJob());
    return ndCuboidStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 24, Source file: BatchOptimizeJobBuilder2.java

Example 11: createInMemCubingStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
private MapReduceExecutable createInMemCubingStep(String jobId, CuboidModeEnum cuboidMode, String cuboidRootPath) {
    MapReduceExecutable cubeStep = new MapReduceExecutable();

    StringBuilder cmd = new StringBuilder();
    appendMapReduceParameters(cmd, JobEngineConfig.IN_MEM_JOB_CONF_SUFFIX);

    cubeStep.setName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE);

    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, getBaseCuboidPath(cuboidRootPath));
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getInMemCuboidPath(cuboidRootPath));
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
            "Kylin_Cube_Builder_" + seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBOID_MODE, cuboidMode.toString());

    cubeStep.setMapReduceParams(cmd.toString());
    cubeStep.setMapReduceJobClass(InMemCuboidFromBaseCuboidJob.class);
    cubeStep.setCounterSaveAs(
            CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES + "," + CubingJob.CUBE_SIZE_BYTES);
    return cubeStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 24, Source file: BatchOptimizeJobBuilder2.java

Example 12: updateMetrics

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
protected void updateMetrics(ExecutableContext context, ExecuteResult result, ExecutableState state) {
    JobMetricsFacade.JobStatisticsResult jobStats = new JobMetricsFacade.JobStatisticsResult();
    jobStats.setWrapper(getSubmitter(), getProjectName(),
            CubingExecutableUtil.getCubeName(getParams()), getId(), getJobType(),
            getAlgorithm() == null ? "NULL" : getAlgorithm().toString());

    if (state == ExecutableState.SUCCEED) {
        jobStats.setJobStats(findSourceSizeBytes(), findCubeSizeBytes(), getDuration(), getMapReduceWaitTime(),
                getPerBytesTimeCost(findSourceSizeBytes(), getDuration()));
        if (CubingJobTypeEnum.getByName(getJobType()) == CubingJobTypeEnum.BUILD) {
            jobStats.setJobStepStats(
                    getTaskByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS).getDuration(),
                    getTaskByName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY).getDuration(),
                    getTaskByName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE).getDuration(),
                    getTaskByName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE).getDuration());
        }
    } else if (state == ExecutableState.ERROR) {
        jobStats.setJobException(result.getThrowable() != null ? result.getThrowable() : new Exception());
    }
    JobMetricsFacade.updateMetrics(jobStats);
}
 
Developer ID: apache, Project: kylin, Lines of code: 22, Source file: CubingJob.java

Example 13: createRangeRowkeyDistributionStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createRangeRowkeyDistributionStep(String cuboidRootPath, String jobId) {
    String inputPath = cuboidRootPath + (cuboidRootPath.endsWith("/") ? "" : "/") + "*";

    MapReduceExecutable rowkeyDistributionStep = new MapReduceExecutable();
    rowkeyDistributionStep.setName(ExecutableConstants.STEP_NAME_GET_CUBOID_KEY_DISTRIBUTION);
    StringBuilder cmd = new StringBuilder();

    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getRowkeyDistributionOutputPath(jobId));
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Region_Splits_Calculator_" + seg.getRealization().getName() + "_Step");

    rowkeyDistributionStep.setMapReduceParams(cmd.toString());
    rowkeyDistributionStep.setMapReduceJobClass(RangeKeyDistributionJob.class);
    return rowkeyDistributionStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 18, Source file: HBaseMRSteps.java

Example 14: createMergeCuboidDataStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createMergeCuboidDataStep(CubeSegment seg, List<CubeSegment> mergingSegments, String jobID, Class<? extends AbstractHadoopJob> clazz) {
    final List<String> mergingCuboidPaths = Lists.newArrayList();
    for (CubeSegment merging : mergingSegments) {
        mergingCuboidPaths.add(getCuboidRootPath(merging) + "*");
    }
    String formattedPath = StringUtil.join(mergingCuboidPaths, ",");
    String outputPath = getCuboidRootPath(jobID);

    MapReduceExecutable mergeCuboidDataStep = new MapReduceExecutable();
    mergeCuboidDataStep.setName(ExecutableConstants.STEP_NAME_MERGE_CUBOID);
    StringBuilder cmd = new StringBuilder();

    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getCubeInstance().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, formattedPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Merge_Cuboid_" + seg.getCubeInstance().getName() + "_Step");

    mergeCuboidDataStep.setMapReduceParams(cmd.toString());
    mergeCuboidDataStep.setMapReduceJobClass(clazz);
    return mergeCuboidDataStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 25, Source file: HBaseMRSteps.java

Example 15: createConvertCuboidToHfileStep

import org.apache.kylin.job.constant.ExecutableConstants; // import the required package/class
public MapReduceExecutable createConvertCuboidToHfileStep(String jobId) {
    String cuboidRootPath = getCuboidRootPath(jobId);
    String inputPath = cuboidRootPath + (cuboidRootPath.endsWith("/") ? "" : "/") + "*";

    MapReduceExecutable createHFilesStep = new MapReduceExecutable();
    createHFilesStep.setName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE);
    StringBuilder cmd = new StringBuilder();

    appendMapReduceParameters(cmd);
    appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
    appendExecCmdParameters(cmd, BatchConstants.ARG_PARTITION, getRowkeyDistributionOutputPath(jobId) + "/part-r-00000_hfile");
    appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
    appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getHFilePath(jobId));
    appendExecCmdParameters(cmd, BatchConstants.ARG_HTABLE_NAME, seg.getStorageLocationIdentifier());
    appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_HFile_Generator_" + seg.getRealization().getName() + "_Step");

    createHFilesStep.setMapReduceParams(cmd.toString());
    createHFilesStep.setMapReduceJobClass(CubeHFileJob.class);
    createHFilesStep.setCounterSaveAs(",," + CubingJob.CUBE_SIZE_BYTES);

    return createHFilesStep;
}
 
Developer ID: apache, Project: kylin, Lines of code: 23, Source file: HBaseMRSteps.java


Note: The org.apache.kylin.job.constant.ExecutableConstants class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their authors, and the source code copyright remains with the original authors. Please consult the corresponding project's license before distributing or using this code; do not reproduce without permission.