当前位置: 首页>>代码示例>>Java>>正文


Java ControlledJob类代码示例

本文整理汇总了Java中org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob的典型用法代码示例。如果您正苦于以下问题:Java ControlledJob类的具体用法?Java ControlledJob怎么用?Java ControlledJob使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


ControlledJob类属于org.apache.hadoop.mapreduce.lib.jobcontrol包,在下文中一共展示了ControlledJob类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: run

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
public int run(String[] args) throws Exception {
    System.out.println(Arrays.toString(args));
    /* Chain two MapReduce jobs: step1 writes its output under a temp dir,
     * and step2 consumes that dir's part file to produce recommendations. */
    String intermediateFileDir = "tmp";
    String intermediateFileDirFile = intermediateFileDir + "/part-r-00000";
    JobControl control = new JobControl("ChainMapReduce");
    ControlledJob step1 = new ControlledJob(jobListFriends(args[0], intermediateFileDir), null);
    // step2 depends on step1; JobControl will only submit it once step1 succeeds.
    ControlledJob step2 = new ControlledJob(jobRecommendFriends(intermediateFileDirFile, args[1]), Arrays.asList(step1));
    control.addJob(step1);
    control.addJob(step2);
    Thread workFlowThread = new Thread(control, "workflowthread");
    workFlowThread.setDaemon(true);
    workFlowThread.start();
    // BUG FIX: the original returned 0 immediately. Because the JobControl
    // thread is a daemon, the JVM could exit before either job ran, and
    // failures were never reflected in the exit code. Poll until the
    // workflow finishes, then stop the controller and report the result.
    while (!control.allFinished()) {
        Thread.sleep(500);
    }
    control.stop();
    return control.getFailedJobList().isEmpty() ? 0 : 1;
}
 
开发者ID:dhruvmalik007,项目名称:Deep_learning_using_Java,代码行数:17,代码来源:Recommendation_program.java

示例2: addJob

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Creates a Gridmix combiner job sized by {@code size} and registers it
 * with the given {@link JobControl}.
 *
 * @param numReducers        number of reduce tasks for the generated job
 * @param mapoutputCompressed whether map output compression is enabled
 * @param outputCompressed   whether final output compression is enabled
 * @param size               workload size bucket (also selects input dirs)
 * @param gridmix            controller the new job is added to
 */
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("combiner.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = addTSSuffix("perf-out/combiner-out-dir-" + size);

  // Build the CLI-style argument string. Every flag and value must be
  // separated by exactly one space because it is tokenized with split(" ").
  StringBuilder sb = new StringBuilder();
  sb.append("-r ").append(numReducers).append(" ");
  sb.append("-indir ").append(indir).append(" ");
  // BUG FIX: the original omitted the trailing space after the outdir value,
  // fusing "<outdir>-mapoutputCompressed" into a single token after split(" ").
  sb.append("-outdir ").append(outdir).append(" ");
  sb.append("-mapoutputCompressed ");
  sb.append(mapoutputCompressed).append(" ");
  sb.append("-outputCompressed ").append(outputCompressed);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = CombinerJobCreator.createJob(args);
    job.setJobName("GridmixCombinerJob." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    // Best-effort: a failed job creation is logged but does not abort the run.
    ex.printStackTrace();
  }
}
 
开发者ID:rekhajoshm,项目名称:mapreduce-fork,代码行数:26,代码来源:GridMixRunner.java

示例3: castToJobList

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Downcasts a list of {@link ControlledJob}s to the legacy {@code Job}
 * subtype, copying into a fresh list.
 */
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> downcast = new ArrayList<Job>(cjobs.size());
  for (ControlledJob controlled : cjobs) {
    downcast.add((Job) controlled);
  }
  return downcast;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:8,代码来源:JobControl.java

示例4: createValueAggregatorJobs

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Builds a {@link JobControl} containing a single value-aggregator job
 * configured from {@code args} and the optional descriptor classes.
 *
 * @throws IOException if the underlying job cannot be created
 */
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {
  JobControl theControl = new JobControl("ValueAggregatorJobs");
  // The single job has no prerequisites, so its depending-jobs list stays empty.
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  // Descriptor classes, when supplied, drive the aggregator configuration.
  Configuration conf = (descriptors == null)
      ? new Configuration()
      : setAggregatorDescriptors(descriptors);
  ControlledJob cjob = new ControlledJob(createValueAggregatorJob(conf, args), dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:16,代码来源:ValueAggregatorJob.java

示例5: addJob

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Add new job to JobControl instance.
 */
/**
 * Add new job to JobControl instance.
 *
 * @param args the command-line arguments used to create the job
 * @throws IOException if job creation fails
 * @throws IllegalStateException if a job with the same name was already added
 */
public synchronized void addJob(String[] args) throws IOException {
    Job job = createJob(args);
    // BUG FIX: validate uniqueness BEFORE registering with the JobControl.
    // The original added the job first, so a duplicate name left an orphaned
    // job inside this.jc after the exception was thrown.
    if(this.jobIndexMap.containsKey(job.getJobName())) {
        throw new IllegalStateException("Job name should be unique. please check name with: " + job.getJobName());
    }
    this.jc.addJob(new ControlledJob(job, null));
    this.jobIndexMap.put(job.getJobName(), this.jobIndex);
    this.jobIndexParams.put(this.jobIndex, args);
    this.jobRunningTimes.put(this.jobIndex, 1);
    this.jobIndex += 1;
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:15,代码来源:GuaguaMapReduceClient.java

示例6: toFakedStateString

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Renders a synthetic "successful" status report for the given job,
 * mimicking the layout of ControlledJob's own state string.
 */
public String toFakedStateString(ControlledJob controlledJob) {
    // StringBuilder is sufficient here: the buffer never escapes this method.
    StringBuilder report = new StringBuilder();
    report.append("job name:\t").append(controlledJob.getJob().getJobName()).append('\n')
          .append("job id:\t").append(controlledJob.getJobID()).append('\n')
          .append("job state:\t").append("SUCCESS").append('\n')
          .append("job mapred id:\t").append(controlledJob.getJob().getJobID()).append('\n')
          .append("job message:\t").append(" successful job").append('\n')
          .append("job has no depending job:\t").append('\n');
    return report.toString();
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:11,代码来源:GuaguaMapReduceClient.java

示例7: checkState

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Reflectively invokes the private ControlledJob.checkState method,
 * rethrowing any reflection failure as an unchecked exception.
 */
private State checkState(ControlledJob controlledJob) {
  try {
    Object rawState = checkState.invoke(controlledJob);
    return (State) rawState;
  } catch (Exception reflectiveFailure) {
    throw new RuntimeException(reflectiveFailure);
  }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:8,代码来源:PigJobControl.java

示例8: submit

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Reflectively invokes the private ControlledJob.submit method,
 * rethrowing any reflection failure as an unchecked exception.
 */
private State submit(ControlledJob controlledJob) {
  try {
    Object rawState = submit.invoke(controlledJob);
    return (State) rawState;
  } catch (Exception reflectiveFailure) {
    throw new RuntimeException(reflectiveFailure);
  }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:8,代码来源:PigJobControl.java

示例9: getJobs

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/**
 * Reads one of JobControl's private job-queue fields reflectively.
 * The cast is unchecked because Field.get returns Object.
 */
@SuppressWarnings("unchecked")
private LinkedList<ControlledJob> getJobs(Field field) {
  try {
    Object queue = field.get(this);
    return (LinkedList<ControlledJob>) queue;
  } catch (Exception reflectiveFailure) {
    throw new RuntimeException(reflectiveFailure);
  }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:9,代码来源:PigJobControl.java

示例10: Job

import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; //导入依赖的package包/类
/** 
 * Construct a job.
 *
 * Thin compatibility shim: adapts the old mapred-era constructor signature
 * to the new-API superclass by converting the JobConf into a
 * org.apache.hadoop.mapreduce.Job instance.
 *
 * @param jobConf a mapred job configuration representing a job to be executed.
 * @param dependingJobs an array of jobs the current job depends on
 * @throws IOException if the underlying mapreduce Job cannot be created
 */
@SuppressWarnings("unchecked")
public Job(JobConf jobConf, ArrayList<?> dependingJobs) throws IOException {
  super(org.apache.hadoop.mapreduce.Job.getInstance(jobConf),
        (List<ControlledJob>) dependingJobs);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:11,代码来源:Job.java


注:本文中的org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。