

Java Context Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.Mapper.Context. If you are wondering what the Context class is for, how to use it, or what working examples look like, the curated code examples below may help.


The Context class belongs to the org.apache.hadoop.mapreduce.Mapper package. A total of 15 code examples of the Context class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
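Before the collected examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two most common uses of Context that recur throughout this article: reading the job Configuration in setup() and emitting key/value pairs with context.write() in map(). The configuration property "tokenizer.separator.regex" is a hypothetical name used only for illustration.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordTokenMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

  private static final IntWritable ONE = new IntWritable(1);
  private final Text word = new Text();
  private String separatorRegex;

  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    // Context exposes the job Configuration to the mapper.
    Configuration conf = context.getConfiguration();
    // "tokenizer.separator.regex" is a hypothetical property used only for illustration.
    separatorRegex = conf.get("tokenizer.separator.regex", "\\s+");
  }

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    for (String token : value.toString().split(separatorRegex)) {
      if (!token.isEmpty()) {
        word.set(token);
        // Context is also how the mapper emits its output pairs.
        context.write(word, ONE);
      }
    }
  }
}

The examples that follow use Context in these same ways: to read configuration, to obtain task metadata, and to emit output records.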

Example 1: getSentiFile

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
private void getSentiFile(Context context) throws IOException {
  Configuration conf = context.getConfiguration();
  String swnPath = conf.get("sentwordnetfile");
  System.out.println("@@@ Path: " + swnPath);
  this.linhas = new ArrayList<String>();
  try {
    Path pt = new Path(swnPath);
    FileSystem fs = FileSystem.get(new Configuration());
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
    String line = br.readLine();
    while (line != null) {
      linhas.add(line);
      line = br.readLine();
    }
  } catch (Exception e) {
    System.out.println("@@@@ ERRO: " + e.getMessage());
    throw new IOException(e);
  }
  sdc = new SentiWordNetDemoCode(linhas);
}
 
Developer ID: cleuton, Project: bigdatasample, Lines of code: 22, Source: TokenizerMapper.java

Example 2: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
public void map(NullWritable key, NullWritable value, Context context) throws IOException, InterruptedException {

  int counter = 0;
  
  System.out.println("starting mapper");
  System.out.println();
  for (int i = 0; i < numberOfRecords; i++) {
    String keyRoot = StringUtils.leftPad(Integer.toString(r.nextInt(Short.MAX_VALUE)), 5, '0');

    if (i % 1000 == 0) {
      System.out.print(".");
    }

    for (int j = 0; j < 10; j++) {
      hKey.set(Bytes.toBytes(keyRoot + "|" + runID + "|" + taskId));
      kv = new KeyValue(hKey.get(), columnFamily, Bytes.toBytes("C" + j), Bytes.toBytes("counter:" + counter++ ));
      context.write(hKey, kv);
    }
  }

  System.out.println("finished mapper");
}
 
Developer ID: tmalaska, Project: HBase-ToHDFS, Lines of code: 24, Source: PopulateTable.java

Example 3: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
public void map(LongWritable key, Writable value, Context context)
  throws IOException, InterruptedException {
  try {
    String str = value.toString();
    if (value instanceof Text) {
      writer.write(str, 0, str.length());
      writer.newLine();
    } else if (value instanceof SqoopRecord) {
      writer.write(str, 0, str.length());
    }
  } catch (Exception e) {
    doExecuteUpdate("DROP TABLE " + tmpTableName);
    cleanup(context);
    throw new IOException(e);
  }
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 17, Source: PGBulkloadExportMapper.java

Example 4: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
public void map(LongWritable key, Writable value, Context context)
  throws IOException, InterruptedException {
  line.setLength(0);
  line.append(value.toString());
  if (value instanceof Text) {
    line.append(System.getProperty("line.separator"));
  }
  try {
    byte[] data = line.toString().getBytes("UTF-8");
    copyin.writeToCopy(data, 0, data.length);
  } catch (SQLException ex) {
    LoggingUtils.logAll(LOG, "Unable to execute copy", ex);
    close();
    throw new IOException(ex);
  }
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 18, Source: PostgreSQLCopyExportMapper.java

Example 5: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 32, Source: ParquetExportMapper.java

Example 6: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

  String line = value.toString();

  line = line.trim().toLowerCase();
  line = line.replaceAll("[^a-z]+", " ");
  String[] words = line.split("\\s+"); // split by ' ', '\t', '\n', etc.

  if (words.length < 2) {
    return;
  }

  StringBuilder sb;
  for (int i = 0; i < words.length - 1; i++) {
    sb = new StringBuilder();
    for (int j = 0; i + j < words.length && j < noGram; j++) {
      sb.append(" ");
      sb.append(words[i + j]);
      context.write(new Text(sb.toString().trim()), new IntWritable(1));
    }
  }
}
 
Developer ID: yogykwan, Project: mapreduce-samples, Lines of code: 23, Source: NGramLibraryBuilder.java

Example 7: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
public void setup(Context context) throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();

  multipleOutputs = new MultipleOutputs(context);
  lowerBoundary = conf.get("LOWER_DATE");
  upperBoundary = conf.get("HIGHER_DATE");
}
 
Developer ID: gatripat, Project: InsAdjustment, Lines of code: 9, Source: CSVparserMapper.java

Example 8: JobHistoryFileReplayHelper

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
JobHistoryFileReplayHelper(Context context) throws IOException {
  Configuration conf = context.getConfiguration();
  int taskId = context.getTaskAttemptID().getTaskID().getId();
  int size = conf.getInt(MRJobConfig.NUM_MAPS,
      TimelineServicePerformance.NUM_MAPS_DEFAULT);
  replayMode = conf.getInt(JobHistoryFileReplayHelper.REPLAY_MODE,
          JobHistoryFileReplayHelper.REPLAY_MODE_DEFAULT);
  String processingDir =
      conf.get(JobHistoryFileReplayHelper.PROCESSING_PATH);

  Path processingPath = new Path(processingDir);
  FileSystem processingFs = processingPath.getFileSystem(conf);
  parser = new JobHistoryFileParser(processingFs);
  jobFiles = selectJobFiles(processingFs, processingPath, taskId, size);
}
 
Developer ID: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 16, Source: JobHistoryFileReplayHelper.java

Example 9: testLoadMapper

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@SuppressWarnings({"rawtypes", "unchecked"})
@Test (timeout=10000)
public void testLoadMapper() throws Exception {

  Configuration conf = new Configuration();
  conf.setInt(JobContext.NUM_REDUCES, 2);

  CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
  conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);

  TaskAttemptID taskId = new TaskAttemptID();
  RecordReader<NullWritable, GridmixRecord> reader = new FakeRecordReader();

  LoadRecordGkGrWriter writer = new LoadRecordGkGrWriter();

  OutputCommitter committer = new CustomOutputCommitter();
  StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
  LoadSplit split = getLoadSplit();

  MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapContext = new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>(
          conf, taskId, reader, writer, committer, reporter, split);
  // context
  Context ctx = new WrappedMapper<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>()
          .getMapContext(mapContext);

  reader.initialize(split, ctx);
  ctx.getConfiguration().setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
  CompressionEmulationUtil.setCompressionEmulationEnabled(
          ctx.getConfiguration(), true);

  LoadJob.LoadMapper mapper = new LoadJob.LoadMapper();
  // setup, map, clean
  mapper.run(ctx);

  Map<GridmixKey, GridmixRecord> data = writer.getData();
  // check result
  assertEquals(2, data.size());

}
 
Developer ID: yncxcw, Project: big-c, Lines of code: 40, Source: TestGridMixClasses.java

Example 10: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    overrideRdfContext = conf.getBoolean(OVERRIDE_CONTEXT_PROPERTY, false);
    String defCtx = conf.get(DEFAULT_CONTEXT_PROPERTY);
    defaultRdfContext = defCtx == null ? null : SimpleValueFactory.getInstance().createIRI(defCtx);
    decimationFactor = conf.getInt(DECIMATION_FACTOR_PROPERTY, DEFAULT_DECIMATION_FACTOR);
    for (byte b = 1; b < 6; b++) {
        context.write(new ImmutableBytesWritable(new byte[] {b}), new LongWritable(1));
    }
    timestamp = conf.getLong(DEFAULT_TIMESTAMP_PROPERTY, System.currentTimeMillis());
}
 
Developer ID: Merck, Project: Halyard, Lines of code: 13, Source: HalyardPreSplit.java

Example 11: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  // Read reference data into memory on the mapper.
  myMap = new HashMap<String, String>();
  Configuration conf = context.getConfiguration();
  String mybusinessdataPath = conf.get("businessdata"); // e.g. /user/hue/input/
  Path part = new Path("hdfs://cshadoop1" + mybusinessdataPath); // location of the file in HDFS

  FileSystem fs = FileSystem.get(conf);
  FileStatus[] fss = fs.listStatus(part);
  for (FileStatus status : fss) {
    Path pt = status.getPath();
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
    String line = br.readLine();
    while (line != null) {
      String[] arr = line.split("\\^");
      if (arr.length == 3) {
        myMap.put(arr[0].trim(), line); // business id and the remaining data columns
      }
      line = br.readLine();
    }
  }
}
 
Developer ID: BhargaviRavula, Project: Bigdata, Lines of code: 36, Source: UserRatedStanford.java

Example 12: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
  // from ratings
  String[] mydata = value.toString().split("\\^");

  if (mydata.length == 3) {
    if (mydata[1].contains("Palo Alto")) {
      context.write(new Text(mydata[0].trim()), new IntWritable(1));
    }

    /*
    if ("review".compareTo(mydata[mydata.length - 2].trim()) == 0) {
      context.write(new Text(mydata[mydata.length - 2].trim()), new IntWritable(1));
    }
    if ("user".compareTo(mydata[mydata.length - 2].trim()) == 0) {
      context.write(new Text(mydata[mydata.length - 2].trim()), new IntWritable(1));
    }
    */
  }
}
 
Developer ID: BhargaviRavula, Project: Bigdata, Lines of code: 22, Source: CountYelpReview.java

Example 13: cleanup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  Collections.sort(myarray, new MyMovieComparator());
  int count = 0;
  for (MyBusinessData data : myarray) {
    result.set("" + data.rating);
    myKey.set(data.businessId);
    context.write(myKey, result); // emit a pair <business id, rating>
    count++;
    if (count >= 10) {
      break;
    }
  }
}
 
Developer ID: BhargaviRavula, Project: Bigdata, Lines of code: 18, Source: Top10BusRev.java

Example 14: startAligner

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
@Override
protected void startAligner(Mapper.Context context) throws IOException, InterruptedException {
    // make command
    String customArgs = HalvadeConf.getCustomArgs(context.getConfiguration(), "bwa", "mem");
    String[] command = CommandGenerator.bwaMem(bin, ref, null, null, isPaired, true, threads, customArgs);
    pbw = new ProcessBuilderWrapper(command, bin);
    // run command
    // needs to be streamed to output otherwise the process blocks ...
    pbw.startProcess(null, System.err);
    // check if alive.
    if(!pbw.isAlive())
        throw new ProcessException("BWA mem", pbw.getExitState());
    pbw.getSTDINWriter();
    // make a SAMstream handler
    ssh = new SAMStreamHandler(instance, context, false);
    ssh.start();
}
 
Developer ID: biointec, Project: halvade, Lines of code: 18, Source: BWAMemInstance.java

Example 15: logEvent

import org.apache.hadoop.mapreduce.Mapper.Context; // import the required package/class
static public void logEvent(FileSystem fs, Path path, LOGTYPES type,
    LOGRESULTS result, Codec codec, Context context, LogSample sample,
    String tag) {
  try {
    if (context == null) {
      incrRaidNodeMetricCounter(fs, type, result, tag);
    } else {
      incrLogMetricCounter(context, fs, type, result, tag);
    } 
    if (sample == null) sample = new LogSample();
    if (path != null) sample.addNormalValue(LOGKEYS.Path.name(), path.toString());
    if (codec != null) sample.addNormalValue(LOGKEYS.Code.name(), codec.id);
    sample.addNormalValue(LOGKEYS.Type.name(), type.name()); 
    sample.addNormalValue(LOGKEYS.Cluster.name(), fs.getUri().getAuthority());
    EVENTS_LOG.info(sample.toJSON());
  } catch (Exception e) {
    LOG.warn("Exception when logging the File_Fix_WaitTime metric : " +
        e.getMessage(), e);
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 21, Source: LogUtils.java


Note: The org.apache.hadoop.mapreduce.Mapper.Context examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use are subject to the corresponding projects' licenses. Do not reproduce without permission.