

Java Context.getConfiguration Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.mapreduce.Mapper.Context.getConfiguration. If you have been wondering what Context.getConfiguration does and how to use it in practice, the selected code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.mapreduce.Mapper.Context.


The sections below present 14 code examples of the Context.getConfiguration method, sorted by popularity by default.
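
Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them share: the driver puts a custom property into the job Configuration, and the mapper reads it back in setup() through Context.getConfiguration(). The DelimiterCountMapper class and the demo.delimiter property below are made up for illustration and do not come from any of the projects listed here.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class DelimiterCountMapper
        extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text token = new Text();
    private String delimiter;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        // Context.getConfiguration() returns the job Configuration,
        // including any custom properties set on the driver side.
        Configuration conf = context.getConfiguration();
        delimiter = conf.get("demo.delimiter", ","); // hypothetical property name
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Split each input line on the configured delimiter and emit (token, 1).
        for (String part : value.toString().split(delimiter)) {
            token.set(part);
            context.write(token, ONE);
        }
    }
}

On the driver side, the property would be set before job submission, for example conf.set("demo.delimiter", ";") followed by Job.getInstance(conf, "delimiter count"); the value then becomes visible to every mapper via Context.getConfiguration().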

Example 1: getSentiFile

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
private void getSentiFile(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    String swnPath = conf.get("sentwordnetfile");
    System.out.println("@@@ Path: " + swnPath);
    this.linhas = new ArrayList<String>();
    try {
        Path pt = new Path(swnPath);
        FileSystem fs = FileSystem.get(new Configuration());
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
        String line = br.readLine();
        while (line != null) {
            linhas.add(line);
            line = br.readLine();
        }
    } catch (Exception e) {
        System.out.println("@@@@ ERRO: " + e.getMessage());
        throw new IOException(e);
    }
    sdc = new SentiWordNetDemoCode(linhas);
}
 
Developer ID: cleuton, Project: bigdatasample, Lines of code: 22, Source file: TokenizerMapper.java

Example 2: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 32, Source file: ParquetExportMapper.java

Example 3: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
public void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    multipleOutputs = new MultipleOutputs(context);
    lowerBoundary = conf.get("LOWER_DATE");
    upperBoundary = conf.get("HIGHER_DATE");
}
 
Developer ID: gatripat, Project: InsAdjustment, Lines of code: 9, Source file: CSVparserMapper.java

Example 4: JobHistoryFileReplayHelper

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
JobHistoryFileReplayHelper(Context context) throws IOException {
  Configuration conf = context.getConfiguration();
  int taskId = context.getTaskAttemptID().getTaskID().getId();
  int size = conf.getInt(MRJobConfig.NUM_MAPS,
      TimelineServicePerformance.NUM_MAPS_DEFAULT);
  replayMode = conf.getInt(JobHistoryFileReplayHelper.REPLAY_MODE,
          JobHistoryFileReplayHelper.REPLAY_MODE_DEFAULT);
  String processingDir =
      conf.get(JobHistoryFileReplayHelper.PROCESSING_PATH);

  Path processingPath = new Path(processingDir);
  FileSystem processingFs = processingPath.getFileSystem(conf);
  parser = new JobHistoryFileParser(processingFs);
  jobFiles = selectJobFiles(processingFs, processingPath, taskId, size);
}
 
Developer ID: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 16, Source file: JobHistoryFileReplayHelper.java

Example 5: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    overrideRdfContext = conf.getBoolean(OVERRIDE_CONTEXT_PROPERTY, false);
    String defCtx = conf.get(DEFAULT_CONTEXT_PROPERTY);
    defaultRdfContext = defCtx == null ? null : SimpleValueFactory.getInstance().createIRI(defCtx);
    decimationFactor = conf.getInt(DECIMATION_FACTOR_PROPERTY, DEFAULT_DECIMATION_FACTOR);
    for (byte b = 1; b < 6; b++) {
        context.write(new ImmutableBytesWritable(new byte[] {b}), new LongWritable(1));
    }
    timestamp = conf.getLong(DEFAULT_TIMESTAMP_PROPERTY, System.currentTimeMillis());
}
 
Developer ID: Merck, Project: Halyard, Lines of code: 13, Source file: HalyardPreSplit.java

Example 6: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void setup(Context context)
        throws IOException, InterruptedException {
    super.setup(context);
    // Read the reference data into memory on the mapper.
    myMap = new HashMap<String, String>();
    Configuration conf = context.getConfiguration();
    String mybusinessdataPath = conf.get("businessdata");
    // e.g. /user/hue/input/
    Path part = new Path("hdfs://cshadoop1" + mybusinessdataPath); // location of the file in HDFS

    FileSystem fs = FileSystem.get(conf);
    FileStatus[] fss = fs.listStatus(part);
    for (FileStatus status : fss) {
        Path pt = status.getPath();
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
        String line = br.readLine();
        while (line != null) {
            String[] arr = line.split("\\^");
            if (arr.length == 3) {
                myMap.put(arr[0].trim(), line); // business id and the remaining data columns
            }
            line = br.readLine();
        }
    }
}
 
Developer ID: BhargaviRavula, Project: Bigdata, Lines of code: 36, Source file: UserRatedStanford.java

Example 7: run

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
public void run(Context context) throws IOException, InterruptedException {
  this.conf = context.getConfiguration();
  setup(context);
  initCpImportProcess();
  try {
    while (context.nextKeyValue()) {
      map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
    cleanup(context);
  } finally {
    // Shut down the cpimport process.
    closeExportHandles();
  }
}
 
Developer ID: infinidb, Project: sqoop, Lines of code: 16, Source file: InfiniDBExportMapper.java

Example 8: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void setup(Context context) {
  this.conf = context.getConfiguration();

  // TODO: Support additional encodings.
  // rtw-TODO: figure out if this is relevant
  this.cpCharSet = MySQLUtils.MYSQL_DEFAULT_CHARSET;

}
 
Developer ID: infinidb, Project: sqoop, Lines of code: 10, Source file: InfiniDBExportMapper.java

Example 9: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
public void setup(Context context) {
	Configuration conf = context.getConfiguration();
	noGram = conf.getInt("noGram", 5);
}
 
Developer ID: yogykwan, Project: mapreduce-samples, Lines of code: 6, Source file: NGramLibraryBuilder.java

Example 10: map

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
public void map(IntWritable key, IntWritable val, Context context) throws IOException {
  TimelineClient tlc = new TimelineClientImpl();
  Configuration conf = context.getConfiguration();

  final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);

  long totalTime = 0;
  final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
  final Random rand = new Random();
  final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
  final char[] payLoad = new char[kbs * 1024];

  for (int i = 0; i < testtimes; i++) {
    // Generate a fixed length random payload
    for (int xx = 0; xx < kbs * 1024; xx++) {
      int alphaNumIdx =
          rand.nextInt(ALPHA_NUMS.length);
      payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
    }
    String entId = taskAttemptId + "_" + Integer.toString(i);
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(entId);
    entity.setEntityType("FOO_ATTEMPT");
    entity.addOtherInfo("PERF_TEST", payLoad);
    // add an event
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType("foo_event");
    entity.addEvent(event);

    // use the current user for this purpose
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    long startWrite = System.nanoTime();
    try {
      tlc.putEntities(entity);
    } catch (Exception e) {
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).
          increment(1);
      LOG.error("writing to the timeline service failed", e);
    }
    long endWrite = System.nanoTime();
    totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite-startWrite);
  }
  LOG.info("wrote " + testtimes + " entities (" + kbs*testtimes +
      " kB) in " + totalTime + " ms");
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).
      increment(totalTime);
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).
      increment(testtimes);
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).
      increment(kbs*testtimes);
}
 
Developer ID: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 53, Source file: SimpleEntityWriterV1.java

Example 11: cleanup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    HalyardTableUtils.getTable(conf, conf.get(TABLE_PROPERTY), true, splits.toArray(new byte[splits.size()][])).close();
}
 
Developer ID: Merck, Project: Halyard, Lines of code: 6, Source file: HalyardPreSplit.java

Example 12: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);    //To change body of overridden methods use File | Settings | File Templates.

  Configuration conf = context.getConfiguration();

  URI[] files = DistributedCache.getCacheFiles(conf);

  if (files == null || files.length < 2) {
    throw new IOException("not enough paths in the DistributedCache");
  }
  
  dataset = Dataset.load(conf, new Path(files[0].getPath()));

  converter = new DataConverter(dataset);
  
  ruleBase = RuleBase.load(conf, new Path(files[1].getPath()));  
  
  if (ruleBase == null) {
    throw new InterruptedException("Model not found!");
  }
}
 
Developer ID: saradelrio, Project: Chi-FRBCS-BigData-Ave, Lines of code: 23, Source file: Chi_RWClassifier.java

Example 13: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
/**
 * Configures the Reduce plan, the POPackage operator
 * and the reporter thread
 */
@SuppressWarnings("unchecked")
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    inIllustrator = inIllustrator(context);
    if (inIllustrator)
        pack = getPack(context);
    Configuration jConf = context.getConfiguration();
    SpillableMemoryManager.configure(ConfigurationUtil.toProperties(jConf));
    sJobContext = context;
    sJobConfInternal.set(context.getConfiguration());
    sJobConf = context.getConfiguration();
    try {
        PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(jConf.get("udf.import.list")));
        pigContext = (PigContext)ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
        
        if (rp == null)
            rp = (PhysicalPlan) ObjectSerializer.deserialize(jConf
                    .get("pig.reducePlan"));
        stores = PlanHelper.getStores(rp);

        if (!inIllustrator)
            pack = (POPackage)ObjectSerializer.deserialize(jConf.get("pig.reduce.package"));
        // To be removed
        if(rp.isEmpty())
            log.debug("Reduce Plan empty!");
        else{
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            rp.explain(baos);
            log.debug(baos.toString());
        }
        pigReporter = new ProgressableReporter();
        if(!(rp.isEmpty())) {
            roots = rp.getRoots().toArray(new PhysicalOperator[1]);
            leaf = rp.getLeaves().get(0);
        }
        
        // Get the UDF specific context
        MapRedUtil.setupUDFContext(jConf);
    
    } catch (IOException ioe) {
        String msg = "Problem while configuring reduce plan.";
        throw new RuntimeException(msg, ioe);
    }
}
 
Developer ID: PonIC, Project: PonIC, Lines of code: 50, Source file: PigGenericMapReduce.java

Example 14: setup

import org.apache.hadoop.mapreduce.Mapper.Context; // import the package/class the method depends on
/**
 * Configures the mapper with the map plan and the
 * reporter thread
 */
@SuppressWarnings("unchecked")
@Override
public void setup(Context context) throws IOException, InterruptedException {       	
    super.setup(context);
    
    Configuration job = context.getConfiguration();
    SpillableMemoryManager.configure(ConfigurationUtil.toProperties(job));
    PigMapReduce.sJobContext = context;
    PigMapReduce.sJobConfInternal.set(context.getConfiguration());
    PigMapReduce.sJobConf = context.getConfiguration();
    inIllustrator = inIllustrator(context);
    
    PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(job.get("udf.import.list")));
    pigContext = (PigContext)ObjectSerializer.deserialize(job.get("pig.pigContext"));
    if (pigContext.getLog4jProperties()!=null)
        PropertyConfigurator.configure(pigContext.getLog4jProperties());
    
    if (mp == null)
        mp = (PhysicalPlan) ObjectSerializer.deserialize(
            job.get("pig.mapPlan"));
    stores = PlanHelper.getStores(mp);
    
    // To be removed
    if(mp.isEmpty())
        log.debug("Map Plan empty!");
    else{
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        mp.explain(baos);
        log.debug(baos.toString());
    }
    keyType = ((byte[])ObjectSerializer.deserialize(job.get("pig.map.keytype")))[0];
    // till here
    
    pigReporter = new ProgressableReporter();
    // Get the UDF specific context
    MapRedUtil.setupUDFContext(job);

    if(!(mp.isEmpty())) {

        PigSplit split = (PigSplit)context.getInputSplit();
        List<OperatorKey> targetOpKeys = split.getTargetOps();
        
        ArrayList<PhysicalOperator> targetOpsAsList = new ArrayList<PhysicalOperator>();
        for (OperatorKey targetKey : targetOpKeys) {                    
            targetOpsAsList.add(mp.getOperator(targetKey));
        }
        roots = targetOpsAsList.toArray(new PhysicalOperator[1]);
        leaf = mp.getLeaves().get(0);               
    }
    
    PigStatusReporter.setContext(context);
 
}
 
Developer ID: PonIC, Project: PonIC, Lines of code: 58, Source file: PigGenericMapBase.java


Note: The org.apache.hadoop.mapreduce.Mapper.Context.getConfiguration method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their respective developers, and the copyright of the source code belongs to the original authors. Please consult the corresponding project's License before distributing or using the code, and do not reproduce this article without permission.