

Java RecordWriter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.RecordWriter. If you are wondering what the RecordWriter class is for, how to use it, or what real-world usages look like, the selected examples below may help.


The RecordWriter class belongs to the org.apache.hadoop.mapreduce package. Fifteen code examples of the class are shown below, ordered by popularity.
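As a quick orientation before the examples: RecordWriter is the abstract class that an OutputFormat hands back from getRecordWriter(TaskAttemptContext); the framework then calls write(key, value) once per output record and close(context) at the end of the task. The sketch below is a minimal, illustrative implementation (not taken from any of the projects listed here) that writes tab-separated text lines.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Illustrative only: an OutputFormat whose RecordWriter emits "key<TAB>value" lines.
public class TabSeparatedOutputFormat extends FileOutputFormat<Text, Text> {

  @Override
  public RecordWriter<Text, Text> getRecordWriter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    Path file = getDefaultWorkFile(context, ".txt");
    FSDataOutputStream out =
        file.getFileSystem(context.getConfiguration()).create(file, false);

    return new RecordWriter<Text, Text>() {
      @Override
      public void write(Text key, Text value) throws IOException {
        out.write(key.copyBytes());
        out.write('\t');
        out.write(value.copyBytes());
        out.write('\n');
      }

      @Override
      public void close(TaskAttemptContext ctx) throws IOException {
        out.close(); // the OutputCommitter later promotes the finished task file
      }
    };
  }
}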

Example 1: ReduceContextImpl

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
public ReduceContextImpl(Configuration conf, TaskAttemptID taskid,
                         RawKeyValueIterator input, 
                         Counter inputKeyCounter,
                         Counter inputValueCounter,
                         RecordWriter<KEYOUT,VALUEOUT> output,
                         OutputCommitter committer,
                         StatusReporter reporter,
                         RawComparator<KEYIN> comparator,
                         Class<KEYIN> keyClass,
                         Class<VALUEIN> valueClass
                        ) throws InterruptedException, IOException{
  super(conf, taskid, output, committer, reporter);
  this.input = input;
  this.inputKeyCounter = inputKeyCounter;
  this.inputValueCounter = inputValueCounter;
  this.comparator = comparator;
  this.serializationFactory = new SerializationFactory(conf);
  this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
  this.keyDeserializer.open(buffer);
  this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
  this.valueDeserializer.open(buffer);
  hasMore = input.next();
  this.keyClass = keyClass;
  this.valueClass = valueClass;
  this.conf = conf;
  this.taskid = taskid;
}
 
Developer ID: naver, Project: hadoop, Lines: 28, Source: ReduceContextImpl.java

Example 2: getRecordWriter

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
/**
 * Defines the output format for each record; the input is the data emitted by the reduce task each time it calls write().
 */
@Override
public RecordWriter<BaseDimension, BaseStatsValueWritable> getRecordWriter(TaskAttemptContext context)
		throws IOException, InterruptedException {
	Configuration conf = context.getConfiguration();
	Connection conn = null;
	IDimensionConverter converter = new DimensionConverterImpl();
	try {
		conn = JdbcManager.getConnection(conf, GlobalConstants.WAREHOUSE_OF_REPORT);
		conn.setAutoCommit(false);
	} catch (SQLException e) {
		logger.error("獲取數據庫連接失敗", e);
		throw new IOException("獲取數據庫連接失敗", e);
	}
	return new TransformerRecordWriter(conn, conf, converter);
}
 
Developer ID: liuhaozzu, Project: big_data, Lines: 19, Source: TransformerOutputFormat.java
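The TransformerRecordWriter returned here comes from the same project but is not shown in this snippet. Purely as a hypothetical sketch (an assumption, not the big_data source), a JDBC-backed writer of this shape typically batches rows in write() and flushes and commits the connection in close():

// Hypothetical sketch of a JDBC-backed writer like TransformerRecordWriter (not the actual source).
public class TransformerRecordWriter extends RecordWriter<BaseDimension, BaseStatsValueWritable> {
  private final Connection conn;
  private final Configuration conf;
  private final IDimensionConverter converter;

  public TransformerRecordWriter(Connection conn, Configuration conf, IDimensionConverter converter) {
    this.conn = conn;
    this.conf = conf;
    this.converter = converter;
  }

  @Override
  public void write(BaseDimension key, BaseStatsValueWritable value)
      throws IOException, InterruptedException {
    // Typically: resolve dimension ids via the converter, fill a PreparedStatement
    // per target table, addBatch(), and executeBatch() every N records.
  }

  @Override
  public void close(TaskAttemptContext context) throws IOException, InterruptedException {
    try {
      conn.commit();   // autocommit was disabled in getRecordWriter()
    } catch (SQLException e) {
      throw new IOException(e);
    } finally {
      try {
        conn.close();
      } catch (SQLException e) {
        // best effort on close
      }
    }
  }
}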

Example 3: getRecordWriter

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@Override
/** {@inheritDoc} */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
    throws IOException {
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();

  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }

  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement = null;

    statement = connection.prepareStatement(
                  constructQuery(tableName, fieldNames));
    return new com.cloudera.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter(
                   connection, statement);
  } catch (Exception ex) {
    throw new IOException(ex);
  }
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 25, Source: DBOutputFormat.java
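The interesting part of this variant is constructQuery, which is not shown above. In the stock Hadoop DBOutputFormat it builds a plain parameterized INSERT statement; the snippet below is a simplified reconstruction of that behaviour (an approximation, not the Sqoop source) so the prepareStatement call above is easier to read.

// Approximation of DBOutputFormat.constructQuery(table, fieldNames):
// builds "INSERT INTO <table> (f1,f2,...) VALUES (?,?,...)".
public String constructQuery(String table, String[] fieldNames) {
  StringBuilder query = new StringBuilder("INSERT INTO ").append(table);
  if (fieldNames.length > 0 && fieldNames[0] != null) {
    query.append(" (");
    for (int i = 0; i < fieldNames.length; i++) {
      query.append(fieldNames[i]);
      if (i != fieldNames.length - 1) {
        query.append(",");
      }
    }
    query.append(")");
  }
  query.append(" VALUES (");
  for (int i = 0; i < fieldNames.length; i++) {
    query.append("?");
    if (i != fieldNames.length - 1) {
      query.append(",");
    }
  }
  query.append(")");
  return query.toString();
}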

Example 4: writeBadOutput

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
private void writeBadOutput(TaskAttempt attempt, Configuration conf)
  throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, 
      TypeConverter.fromYarn(attempt.getID()));
 
  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat
      .getRecordWriter(tContext);
  
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key2, val2);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val2);
    theRecordWriter.write(nullWritable, val1);
    theRecordWriter.write(key1, nullWritable);
    theRecordWriter.write(key2, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key1, val1);
  } finally {
    theRecordWriter.close(tContext);
  }
  
  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
 
Developer ID: naver, Project: hadoop, Lines: 29, Source: TestRecovery.java
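Examples 4, 5, 6 and 9 deliberately feed the writer every combination of null, NullWritable, and real keys and values. This is legal because TextOutputFormat's LineRecordWriter simply skips the null side of each pair; the following is a condensed paraphrase of that logic (slightly simplified from the Hadoop source) for reference.

// Condensed paraphrase of TextOutputFormat.LineRecordWriter.write():
// a null or NullWritable key/value is silently omitted from the output line.
public synchronized void write(K key, V value) throws IOException {
  boolean nullKey = key == null || key instanceof NullWritable;
  boolean nullValue = value == null || value instanceof NullWritable;
  if (nullKey && nullValue) {
    return;                       // nothing to write at all
  }
  if (!nullKey) {
    writeObject(key);             // Text is written raw, other types via toString()
  }
  if (!nullKey && !nullValue) {
    out.write(keyValueSeparator); // usually a tab
  }
  if (!nullValue) {
    writeObject(value);
  }
  out.write(NEWLINE);             // line terminator bytes
}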

Example 5: writeOutput

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
private void writeOutput(TaskAttempt attempt, Configuration conf)
  throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, 
      TypeConverter.fromYarn(attempt.getID()));
  
  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat
      .getRecordWriter(tContext);
  
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(tContext);
  }
  
  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
 
Developer ID: naver, Project: hadoop, Lines: 29, Source: TestRecovery.java

Example 6: writeOutput

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@SuppressWarnings("unchecked")
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Developer ID: naver, Project: hadoop, Lines: 19, Source: TestMRCJCFileOutputCommitter.java

Example 7: getRecordWriter

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
/** {@inheritDoc} */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) 
    throws IOException {
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  
  if(fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  
  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement = null;

    statement = connection.prepareStatement(
                  constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    throw new IOException(ex.getMessage());
  }
}
 
Developer ID: naver, Project: hadoop, Lines: 23, Source: DBOutputFormat.java

Example 8: addMapper

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
/**
 * Add mapper(the first mapper) that reads input from the input
 * context and writes to queue
 */
@SuppressWarnings("unchecked")
void addMapper(TaskInputOutputContext inputContext,
    ChainBlockingQueue<KeyValuePair<?, ?>> output, int index)
    throws IOException, InterruptedException {
  Configuration conf = getConf(index);
  Class<?> keyOutClass = conf.getClass(MAPPER_OUTPUT_KEY_CLASS, Object.class);
  Class<?> valueOutClass = conf.getClass(MAPPER_OUTPUT_VALUE_CLASS,
      Object.class);

  RecordReader rr = new ChainRecordReader(inputContext);
  RecordWriter rw = new ChainRecordWriter(keyOutClass, valueOutClass, output,
      conf);
  Mapper.Context mapperContext = createMapContext(rr, rw,
      (MapContext) inputContext, getConf(index));
  MapRunner runner = new MapRunner(mappers.get(index), mapperContext, rr, rw);
  threads.add(runner);
}
 
Developer ID: naver, Project: hadoop, Lines: 22, Source: Chain.java
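The method above is Chain's internal plumbing: each mapper in the chain gets a ChainRecordReader/ChainRecordWriter pair wired to a blocking queue so that its output feeds the next mapper. User code never calls it directly; a job is normally assembled with the public ChainMapper API, roughly as in the driver-side sketch below (AMap and BMap are placeholder Mapper classes, not real ones).

// Rough driver-side sketch; AMap and BMap are hypothetical Mapper implementations.
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "chained mappers");

ChainMapper.addMapper(job, AMap.class,
    LongWritable.class, Text.class,   // input key/value of the first mapper
    Text.class, Text.class,           // its output key/value
    new Configuration(false));

ChainMapper.addMapper(job, BMap.class,
    Text.class, Text.class,           // consumes the previous mapper's output
    LongWritable.class, Text.class,
    new Configuration(false));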

Example 9: writeOutput

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Developer ID: naver, Project: hadoop, Lines: 18, Source: TestFileOutputCommitter.java

Example 10: writeRandomKeyValues

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
/**
 * Write random values to the writer assuming a table created using
 * {@link #FAMILIES} as column family descriptors
 */
private void writeRandomKeyValues(RecordWriter<ImmutableBytesWritable, Cell> writer,
    TaskAttemptContext context, Set<byte[]> families, int numRows)
    throws IOException, InterruptedException {
  byte keyBytes[] = new byte[Bytes.SIZEOF_INT];
  int valLength = 10;
  byte valBytes[] = new byte[valLength];

  int taskId = context.getTaskAttemptID().getTaskID().getId();
  assert taskId < Byte.MAX_VALUE : "Unit tests dont support > 127 tasks!";
  final byte [] qualifier = Bytes.toBytes("data");
  Random random = new Random();
  for (int i = 0; i < numRows; i++) {

    Bytes.putInt(keyBytes, 0, i);
    random.nextBytes(valBytes);
    ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);

    for (byte[] family : families) {
      Cell kv = new KeyValue(keyBytes, family, qualifier, valBytes);
      writer.write(key, kv);
    }
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 28, Source: TestHFileOutputFormat2.java

Example 11: writeRandomKeyValues

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
/**
 * Write random values to the writer assuming a table created using
 * {@link #FAMILIES} as column family descriptors
 */
private void writeRandomKeyValues(RecordWriter<ImmutableBytesWritable, KeyValue> writer,
    TaskAttemptContext context, Set<byte[]> families, int numRows)
    throws IOException, InterruptedException {
  byte keyBytes[] = new byte[Bytes.SIZEOF_INT];
  int valLength = 10;
  byte valBytes[] = new byte[valLength];

  int taskId = context.getTaskAttemptID().getTaskID().getId();
  assert taskId < Byte.MAX_VALUE : "Unit tests dont support > 127 tasks!";
  final byte [] qualifier = Bytes.toBytes("data");
  Random random = new Random();
  for (int i = 0; i < numRows; i++) {

    Bytes.putInt(keyBytes, 0, i);
    random.nextBytes(valBytes);
    ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);

    for (byte[] family : families) {
      KeyValue kv = new KeyValue(keyBytes, family, qualifier, valBytes);
      writer.write(key, kv);
    }
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 28, Source: TestHFileOutputFormat.java

Example 12: prepareToWrite

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
    this.writer = writer;

    Properties props = getUDFProperties();
    String s = props.getProperty(ResourceSchema.class.getName());
    if (!StringUtils.hasText(s)) {
        log.warn("No resource schema found; using an empty one....");
        this.schema = new ResourceSchema();
    }
    else {
        this.schema = IOUtils.deserializeFromBase64(s);
    }
    this.pigTuple = new PigTuple(schema);
}
 
Developer ID: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 17, Source: EsStorage.java
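prepareToWrite is the Pig StoreFunc hook that hands the store function the RecordWriter Hadoop created for it; EsStorage keeps the writer and a reusable PigTuple wrapper built from the deserialized schema. What happens next is not shown here, but a typical putNext implementation (a hypothetical sketch, not the es-hadoop source; the setTuple helper is assumed) simply wraps each Pig tuple and forwards it to the stored writer:

// Hypothetical sketch of the matching putNext(); setTuple() is an assumed helper.
@Override
public void putNext(Tuple tuple) throws IOException {
  pigTuple.setTuple(tuple);          // reuse the wrapper created in prepareToWrite
  try {
    writer.write(null, pigTuple);    // key is unused; the value carries the document
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
    throw new IOException(ex);
  }
}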

Example 13: testWriteLongData

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@Test(enabled = true)
public void testWriteLongData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Long> sess =
      new MneDurableOutputSession<Long>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Long> mdvalue =
      new MneDurableOutputValue<Long>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Long>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<Long>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Long>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  Long val = null;
  for (int i = 0; i < m_reccnt; ++i) {
    val = m_rand.nextLong();
    m_sum += val;
    writer.write(nada, mdvalue.of(val));
  }
  writer.close(m_tacontext);
  sess.close();
}
 
Developer ID: apache, Project: mnemonic, Lines: 22, Source: MneMapreduceLongDataTest.java

Example 14: testWritePersonData

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@Test(enabled = true)
public void testWritePersonData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Person<Long>> sess =
      new MneDurableOutputSession<Person<Long>>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Person<Long>> mdvalue =
      new MneDurableOutputValue<Person<Long>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  Person<Long> person = null;
  for (int i = 0; i < m_reccnt; ++i) {
    person = sess.newDurableObjectRecord();
    person.setAge((short) m_rand.nextInt(50));
    person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
    m_sumage += person.getAge();
    writer.write(nada, mdvalue.of(person));
  }
  writer.close(m_tacontext);
  sess.close();
}
 
Developer ID: apache, Project: mnemonic, Lines: 24, Source: MneMapreducePersonDataTest.java

Example 15: getRecordWriter

import org.apache.hadoop.mapreduce.RecordWriter; // import the class used in this example
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, T> getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  Class<? extends DataStore<K,T>> dataStoreClass
    = (Class<? extends DataStore<K,T>>) conf.getClass(DATA_STORE_CLASS, null);
  Class<K> keyClass = (Class<K>) conf.getClass(OUTPUT_KEY_CLASS, null);
  Class<T> rowClass = (Class<T>) conf.getClass(OUTPUT_VALUE_CLASS, null);
  final DataStore<K, T> store =
    DataStoreFactory.createDataStore(dataStoreClass, keyClass, rowClass, context.getConfiguration());

  setOutputPath(store, context);

  return new GoraRecordWriter(store, context);
}
 
Developer ID: jianglibo, Project: gora-boot, Lines: 17, Source: GoraOutputFormat.java


Note: The org.apache.hadoop.mapreduce.RecordWriter class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and any use or redistribution should follow the corresponding project's license. Please do not republish without permission.