當前位置: 首頁>>代碼示例>>Java>>正文


Java WritableUtils.writeString方法代碼示例

本文整理匯總了Java中org.apache.hadoop.io.WritableUtils.writeString方法的典型用法代碼示例。如果您正苦於以下問題:Java WritableUtils.writeString方法的具體用法?Java WritableUtils.writeString怎麼用?Java WritableUtils.writeString使用的例子?那麼,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.io.WritableUtils的用法示例。


在下文中一共展示了WritableUtils.writeString方法的12個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。

示例1: setupResponse

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes the response for an IPC call into the supplied buffer.
 *
 * Wire layout: call id (int), status code (int), then either the marshalled
 * return value (on success) or the error class name and message (on failure).
 *
 * @param response buffer the serialized response is written into (reset first)
 * @param call {@link Call} this response belongs to
 * @param status {@link Status} of the IPC call
 * @param rv return value to marshal when the call succeeded
 * @param errorClass exception class name when the call failed
 * @param error error message when the call failed
 * @throws IOException if writing to the stream fails
 */
private void setupResponse(ByteArrayOutputStream response, 
                           Call call, Status status, 
                           Writable rv, String errorClass, String error) 
throws IOException {
  response.reset();
  DataOutputStream dataOut = new DataOutputStream(response);
  // Header: call id followed by the status code.
  dataOut.writeInt(call.id);
  dataOut.writeInt(status.state);

  boolean succeeded = (status == Status.SUCCESS);
  if (succeeded) {
    // Payload on success: the call's marshalled return value.
    rv.write(dataOut);
  } else {
    // Payload on failure: exception class name, then the message.
    WritableUtils.writeString(dataOut, errorClass);
    WritableUtils.writeString(dataOut, error);
  }
  // SASL wrapping is intentionally disabled in this variant:
  // if (call.connection.useWrap) { wrapWithSasl(response, call); }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
開發者ID:spafka,項目名稱:spark_deep,代碼行數:32,代碼來源:Server.java

示例2: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this counter group.
 *
 * Wire layout: FileSystemGroup ::= #scheme (scheme #counter (key value)*)*
 */
@Override
public void write(DataOutput out) throws IOException {
  // Number of filesystem schemes in this group.
  WritableUtils.writeVInt(out, map.size());
  for (Map.Entry<String, Object[]> schemeEntry : map.entrySet()) {
    Object[] counters = schemeEntry.getValue();
    WritableUtils.writeString(out, schemeEntry.getKey());    // scheme
    // Only counters that are actually set are emitted below.
    WritableUtils.writeVInt(out, numSetCounters(counters));
    for (Object slot : counters) {
      if (slot == null) {
        continue; // unset counter slot -- nothing to emit
      }
      @SuppressWarnings("unchecked")
      FSCounter fsCounter = (FSCounter) ((Counter) slot).getUnderlyingCounter();
      WritableUtils.writeVInt(out, fsCounter.key.ordinal());  // key
      WritableUtils.writeVLong(out, fsCounter.getValue());    // value
    }
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:20,代碼來源:FileSystemCounterGroup.java

示例3: setupResponseOldVersionFatal

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Builds the fatal-error response sent to a client that speaks an old
 * version of the Hadoop IPC protocol, using that protocol's response layout
 * (call id, then a fatal status marker, then error class and message).
 *
 * @param response buffer the serialized response is written into (reset first)
 * @param call {@link Call} this response belongs to
 * @param rv return value for the IPC call, if the call was successful
 * @param errorClass exception class name for the failure
 * @param error error message for the failure
 * @throws IOException if writing to the stream fails
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response, 
                           Call call,
                           Writable rv, String errorClass, String error) 
throws IOException {
  // Status value the pre-upgrade protocol interprets as a fatal error.
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream legacyOut = new DataOutputStream(response);
  legacyOut.writeInt(call.callId);              // call id
  legacyOut.writeInt(OLD_VERSION_FATAL_STATUS); // fatal status marker
  WritableUtils.writeString(legacyOut, errorClass);
  WritableUtils.writeString(legacyOut, error);

  // Wrap the payload with SASL when the connection negotiated it.
  if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:31,代碼來源:Server.java

示例4: setupResponseOldVersionFatal

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Builds the fatal-error response sent to a client that speaks an old
 * version of the Hadoop IPC protocol, using that protocol's response layout
 * (call id, then a fatal status marker, then error class and message).
 *
 * @param response buffer the serialized response is written into (reset first)
 * @param call {@link Call} this response belongs to
 * @param rv return value for the IPC call, if the call was successful
 * @param errorClass exception class name for the failure
 * @param error error message for the failure
 * @throws IOException if writing to the stream fails
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response, 
                           Call call,
                           Writable rv, String errorClass, String error) 
throws IOException {
  // Status value the pre-upgrade protocol interprets as a fatal error.
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream legacyOut = new DataOutputStream(response);
  legacyOut.writeInt(call.callId);              // call id
  legacyOut.writeInt(OLD_VERSION_FATAL_STATUS); // fatal status marker
  WritableUtils.writeString(legacyOut, errorClass);
  WritableUtils.writeString(legacyOut, error);
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
開發者ID:nucypher,項目名稱:hadoop-oss,代碼行數:27,代碼來源:Server.java

示例5: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this task-completion event. Field order is the wire format and
 * must match the corresponding readFields/deserialization logic -- TODO
 * confirm against the reader, which is not visible here.
 */
public void write(DataOutput out) throws IOException {
  taskId.write(out);                              // task attempt identifier
  WritableUtils.writeVInt(out, idWithinJob);      // event index within the job
  out.writeBoolean(isMap);                        // true for a map task -- assumed from the name; verify
  WritableUtils.writeEnum(out, status);           // completion status enum
  WritableUtils.writeString(out, taskTrackerHttp);// tracker HTTP address string
  WritableUtils.writeVInt(out, taskRunTime);      // task run time (units not shown here)
  WritableUtils.writeVInt(out, eventId);          // event identifier
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:10,代碼來源:TaskCompletionEvent.java

示例6: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this block token identifier: expiry, key id, user, block pool,
 * block id, then the set of access modes (count followed by each mode).
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVLong(out, expiryDate);      // token expiry
  WritableUtils.writeVInt(out, keyId);            // signing key id
  WritableUtils.writeString(out, userId);
  WritableUtils.writeString(out, blockPoolId);
  WritableUtils.writeVLong(out, blockId);
  // Access modes: size prefix, then each mode as an enum name.
  WritableUtils.writeVInt(out, modes.size());
  for (AccessMode grantedMode : modes) {
    WritableUtils.writeEnum(out, grantedMode);
  }
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:13,代碼來源:BlockTokenIdentifier.java

示例7: writeWritable

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Writes a {@link Writable} as a typed bytes sequence: the runtime class
 * name (so a reader can re-instantiate it) followed by the instance's own
 * serialized form, emitted as one WRITABLE-typed byte block.
 *
 * @param w the writable to serialize
 * @throws IOException if serialization or the underlying write fails
 */
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  // try-with-resources guarantees the wrapper stream is flushed and closed
  // even if w.write() throws; the original leaked dos on that path.
  try (DataOutputStream dos = new DataOutputStream(baos)) {
    WritableUtils.writeString(dos, w.getClass().getName());
    w.write(dos);
  }
  out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:9,代碼來源:TypedBytesWritableOutput.java

示例8: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes the index attributes of this table relation: the index type
 * name, then either the list of index relationships (user-defined index) or
 * the two family maps (built-in index).
 *
 * @param out sink to write the attributes to
 * @throws IOException if the underlying stream fails
 */
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, indexType.toString());
  boolean userDefined = IndexType.isUserDefinedIndex(indexType);
  if (userDefined) {
    // User-defined index: relationship count, then each relationship.
    WritableUtils.writeVInt(out, indexRelations.size());
    for (IndexRelationship relationship : indexRelations) {
      relationship.write(out);
    }
  } else {
    // Built-in index: index family map followed by the family map.
    writeFamilyMap(indexFamilyMap, out);
    writeFamilyMap(familyMap, out);
  }
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:21,代碼來源:IndexTableRelation.java

示例9: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this record: the file reference as its string form, then a
 * length. Order must match the corresponding readFields -- not visible here.
 */
public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, file.toString()); // file path, stringified
  WritableUtils.writeVLong(out, len);              // presumably the file length in bytes -- confirm against reader
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:5,代碼來源:GenericMRLoadGenerator.java

示例10: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this job info: the job id, the user, then the submit
 * directory written as its string form.
 */
public void write(DataOutput out) throws IOException {
  id.write(out);   // job id (Writable)
  user.write(out); // submitting user (Writable)
  WritableUtils.writeString(out, jobSubmitDir.toString()); // submit dir path as a string
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:6,代碼來源:JobInfo.java

示例11: write

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Serializes this column descriptor: compressed family and qualifier byte
 * arrays, the data type's string name, and the index flag encoded as a vint.
 */
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeCompressedByteArray(out, family);
  WritableUtils.writeCompressedByteArray(out, qualifier);
  WritableUtils.writeString(out, dataType.toString());
  WritableUtils.writeVInt(out, isIndex ? 1 : 0); // boolean as vint: 1 = indexed column
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:7,代碼來源:ColumnInfo.java

示例12: writeString

import org.apache.hadoop.io.WritableUtils; //導入方法依賴的package包/類
/**
 * Writes a string as a typed bytes sequence: a one-byte STRING type code
 * followed by the string in {@code WritableUtils} format.
 * 
 * @param s the string to be written
 * @throws IOException if the underlying stream write fails
 */
public void writeString(String s) throws IOException {
  out.write(Type.STRING.code);       // type marker byte
  WritableUtils.writeString(out, s); // payload
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:11,代碼來源:TypedBytesOutput.java


注:本文中的org.apache.hadoop.io.WritableUtils.writeString方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。