当前位置: 首页>>代码示例>>Java>>正文


Java WritableUtils.writeString方法代码示例

本文整理汇总了Java中org.apache.hadoop.io.WritableUtils.writeString方法的典型用法代码示例。如果您正苦于以下问题:Java WritableUtils.writeString方法的具体用法?Java WritableUtils.writeString怎么用?Java WritableUtils.writeString使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.io.WritableUtils的用法示例。


在下文中一共展示了WritableUtils.writeString方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: setupResponse

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Setup response for the IPC Call.
 *
 * Wire layout written into {@code response}: call id (int), status state
 * (int), then either the serialized return value (on SUCCESS) or the
 * error class name and error message as WritableUtils strings.
 * 
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException if writing to the response stream fails
 */
private void setupResponse(ByteArrayOutputStream response, 
                           Call call, Status status, 
                           Writable rv, String errorClass, String error) 
throws IOException {
  // Reuse the caller-supplied buffer; any previous response bytes are discarded.
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.id);                // write call id
  out.writeInt(status.state);           // write status

  if (status == Status.SUCCESS) {
    rv.write(out);
  } else {
    // Failure path: error class name first, then the message.
    WritableUtils.writeString(out, errorClass);
    WritableUtils.writeString(out, error);
  }
  // NOTE(review): SASL wrapping is disabled here (commented out) — confirm
  // this is intentional for this server variant.
  /*if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }*/
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
开发者ID:spafka,项目名称:spark_deep,代码行数:32,代码来源:Server.java

示例2: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this counter group.
 *
 * FileSystemGroup ::= #scheme (scheme #counter (key value)*)*
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Number of schemes in the group.
  WritableUtils.writeVInt(out, map.size());
  for (Map.Entry<String, Object[]> schemeEntry : map.entrySet()) {
    Object[] counters = schemeEntry.getValue();
    WritableUtils.writeString(out, schemeEntry.getKey()); // scheme name
    // Count of non-null counters recorded under this scheme.
    WritableUtils.writeVInt(out, numSetCounters(counters));
    for (Object slot : counters) {
      if (slot != null) {
        @SuppressWarnings("unchecked")
        FSCounter fsCounter = (FSCounter) ((Counter) slot).getUnderlyingCounter();
        WritableUtils.writeVInt(out, fsCounter.key.ordinal()); // key
        WritableUtils.writeVLong(out, fsCounter.getValue());   // value
      }
    }
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:20,代码来源:FileSystemCounterGroup.java

示例3: setupResponseOldVersionFatal

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Setup response for the IPC Call on Fatal Error from a 
 * client that is using old version of Hadoop.
 * The response is serialized using the previous protocol's response
 * layout.
 * 
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException if writing to the response stream fails
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response, 
                           Call call,
                           Writable rv, String errorClass, String error) 
throws IOException {
  // Legacy protocol's fatal marker: a status field of -1.
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId);                // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS);   // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);

  // Optionally wrap the payload for SASL-protected connections.
  if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
开发者ID:naver,项目名称:hadoop,代码行数:31,代码来源:Server.java

示例4: setupResponseOldVersionFatal

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Builds the fatal-error reply sent to a client speaking an older Hadoop
 * IPC protocol version. The bytes follow the previous protocol's response
 * layout: call id (int), a fatal status of -1 (int), then the error class
 * name and error message as WritableUtils strings.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException if writing to the response stream fails
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response, 
                           Call call,
                           Writable rv, String errorClass, String error) 
throws IOException {
  // Legacy clients recognize -1 in the status slot as a fatal error.
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream dataOut = new DataOutputStream(response);
  dataOut.writeInt(call.callId);               // call id
  dataOut.writeInt(OLD_VERSION_FATAL_STATUS);  // fatal status marker
  WritableUtils.writeString(dataOut, errorClass);
  WritableUtils.writeString(dataOut, error);
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:27,代码来源:Server.java

示例5: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this task-completion event: task attempt id, index within the
 * job, map/reduce flag, status enum, task tracker HTTP address, run time,
 * and event id, in that fixed order (must match the matching readFields).
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
public void write(DataOutput out) throws IOException {
  taskId.write(out); 
  WritableUtils.writeVInt(out, idWithinJob);
  out.writeBoolean(isMap);
  WritableUtils.writeEnum(out, status); 
  WritableUtils.writeString(out, taskTrackerHttp);
  WritableUtils.writeVInt(out, taskRunTime);
  WritableUtils.writeVInt(out, eventId);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:10,代码来源:TaskCompletionEvent.java

示例6: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this block token identifier: expiry date, key id, user id,
 * block pool id, block id, then the access-mode count followed by each
 * mode as an enum, in that fixed order (must match readFields).
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVLong(out, expiryDate);
  WritableUtils.writeVInt(out, keyId);
  WritableUtils.writeString(out, userId);
  WritableUtils.writeString(out, blockPoolId);
  WritableUtils.writeVLong(out, blockId);
  // Mode count precedes the modes themselves so readers know how many follow.
  WritableUtils.writeVInt(out, modes.size());
  for (AccessMode accessMode : modes) {
    WritableUtils.writeEnum(out, accessMode);
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:13,代码来源:BlockTokenIdentifier.java

示例7: writeWritable

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Writes a Writable as a typed bytes sequence: the instance's class name
 * (as a WritableUtils string) followed by its own serialized form, tagged
 * with the WRITABLE type code.
 *
 * @param w the Writable to serialize
 * @throws IOException if serialization or the underlying write fails
 */
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  // try-with-resources closes (and flushes) the stream before the bytes
  // are handed to the typed-bytes output below.
  try (DataOutputStream stream = new DataOutputStream(buffer)) {
    WritableUtils.writeString(stream, w.getClass().getName());
    w.write(stream);
  }
  out.writeBytes(buffer.toByteArray(), Type.WRITABLE.code);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:9,代码来源:TypedBytesWritableOutput.java

示例8: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes the index attributes: the index type name first, then either
 * the list of index relationships (for user-defined indexes) or the two
 * family maps.
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, indexType.toString());
  if (!IndexType.isUserDefinedIndex(indexType)) {
    // Built-in index: persist the two family maps and stop.
    writeFamilyMap(indexFamilyMap, out);
    writeFamilyMap(familyMap, out);
    return;
  }
  // User-defined index: relationship count, then each relationship.
  WritableUtils.writeVInt(out, indexRelations.size());
  for (IndexRelationship relationship : indexRelations) {
    relationship.write(out);
  }
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:21,代码来源:IndexTableRelation.java

示例9: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this record: the file path (as a string) followed by its
 * length, in that fixed order (must match the matching readFields).
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, file.toString());
  WritableUtils.writeVLong(out, len);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:5,代码来源:GenericMRLoadGenerator.java

示例10: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this job info: job id, user name, then the job submit
 * directory path as a string, in that fixed order (must match readFields).
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
public void write(DataOutput out) throws IOException {
  id.write(out);
  user.write(out);
  WritableUtils.writeString(out, jobSubmitDir.toString());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:6,代码来源:JobInfo.java

示例11: write

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Serializes this column descriptor: compressed family and qualifier byte
 * arrays, the data type name, then a 1/0 flag marking whether the column
 * is indexed, in that fixed order (must match readFields).
 *
 * @param out stream to serialize into
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeCompressedByteArray(out, family);
  WritableUtils.writeCompressedByteArray(out, qualifier);
  WritableUtils.writeString(out, dataType.toString());
  // Booleans are persisted as a vint flag: 1 = indexed, 0 = not indexed.
  final int indexFlag = isIndex ? 1 : 0;
  WritableUtils.writeVInt(out, indexFlag);
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:7,代码来源:ColumnInfo.java

示例12: writeString

import org.apache.hadoop.io.WritableUtils; //导入方法依赖的package包/类
/**
 * Writes a string as a typed bytes sequence: the STRING type code byte
 * followed by the string in WritableUtils format.
 * 
 * @param s the string to be written
 * @throws IOException if writing to the underlying stream fails
 */
public void writeString(String s) throws IOException {
  out.write(Type.STRING.code);
  WritableUtils.writeString(out, s);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:11,代码来源:TypedBytesOutput.java


注:本文中的org.apache.hadoop.io.WritableUtils.writeString方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。