This page collects typical usage examples of the Java method org.apache.hadoop.io.WritableUtils.writeString. If you are unsure what WritableUtils.writeString does or how to use it, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.WritableUtils.
The following presents 12 code examples of the WritableUtils.writeString method, sorted by popularity by default.
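Before diving into the examples, a minimal round trip may help clarify what writeString puts on the wire: a vint length prefix followed by the string's UTF-8 bytes, with null encoded as the single vint -1, and WritableUtils.readString reversing the process. The sketch below is illustrative and is not taken from the examples that follow.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

public class WriteStringRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    // vint length prefix + UTF-8 bytes; null is written as the vint -1
    WritableUtils.writeString(out, "hello hadoop");
    WritableUtils.writeString(out, null);

    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    System.out.println(WritableUtils.readString(in)); // hello hadoop
    System.out.println(WritableUtils.readString(in)); // null
  }
}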
Example 1: setupResponse
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Setup response for the IPC Call.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response,
                           Call call, Status status,
                           Writable rv, String errorClass, String error)
    throws IOException {
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.id);      // write call id
  out.writeInt(status.state); // write status
  if (status == Status.SUCCESS) {
    rv.write(out);
  } else {
    WritableUtils.writeString(out, errorClass);
    WritableUtils.writeString(out, error);
  }
  /*if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }*/
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
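A hypothetical client-side counterpart may make the layout clearer. The helper below is an assumption, not quoted source; it mirrors the writes in setupResponse and assumes Status.SUCCESS.state is 0.

private static void readResponse(DataInputStream in, Writable expectedValue)
    throws IOException {
  int callId = in.readInt(); // call id, written first
  int state = in.readInt();  // status, written second
  if (state == 0) {          // assumption: Status.SUCCESS.state == 0
    expectedValue.readFields(in); // success payload is a Writable
  } else {
    String errorClass = WritableUtils.readString(in);
    String error = WritableUtils.readString(in);
    System.err.println("call " + callId + " failed: "
        + errorClass + ": " + error);
  }
}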
Example 2: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * FileSystemGroup ::= #scheme (scheme #counter (key value)*)*
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVInt(out, map.size()); // #scheme
  for (Map.Entry<String, Object[]> entry : map.entrySet()) {
    WritableUtils.writeString(out, entry.getKey()); // scheme
    // #counter for the above scheme
    WritableUtils.writeVInt(out, numSetCounters(entry.getValue()));
    for (Object counter : entry.getValue()) {
      if (counter == null) continue;
      @SuppressWarnings("unchecked")
      FSCounter c = (FSCounter) ((Counter) counter).getUnderlyingCounter();
      WritableUtils.writeVInt(out, c.key.ordinal()); // key
      WritableUtils.writeVLong(out, c.getValue());   // value
    }
  }
}
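Reading the stream back requires walking the same grammar. A minimal readFields sketch mirroring write() above (not the actual Hadoop implementation; the comment marks where a real reader would rebuild its counters):

public void readFields(DataInput in) throws IOException {
  int numSchemes = WritableUtils.readVInt(in);    // #scheme
  for (int i = 0; i < numSchemes; i++) {
    String scheme = WritableUtils.readString(in); // scheme
    int numCounters = WritableUtils.readVInt(in); // #counter
    for (int j = 0; j < numCounters; j++) {
      int keyOrdinal = WritableUtils.readVInt(in); // key
      long value = WritableUtils.readVLong(in);    // value
      // a real implementation would look up or create the FSCounter
      // for (scheme, keyOrdinal) and set its value here
    }
  }
}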
Example 3: setupResponseOldVersionFatal
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Setup response for the IPC Call on Fatal Error from a
 * client that is using an old version of Hadoop.
 * The response is serialized using the previous protocol's response
 * layout.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response,
                                          Call call,
                                          Writable rv, String errorClass, String error)
    throws IOException {
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId);              // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS); // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
  if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
Example 4: setupResponseOldVersionFatal
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Setup response for the IPC Call on Fatal Error from a
 * client that is using an old version of Hadoop.
 * The response is serialized using the previous protocol's response
 * layout.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response,
                                          Call call,
                                          Writable rv, String errorClass, String error)
    throws IOException {
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId);              // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS); // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
Example 5: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public void write(DataOutput out) throws IOException {
  taskId.write(out);
  WritableUtils.writeVInt(out, idWithinJob);
  out.writeBoolean(isMap);
  WritableUtils.writeEnum(out, status);
  WritableUtils.writeString(out, taskTrackerHttp);
  WritableUtils.writeVInt(out, taskRunTime);
  WritableUtils.writeVInt(out, eventId);
}
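A symmetric readFields sketch, with field types inferred from the writes above; the enum class Status.class is an assumption based on the field name:

public void readFields(DataInput in) throws IOException {
  taskId.readFields(in);
  idWithinJob = WritableUtils.readVInt(in);
  isMap = in.readBoolean();
  status = WritableUtils.readEnum(in, Status.class); // assumed enum type
  taskTrackerHttp = WritableUtils.readString(in);
  taskRunTime = WritableUtils.readVInt(in);
  eventId = WritableUtils.readVInt(in);
}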
Example 6: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVLong(out, expiryDate);
  WritableUtils.writeVInt(out, keyId);
  WritableUtils.writeString(out, userId);
  WritableUtils.writeString(out, blockPoolId);
  WritableUtils.writeVLong(out, blockId);
  WritableUtils.writeVInt(out, modes.size());
  for (AccessMode aMode : modes) {
    WritableUtils.writeEnum(out, aMode);
  }
}
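The matching readFields, sketched by mirroring the field order above (AccessMode.class is inferred from the write loop, not quoted from the source):

public void readFields(DataInput in) throws IOException {
  expiryDate = WritableUtils.readVLong(in);
  keyId = WritableUtils.readVInt(in);
  userId = WritableUtils.readString(in);
  blockPoolId = WritableUtils.readString(in);
  blockId = WritableUtils.readVLong(in);
  int length = WritableUtils.readVInt(in);
  modes.clear();
  for (int i = 0; i < length; i++) {
    modes.add(WritableUtils.readEnum(in, AccessMode.class));
  }
}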
Example 7: writeWritable
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  WritableUtils.writeString(dos, w.getClass().getName());
  w.write(dos);
  dos.close();
  out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}
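A sketch of the reverse operation (assumed, not the quoted source): read the class name back with readString, instantiate it reflectively via org.apache.hadoop.util.ReflectionUtils, and let the instance deserialize itself.

public Writable readWritable(DataInputStream in) throws IOException {
  String className = WritableUtils.readString(in);
  try {
    Class<? extends Writable> cls =
        Class.forName(className).asSubclass(Writable.class);
    Writable w = ReflectionUtils.newInstance(cls, null); // no Configuration
    w.readFields(in);
    return w;
  } catch (ClassNotFoundException e) {
    throw new IOException("cannot load writable class " + className, e);
  }
}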
Example 8: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Write the index attributes.
 *
 * @param out the output to write to
 * @throws IOException
 */
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, indexType.toString());
  if (IndexType.isUserDefinedIndex(indexType)) {
    WritableUtils.writeVInt(out, indexRelations.size());
    // write index relations
    for (IndexRelationship r : indexRelations) {
      r.write(out);
    }
  } else {
    // write index family map
    writeFamilyMap(indexFamilyMap, out);
    writeFamilyMap(familyMap, out);
  }
}
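A hypothetical readFields mirroring the branches of write() above; IndexType.valueOf, the IndexRelationship no-arg constructor, and readFamilyMap are all assumptions standing in for the real helpers:

@Override public void readFields(DataInput in) throws IOException {
  indexType = IndexType.valueOf(WritableUtils.readString(in)); // assumed parse
  if (IndexType.isUserDefinedIndex(indexType)) {
    int size = WritableUtils.readVInt(in);
    indexRelations = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
      IndexRelationship r = new IndexRelationship(); // assumed constructor
      r.readFields(in);
      indexRelations.add(r);
    }
  } else {
    indexFamilyMap = readFamilyMap(in); // assumed helper
    familyMap = readFamilyMap(in);
  }
}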
Example 9: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, file.toString());
  WritableUtils.writeVLong(out, len);
}
Example 10: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public void write(DataOutput out) throws IOException {
  id.write(out);
  user.write(out);
  WritableUtils.writeString(out, jobSubmitDir.toString());
}
Example 11: write
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeCompressedByteArray(out, family);
  WritableUtils.writeCompressedByteArray(out, qualifier);
  WritableUtils.writeString(out, dataType.toString());
  WritableUtils.writeVInt(out, isIndex ? 1 : 0);
}
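The matching readFields sketch, inferred from the writes above (DataType.valueOf is an assumption for turning the stored string back into the enum):

@Override public void readFields(DataInput in) throws IOException {
  family = WritableUtils.readCompressedByteArray(in);
  qualifier = WritableUtils.readCompressedByteArray(in);
  dataType = DataType.valueOf(WritableUtils.readString(in)); // assumed parse
  isIndex = WritableUtils.readVInt(in) == 1;
}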
Example 12: writeString
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Writes a string as a typed bytes sequence.
 *
 * @param s the string to be written
 * @throws IOException
 */
public void writeString(String s) throws IOException {
  out.write(Type.STRING.code);
  WritableUtils.writeString(out, s);
}
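A sketch of the matching reader (assumed, not quoted): consume the one-byte type code, then hand the rest to WritableUtils.readString.

public String readString() throws IOException {
  in.readUnsignedByte(); // consume Type.STRING.code
  return WritableUtils.readString(in);
}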