This article collects typical usage examples of the Java method org.apache.hadoop.io.WritableUtils.writeString. If you are wondering what WritableUtils.writeString does, how to call it, or what it looks like in real code, the curated examples below should help. You can also read further about the enclosing class, org.apache.hadoop.io.WritableUtils.
Below are 12 code examples of WritableUtils.writeString, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
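Before the examples, a minimal round trip shows the method's wire behavior. This is a self-contained sketch assuming hadoop-common is on the classpath; the class name WriteStringRoundTrip is ours. writeString emits the string's UTF-8 byte length as a 4-byte int followed by the raw bytes (null is encoded as length -1), and WritableUtils.readString is the matching reader.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class WriteStringRoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize: length-prefixed UTF-8 bytes.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buffer);
    WritableUtils.writeString(out, "hello, writable");
    out.flush();

    // Deserialize with the matching reader.
    DataInputStream in = new DataInputStream(
        new ByteArrayInputStream(buffer.toByteArray()));
    System.out.println(WritableUtils.readString(in)); // hello, writable
  }
}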
Example 1: setupResponse
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * Setup response for the IPC Call.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param status {@link Status} of the IPC call
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponse(ByteArrayOutputStream response,
    Call call, Status status,
    Writable rv, String errorClass, String error)
    throws IOException {
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.id);        // write call id
  out.writeInt(status.state);   // write status
  if (status == Status.SUCCESS) {
    rv.write(out);
  } else {
    WritableUtils.writeString(out, errorClass);
    WritableUtils.writeString(out, error);
  }
  /*if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }*/
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
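For context, the client-side inverse of this layout reads the call id and the status, then branches: a Writable payload on success, or two strings (error class, then message) on failure. The helper below is only an illustrative sketch, not Hadoop's actual client code (the real decoding lives in org.apache.hadoop.ipc.Client); readResponse and its error handling are our assumptions.

// Illustrative sketch: decode a response produced by setupResponse above.
private Writable readResponse(DataInput in, Writable value) throws IOException {
  int callId = in.readInt();          // matches a pending call on the client
  int state = in.readInt();           // Status.state written by the server
  if (state == Status.SUCCESS.state) {
    value.readFields(in);             // successful call: the Writable payload
    return value;
  }
  // Failed call: the server wrote the error class, then the error message.
  String errorClass = WritableUtils.readString(in);
  String error = WritableUtils.readString(in);
  throw new RemoteException(errorClass, error);
}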
Example 2: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * FileSystemGroup ::= #scheme (scheme #counter (key value)*)*
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVInt(out, map.size()); // #scheme
  for (Map.Entry<String, Object[]> entry : map.entrySet()) {
    WritableUtils.writeString(out, entry.getKey()); // scheme
    // #counter for the above scheme
    WritableUtils.writeVInt(out, numSetCounters(entry.getValue()));
    for (Object counter : entry.getValue()) {
      if (counter == null) continue;
      @SuppressWarnings("unchecked")
      FSCounter c = (FSCounter) ((Counter) counter).getUnderlyingCounter();
      WritableUtils.writeVInt(out, c.key.ordinal()); // key
      WritableUtils.writeVLong(out, c.getValue());   // value
    }
  }
}
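A matching readFields makes the grammar above concrete: read the scheme count, then for each scheme its name, its counter count, and the (key, value) pairs. This sketch is modeled on Hadoop's FileSystemCounterGroup; the findCounter helper and the FileSystemCounter enum are assumed to be available in the surrounding class.

public void readFields(DataInput in) throws IOException {
  int numSchemes = WritableUtils.readVInt(in);      // #scheme
  FileSystemCounter[] enums = FileSystemCounter.values();
  for (int i = 0; i < numSchemes; ++i) {
    String scheme = WritableUtils.readString(in);   // scheme
    int numCounters = WritableUtils.readVInt(in);   // #counter
    for (int j = 0; j < numCounters; ++j) {
      findCounter(scheme, enums[WritableUtils.readVInt(in)]) // key
          .setValue(WritableUtils.readVLong(in));            // value
    }
  }
}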
Example 3: setupResponseOldVersionFatal
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * Setup response for the IPC Call on Fatal Error from a
 * client that is using an old version of Hadoop.
 * The response is serialized using the previous protocol's response
 * layout.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response,
    Call call,
    Writable rv, String errorClass, String error)
    throws IOException {
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId);              // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS); // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
  if (call.connection.useWrap) {
    wrapWithSasl(response, call);
  }
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
Example 4: setupResponseOldVersionFatal
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * Setup response for the IPC Call on Fatal Error from a
 * client that is using an old version of Hadoop.
 * The response is serialized using the previous protocol's response
 * layout.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response,
    Call call,
    Writable rv, String errorClass, String error)
    throws IOException {
  final int OLD_VERSION_FATAL_STATUS = -1;
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId);              // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS); // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
Example 5: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
public void write(DataOutput out) throws IOException {
  taskId.write(out);
  WritableUtils.writeVInt(out, idWithinJob);
  out.writeBoolean(isMap);
  WritableUtils.writeEnum(out, status);
  WritableUtils.writeString(out, taskTrackerHttp);
  WritableUtils.writeVInt(out, taskRunTime);
  WritableUtils.writeVInt(out, eventId);
}
Example 6: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVLong(out, expiryDate);
  WritableUtils.writeVInt(out, keyId);
  WritableUtils.writeString(out, userId);
  WritableUtils.writeString(out, blockPoolId);
  WritableUtils.writeVLong(out, blockId);
  WritableUtils.writeVInt(out, modes.size());
  for (AccessMode aMode : modes) {
    WritableUtils.writeEnum(out, aMode);
  }
}
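Because none of these fields are tagged, the matching readFields must consume them in exactly the order they were written. A hedged sketch of that inverse (field names taken from the write method above; the surrounding class is assumed to be a block token identifier with a mutable modes collection):

@Override
public void readFields(DataInput in) throws IOException {
  expiryDate = WritableUtils.readVLong(in);
  keyId = WritableUtils.readVInt(in);
  userId = WritableUtils.readString(in);
  blockPoolId = WritableUtils.readString(in);
  blockId = WritableUtils.readVLong(in);
  int length = WritableUtils.readVInt(in);   // number of access modes
  for (int i = 0; i < length; i++) {
    modes.add(WritableUtils.readEnum(in, AccessMode.class));
  }
}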
Example 7: writeWritable
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
public void writeWritable(Writable w) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  WritableUtils.writeString(dos, w.getClass().getName());
  w.write(dos);
  dos.close();
  out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}
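Writing the class name first is what makes the stream self-describing: a reader can reconstruct the Writable reflectively before asking it to deserialize itself. A hedged sketch of that inverse (the method name readWritable and the conf field are assumptions; ReflectionUtils is org.apache.hadoop.util.ReflectionUtils):

public Writable readWritable(DataInput in) throws IOException {
  String className = WritableUtils.readString(in);   // class name written first
  try {
    Class<? extends Writable> cls =
        Class.forName(className).asSubclass(Writable.class);
    Writable w = ReflectionUtils.newInstance(cls, conf); // needs a Configuration
    w.readFields(in);                                    // payload written second
    return w;
  } catch (ClassNotFoundException e) {
    throw new IOException("Cannot load Writable class " + className, e);
  }
}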
Example 8: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * Write index attributes.
 *
 * @param out
 * @throws IOException
 */
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, indexType.toString());
  if (IndexType.isUserDefinedIndex(indexType)) {
    WritableUtils.writeVInt(out, indexRelations.size());
    // write index relations
    for (IndexRelationship r : indexRelations) {
      r.write(out);
    }
  } else {
    // write index family map
    writeFamilyMap(indexFamilyMap, out);
    writeFamilyMap(familyMap, out);
  }
}
Example 9: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
public void write(DataOutput out) throws IOException {
  WritableUtils.writeString(out, file.toString());
  WritableUtils.writeVLong(out, len);
}
Example 10: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
public void write(DataOutput out) throws IOException {
  id.write(out);
  user.write(out);
  WritableUtils.writeString(out, jobSubmitDir.toString());
}
Example 11: write
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
@Override public void write(DataOutput out) throws IOException {
  WritableUtils.writeCompressedByteArray(out, family);
  WritableUtils.writeCompressedByteArray(out, qualifier);
  WritableUtils.writeString(out, dataType.toString());
  WritableUtils.writeVInt(out, isIndex ? 1 : 0);
}
Example 12: writeString
import org.apache.hadoop.io.WritableUtils; // import the package/class this method depends on
/**
 * Writes a string as a typed bytes sequence.
 *
 * @param s the string to be written
 * @throws IOException
 */
public void writeString(String s) throws IOException {
  out.write(Type.STRING.code);
  WritableUtils.writeString(out, s);
}
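The read side mirrors this: consume the type-code byte, then the string body. A minimal sketch under the assumption that the surrounding class exposes the same in stream and Type enum (in a real typed-bytes reader the type code is often consumed by a dispatching readType call before the body is read):

public String readString() throws IOException {
  int code = in.readUnsignedByte();      // type code written by writeString
  if (code != Type.STRING.code) {
    throw new IOException("expected STRING type code, got " + code);
  }
  return WritableUtils.readString(in);   // string body
}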