本文整理汇总了Java中org.apache.hadoop.io.Text.readString方法的典型用法代码示例。如果您正苦于以下问题:Java Text.readString方法的具体用法?Java Text.readString怎么用?Java Text.readString使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.io.Text
的用法示例。
在下文中一共展示了Text.readString方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
public void readFields(DataInput in) throws IOException {
  // Deserialize in the exact order written by write(): path, sizes,
  // flags, times, permission, ownership, then the optional symlink.
  this.path = new Path(Text.readString(in, Text.DEFAULT_MAX_LEN));
  this.length = in.readLong();
  this.isdir = in.readBoolean();
  this.block_replication = in.readShort();
  this.blocksize = in.readLong();
  this.modification_time = in.readLong();
  this.access_time = in.readLong();
  this.permission.readFields(in);
  this.owner = Text.readString(in, Text.DEFAULT_MAX_LEN);
  this.group = Text.readString(in, Text.DEFAULT_MAX_LEN);
  // A boolean flag indicates whether a symlink target follows.
  this.symlink = in.readBoolean()
      ? new Path(Text.readString(in, Text.DEFAULT_MAX_LEN))
      : null;
}
示例2: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
public void readFields(DataInput in) throws IOException {
  // Wire format: total length, then three length-prefixed arrays
  // (per-file lengths, file paths, start offsets) in that order.
  totLength = in.readLong();

  final int numLengths = in.readInt();
  lengths = new long[numLengths];
  for (int i = 0; i < numLengths; i++) {
    lengths[i] = in.readLong();
  }

  final int numPaths = in.readInt();
  paths = new Path[numPaths];
  for (int i = 0; i < numPaths; i++) {
    paths[i] = new Path(Text.readString(in));
  }

  final int numOffsets = in.readInt();
  startoffset = new long[numOffsets];
  for (int i = 0; i < numOffsets; i++) {
    startoffset[i] = in.readLong();
  }
}
示例3: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
/**
 * {@inheritDoc}
 *
 * Reads the is-new flag and the record's class name; lazily instantiates
 * the inner SqoopRecord on first use, then delegates field
 * deserialization to it. Note: if a record instance already exists, the
 * transmitted class name is ignored.
 */
public void readFields(DataInput in) throws IOException {
  this.isNew = in.readBoolean();
  String className = Text.readString(in);
  if (null == this.sqoopRecord) {
    // If we haven't already instantiated an inner SqoopRecord, do so here.
    try {
      // The cast is unavoidable with Configuration.getClassByName(); the
      // protocol guarantees className names a SqoopRecord subclass.
      @SuppressWarnings("unchecked")
      Class<? extends SqoopRecord> recordClass =
          (Class<? extends SqoopRecord>) config.getClassByName(className);
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class.newInstance(); any reflective failure (missing class, no
      // no-arg constructor, ctor exception) is wrapped as IOException.
      this.sqoopRecord = recordClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
  this.sqoopRecord.readFields(in);
}
示例4: readClass
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/** Reads and returns the class as written by {@link #writeClass(DataOutput, Class)} */
/** Reads and returns the class as written by {@link #writeClass(DataOutput, Class)} */
static Class<?> readClass(Configuration conf, DataInput in) throws IOException {
  // The vint is either a known class code or the NOT_ENCODED sentinel.
  final int code = (byte) WritableUtils.readVInt(in);
  if (code != NOT_ENCODED) {
    // Known class: the code indexes directly into the code table.
    return CODE_TO_CLASS.get(code);
  }
  // Unknown class: its name follows as a string; resolve via the conf.
  String className = Text.readString(in);
  try {
    return getClassByName(conf, className);
  } catch (ClassNotFoundException e) {
    LOG.error("Can't find class " + className, e);
    throw new IOException("Can't find class " + className, e);
  }
}
示例5: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
public void readFields(DataInput in) throws IOException {
  // An empty serialized string stands in for "no protocol".
  String p = Text.readString(in);
  protocol = p.isEmpty() ? null : p;
}
示例6: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
public void readFields(DataInput in) throws IOException {
  // Serialization format:
  //   boolean isExternal
  //   if true:  a string naming the external storage type, followed by
  //             storage-specific data. The only supported type is "lf"
  //             (LobFile), which serializes as: filename string, then a
  //             long-valued offset and a long-valued length.
  //   if false: readFieldsInternal() lets BlobRef/ClobRef deserialize
  //             however it sees fit.
  if (in.readBoolean()) {
    // External storage: drop any materialized data and read the locator.
    this.realData = null;
    String codeStr = Text.readString(in);
    if (!"lf".equals(codeStr)) {
      throw new IOException("Unsupported external LOB storage code: "
          + codeStr);
    }
    this.fileName = Text.readString(in);
    this.offset = in.readLong();
    this.length = in.readLong();
  } else {
    // Inline storage: no external locator applies.
    readFieldsInternal(in);
    this.fileName = null;
    this.offset = 0;
    this.length = 0;
  }
}
示例7: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  // Order mirrors write(): source path, owner, group, optional permission.
  this.src = new Path(Text.readString(in));
  this.owner = DistTool.readString(in);
  this.group = DistTool.readString(in);
  // A boolean flag marks whether a permission record follows.
  this.permission = in.readBoolean() ? FsPermission.read(in) : null;
}
示例8: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
public void readFields(DataInput in) throws IOException {
  // Fields arrive in the same order write() emits them: name, the
  // blacklist flag, then the reason and report strings.
  this.name = Text.readString(in);
  this.isBlacklisted = in.readBoolean();
  this.reasonForBlacklist = Text.readString(in);
  this.blacklistReport = Text.readString(in);
}
示例9: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
public void readFields(DataInput in) throws IOException {
  // Rebuild the split from its serialized path, offset, and length.
  // Host locations are not serialized, so they are reset to null.
  this.file = new Path(Text.readString(in));
  this.start = in.readLong();
  this.length = in.readLong();
  this.hosts = null;
}
示例10: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Deserializes the AccessControlList object
 */
@Override
public void readFields(DataInput in) throws IOException {
  // The ACL is one string of the form "<users> <groups>"; split at the
  // first space only, since either part may itself contain commas.
  buildACL(Text.readString(in).split(" ", 2));
}
示例11: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
@Override
/** {@inheritDoc} */
public void readFields(DataInput input) throws IOException {
  // Two strings, in write order: lower bound first, then upper bound.
  lowerBoundClause = Text.readString(input);
  upperBoundClause = Text.readString(input);
}
示例12: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
public void readFields(DataInput in) throws IOException {
  // Offset and length first, then a length-prefixed list of host names.
  from = in.readInt();
  length = in.readLong();
  final int numHosts = in.readInt();
  hosts = new String[numHosts];
  for (int i = 0; i < numHosts; i++) {
    hosts[i] = Text.readString(in);
  }
}
示例13: binaryProtocolStub
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Test stub that plays the task side of the Hadoop Pipes binary protocol
 * over a socket: consumes the RUN_MAP command and its arguments, reads the
 * key/value class names, drains incoming key/value pairs until a non-data
 * command arrives, then acknowledges with code 54 and closes the stream.
 *
 * NOTE(review): depends on initSoket()/closeSoket() (sic) and the
 * dataInput/dataOut streams being defined elsewhere in the enclosing test
 * class — confirm against that class before reuse.
 */
public void binaryProtocolStub() {
try {
// Open the socket and wire up dataInput/dataOut (helper defined elsewhere).
initSoket();
System.out.println("start OK");
// RUN_MAP.code
// should be 3
int answer = WritableUtils.readVInt(dataInput);
System.out.println("RunMap:" + answer);
// RUN_MAP is followed by the serialized input split...
TestPipeApplication.FakeSplit split = new TestPipeApplication.FakeSplit();
readObject(split, dataInput);
// ...and two more vints (consumed but unused here).
WritableUtils.readVInt(dataInput);
WritableUtils.readVInt(dataInput);
// end runMap
// get InputTypes
WritableUtils.readVInt(dataInput);
String inText = Text.readString(dataInput);
System.out.println("Key class:" + inText);
inText = Text.readString(dataInput);
System.out.println("Value class:" + inText);
@SuppressWarnings("unused")
int inCode = 0;
// read all data from sender and write to output
// Command code 4 marks a key/value pair; loop until anything else arrives.
while ((inCode = WritableUtils.readVInt(dataInput)) == 4) {
FloatWritable key = new FloatWritable();
NullWritable value = NullWritable.get();
readObject(key, dataInput);
System.out.println("value:" + key.get());
readObject(value, dataInput);
}
// Acknowledge completion to the peer with status code 54.
WritableUtils.writeVInt(dataOut, 54);
dataOut.flush();
dataOut.close();
} catch (Exception x) {
// Test stub: failures are printed rather than propagated.
x.printStackTrace();
} finally {
closeSoket();
}
}
示例14: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
public void readFields(DataInput in) throws IOException {
  // The JVM id deserializes itself, then the process id string follows.
  jvmId.readFields(in);
  pid = Text.readString(in);
}
示例15: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/** {@inheritDoc} */
public void readFields(DataInput input) throws IOException {
  // Read back exactly what write() emitted: lower bound, then upper bound.
  String lower = Text.readString(input);
  String upper = Text.readString(input);
  this.lowerBoundClause = lower;
  this.upperBoundClause = upper;
}