本文整理汇总了Java中org.apache.hadoop.io.Text.readFields方法的典型用法代码示例。如果您正苦于以下问题:Java Text.readFields方法的具体用法?Java Text.readFields怎么用?Java Text.readFields使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.io.Text
的用法示例。
在下文中一共展示了Text.readFields方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
* Read (say, deserialize) an employee
*/
@Override
/**
 * Deserializes an employee from the stream in write order:
 * name, address, company, salary, department, manager flag.
 *
 * @param in stream positioned at a serialized employee record
 * @throws IOException if the stream ends before all fields are read
 */
@Override
public void readFields(DataInput in) throws IOException {
    // Allocate fresh Writables up front, then populate each one from the
    // stream in exactly the order they were serialized.
    this.name = new Text();
    this.address = new Text();
    this.company = new Text();
    this.salary = new DoubleWritable();
    this.department = new Text();
    this.isManager = new BooleanWritable();

    this.name.readFields(in);
    this.address.readFields(in);
    this.company.readFields(in);
    this.salary.readFields(in);
    this.department.readFields(in);
    this.isManager.readFields(in);
}
示例2: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Deserializes a tagged value: first a Text holding the payload's class
 * simple name, then the payload itself. Instantiates the matching
 * Writable before delegating the field read to it.
 *
 * @param dataInput stream positioned at the class-name tag
 * @throws IOException if the tag names an unsupported type or the read fails
 */
public void readFields(DataInput dataInput) throws IOException {
    Text classNameText = new Text();
    classNameText.readFields(dataInput);
    String className = classNameText.toString();
    Logger.println("value wrapper read class:" + className);
    if (className.equals(IntWritable.class.getSimpleName())) {
        value = new IntWritable();
    } else if (className.equals(NewOldCustomElement.class.getSimpleName())) {
        value = new NewOldCustomElement();
    } else if (className.equals(CustomerFlowElement.class.getSimpleName())) {
        value = new CustomerFlowElement();
    } else {
        // Unknown tag: the stream is unreadable from here on.
        throw new IOException("can not read fields " + className);
    }
    value.readFields(dataInput);
}
示例3: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Deserializes this record in write order: the wifi probe id (Text)
 * followed by three longs — hour, new-customer count, old-customer count.
 *
 * @param dataInput stream positioned at a serialized record
 * @throws IOException if the stream ends before all fields are read
 */
public void readFields(DataInput dataInput) throws IOException {
    Text probe = new Text();
    probe.readFields(dataInput);
    this.wifiProb = probe.toString();

    // One reusable LongWritable drains the three numeric fields in order.
    LongWritable longBuf = new LongWritable();
    longBuf.readFields(dataInput);
    this.hour = longBuf.get();
    longBuf.readFields(dataInput);
    this.newCustomer = (int) longBuf.get();
    longBuf.readFields(dataInput);
    this.oldCustomer = (int) longBuf.get();
}
示例4: readOnDiskMapOutput
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Reads every key/value pair from an on-disk map output (IFile format)
 * into the supplied lists as strings, in file order.
 *
 * @param conf   job configuration (consulted for optional stream decryption)
 * @param fs     filesystem holding the map output
 * @param path   location of the IFile to read
 * @param keys   receives each key's string form
 * @param values receives each value's string form
 * @throws IOException if the file cannot be opened or a record read fails
 */
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));
  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
      fs.getFileStatus(path).getLen(), null, null);
  try {
    DataInputBuffer keyBuff = new DataInputBuffer();
    DataInputBuffer valueBuff = new DataInputBuffer();
    Text key = new Text();
    Text value = new Text();
    while (reader.nextRawKey(keyBuff)) {
      key.readFields(keyBuff);
      keys.add(key.toString());
      reader.nextRawValue(valueBuff);
      value.readFields(valueBuff);
      values.add(value.toString());
    }
  } finally {
    // FIX: the original never closed the reader, leaking a file handle
    // (and the wrapping decryption stream) on every call. Closing the
    // reader is expected to close the underlying stream — NOTE(review):
    // confirm against the IFile.Reader implementation in use.
    reader.close();
  }
}
示例5: getSorted
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Materializes the Text records referenced by {@code indices}, decoding
 * each one from the shared byte buffer into a String.
 *
 * @return decoded strings, one per entry of {@code indices}, in that order
 * @throws IOException if a record cannot be decoded from the buffer
 */
public String[] getSorted() throws IOException {
    String[] sorted = new String[indices.length];
    Text text = new Text();
    DataInputBuffer buffer = new DataInputBuffer();
    for (int i = 0; i < sorted.length; ++i) {
        int idx = indices[i];
        // A record ends at the next offset, or at end-of-buffer (eob)
        // for the final record.
        int end = (idx + 1 == indices.length) ? eob : offsets[idx + 1];
        buffer.reset(bytes, offsets[idx], end - offsets[idx]);
        text.readFields(buffer);
        sorted[i] = text.toString();
    }
    return sorted;
}
示例6: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
* Read (say, deserialize) a company
*/
@Override
/**
 * Deserializes a company, which consists of a single Text name field.
 *
 * @param in stream positioned at a serialized company record
 * @throws IOException if the name cannot be read
 */
@Override
public void readFields(DataInput in) throws IOException {
    this.name = new Text();
    this.name.readFields(in);
}
示例7: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
* Read (say, deserialize) a department
*/
@Override
/**
 * Deserializes a department in write order: own name, parent
 * department name, company name — all Text fields.
 *
 * @param in stream positioned at a serialized department record
 * @throws IOException if the stream ends before all fields are read
 */
@Override
public void readFields(DataInput in) throws IOException {
    this.name = new Text();
    this.name.readFields(in);
    this.superDept = new Text();
    this.superDept.readFields(in);
    this.company = new Text();
    this.company.readFields(in);
}
示例8: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Deserializes this element in write order: the wifi probe id (Text),
 * eight int counters, then three double ratios.
 *
 * @param dataInput stream positioned at a serialized element
 * @throws IOException if the stream ends before all fields are read
 */
public void readFields(DataInput dataInput) throws IOException {
    Text probe = new Text();
    probe.readFields(dataInput);
    this.wifiProb = probe.toString();

    // Eight int counters, drained in serialization order through one
    // reusable IntWritable.
    IntWritable ints = new IntWritable();
    ints.readFields(dataInput);
    this.inNoOutWifi = ints.get();
    ints.readFields(dataInput);
    this.inNoOutStore = ints.get();
    ints.readFields(dataInput);
    this.outNoInWifi = ints.get();
    ints.readFields(dataInput);
    this.outNoInStore = ints.get();
    ints.readFields(dataInput);
    this.inAndOutWifi = ints.get();
    ints.readFields(dataInput);
    this.inAndOutStore = ints.get();
    ints.readFields(dataInput);
    this.stayInWifi = ints.get();
    ints.readFields(dataInput);
    this.stayInStore = ints.get();

    // Three double ratios, same single-buffer pattern.
    DoubleWritable doubles = new DoubleWritable();
    doubles.readFields(dataInput);
    this.jumpRate = doubles.get();
    doubles.readFields(dataInput);
    this.deepVisit = doubles.get();
    doubles.readFields(dataInput);
    this.inStoreRate = doubles.get();
}
示例9: readFields
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Deserializes this record in write order: the job id, the submitting
 * user (Text), and the job submit directory stored as a writable string.
 *
 * @param in stream positioned at a serialized record
 * @throws IOException if the stream ends before all fields are read
 */
public void readFields(DataInput in) throws IOException {
    this.id = new org.apache.hadoop.mapreduce.JobID();
    this.id.readFields(in);
    this.user = new Text();
    this.user.readFields(in);
    this.jobSubmitDir = new Path(WritableUtils.readString(in));
}
示例10: compare
import org.apache.hadoop.io.Text; //导入方法依赖的package包/类
/**
 * Compares two serialized Text log lines column-by-column according to
 * {@code sortSpec}; falls back to raw byte comparison when no spec is set
 * or when deserialization fails.
 *
 * @param b1 buffer holding the first serialized Text
 * @param s1 start offset of the first record
 * @param l1 length of the first record
 * @param b2 buffer holding the second serialized Text
 * @param s2 start offset of the second record
 * @param l2 length of the second record
 * @return negative/zero/positive per the first differing spec'd column
 */
public int compare(byte[] b1, int s1, int l1,
                   byte[] b2, int s2, int l2) {
  if (sortSpec == null) {
    return super.compare(b1, s1, l1, b2, s2, l2);
  }
  try {
    Text logline1 = new Text();
    logline1.readFields(new DataInputStream(new ByteArrayInputStream(b1, s1, l1)));
    String[] logColumns1 = logline1.toString().split(columnSeparator);
    Text logline2 = new Text();
    logline2.readFields(new DataInputStream(new ByteArrayInputStream(b2, s2, l2)));
    String[] logColumns2 = logline2.toString().split(columnSeparator);
    // (The old null-check on the split results was dead code:
    // String.split never returns null.)
    // Compare column-wise according to sortSpec.
    for (int i = 0; i < sortSpec.length; ++i) {
      int column = Integer.parseInt(sortSpec[i]);
      // BUG FIX: the original passed String.length() (char count) as the
      // byte length of the UTF-8 encoded column. For non-ASCII text the
      // encoding produces more bytes than chars, so the comparison used a
      // wrong, truncated span. Encode once and use the array's length.
      byte[] c1 = logColumns1[column].getBytes(Charset.forName("UTF-8"));
      byte[] c2 = logColumns2[column].getBytes(Charset.forName("UTF-8"));
      int comparison = super.compareBytes(c1, 0, c1.length, c2, 0, c2.length);
      if (comparison != 0) {
        return comparison;
      }
    }
  } catch (IOException ioe) {
    LOG.fatal("Caught " + ioe);
    return 0;
  }
  return 0;
}