本文整理汇总了Java中org.apache.hadoop.io.DoubleWritable.set方法的典型用法代码示例。如果您正苦于以下问题:Java DoubleWritable.set方法的具体用法?Java DoubleWritable.set怎么用?Java DoubleWritable.set使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.io.DoubleWritable
的用法示例。
在下文中一共展示了DoubleWritable.set方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: write
import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Serializes this record to {@code dataOutput} in Hadoop Writable format.
 * Field order must stay in sync with the corresponding readFields
 * implementation: the wifi probe id (Text), eight int counters, then
 * three double metrics.
 *
 * @param dataOutput stream to write the serialized fields to
 * @throws IOException if the underlying stream fails
 */
public void write(DataOutput dataOutput) throws IOException {
    // A null probe id is serialized as the empty string so readers never see null.
    new Text(wifiProb == null ? "" : wifiProb).write(dataOutput);

    // One reusable buffer for all integer counters; the array fixes the wire order.
    IntWritable intBuffer = new IntWritable();
    int[] counters = {
        inNoOutWifi, inNoOutStore,
        outNoInWifi, outNoInStore,
        inAndOutWifi, inAndOutStore,
        stayInWifi, stayInStore
    };
    for (int counter : counters) {
        intBuffer.set(counter);
        intBuffer.write(dataOutput);
    }

    // Same pattern for the floating-point metrics.
    DoubleWritable doubleBuffer = new DoubleWritable();
    double[] metrics = { jumpRate, deepVisit, inStoreRate };
    for (double metric : metrics) {
        doubleBuffer.set(metric);
        doubleBuffer.write(dataOutput);
    }
}
示例2: create
import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Builds a {@link DoubleWritable} from an arbitrary value, reporting the
 * fixed serialized size of a double through the {@code size} holder.
 *
 * @param value         the source value; converted to {@link Double} via the type converter
 * @param typeConverter converter used to coerce {@code value} to a Double
 * @param size          out-parameter receiving the serialized size ({@code SIZE})
 * @return a writable wrapping the converted double value
 */
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    // A serialized double always occupies a fixed number of bytes.
    size.value = SIZE;
    // Coerce the input to Double first, then wrap it in the writable.
    Double converted = typeConverter.convertTo(Double.class, value);
    DoubleWritable result = new DoubleWritable();
    result.set(converted);
    return result;
}
示例3: testReadDouble
import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
// Verifies that a DoubleWritable value written to a local sequence file is
// read back by the hdfs2 consumer endpoint and delivered as one message.
@Test
public void testReadDouble() throws Exception {
// Skip when the environment cannot run HDFS tests (e.g. unsupported OS).
if (!canTest()) {
return;
}
// Write a single (NullWritable, DoubleWritable) pair into a local sequence file.
final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
Configuration conf = new Configuration();
SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, DoubleWritable.class);
NullWritable keyWritable = NullWritable.get();
DoubleWritable valueWritable = new DoubleWritable();
double value = 3.1415926535;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
// Flush and close so the consumer sees a complete, readable file.
writer.sync();
writer.close();
// Expect exactly one exchange to arrive at the mock endpoint.
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
// Route: poll the sequence file from the local file system with no initial delay.
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
示例4: testReadDouble
import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
// Verifies that a DoubleWritable value written to a local sequence file is
// read back by the hdfs consumer endpoint and delivered as one message.
@Test
public void testReadDouble() throws Exception {
// Skip when the environment cannot run HDFS tests (e.g. unsupported OS).
if (!canTest()) {
return;
}
// Write a single (NullWritable, DoubleWritable) pair into a local sequence file.
final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
Configuration conf = new Configuration();
FileSystem fs1 = FileSystem.get(file.toUri(), conf);
SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, DoubleWritable.class);
NullWritable keyWritable = NullWritable.get();
DoubleWritable valueWritable = new DoubleWritable();
double value = 3.1415926535;
valueWritable.set(value);
writer.append(keyWritable, valueWritable);
// Flush and close so the consumer sees a complete, readable file.
writer.sync();
writer.close();
// Expect exactly one exchange to arrive at the mock endpoint.
MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
resultEndpoint.expectedMessageCount(1);
// Route: poll the sequence file from the local file system with no initial delay.
context.addRoutes(new RouteBuilder() {
public void configure() {
from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
}
});
context.start();
resultEndpoint.assertIsSatisfied();
}
示例5: readNextKeyValuePairs
import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Reads the next (key, value) pair from the sorted input stream into
 * {@code readKey}/{@code readValue}, re-injecting the zero entry that was
 * omitted from the sorted matrix representation when appropriate.
 *
 * When {@code contain0s} is set, the zero key with its count
 * ({@code numZeros}) is emitted exactly once, at the position where the
 * stream's keys first reach the non-negative range; the pair read from the
 * stream at that moment is stashed in {@code keyAfterZero}/{@code valueAfterZero}
 * and replayed on the following call (signaled by {@code justFound0}).
 *
 * @param readKey   out-parameter receiving the next key
 * @param readValue out-parameter receiving the next value
 * @return true if a pair was produced, false if the stream is exhausted
 * @throws IOException on stream errors other than end-of-file
 */
public boolean readNextKeyValuePairs(DoubleWritable readKey, IntWritable readValue)
throws IOException
{
boolean ret = true;
try {
if(contain0s && justFound0)
{
// Replay the pair that was stashed when the zero entry was injected.
readKey.set(keyAfterZero.get());
readValue.set(valueAfterZero.get());
// The zero entry has now been fully accounted for.
contain0s=false;
}else
{
// Normal path: read the next serialized pair from the stream.
readKey.readFields(currentStream);
readValue.readFields(currentStream);
}
} catch(EOFException e) {
// case in which zero is the maximum value in the matrix.
// The zero value from the last entry is not present in the input sorted matrix, but needs to be accounted for.
if (contain0s && !justFound0 ) {
justFound0=true;
readKey.set(0);
readValue.set((int)numZeros);
}
else {
// Stream exhausted and no pending zero entry: nothing more to return.
ret = false;
}
}
// If the key just read is the first non-negative one, emit the zero entry
// now and stash the read pair for the next call.
// NOTE(review): this check also runs when the EOF branch above set ret=false,
// in which case readKey still holds the previous call's key — confirm callers
// ignore the out-parameters when false is returned.
if(contain0s && !justFound0 && readKey.get()>=0)
{
justFound0=true;
keyAfterZero.set(readKey.get());
valueAfterZero.set(readValue.get());
readKey.set(0);
readValue.set((int)numZeros);
}
return ret;
}