当前位置: 首页>>代码示例>>Java>>正文


Java DoubleWritable.set方法代码示例

本文整理汇总了Java中org.apache.hadoop.io.DoubleWritable.set方法的典型用法代码示例。如果您正苦于以下问题:Java DoubleWritable.set方法的具体用法?Java DoubleWritable.set怎么用?Java DoubleWritable.set使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.io.DoubleWritable的用法示例。


在下文中一共展示了DoubleWritable.set方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: write

import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
public void write(DataOutput dataOutput) throws IOException {
    // Serialize the probe identifier first; a null value is written as "".
    new Text(wifiProb == null ? "" : wifiProb).write(dataOutput);

    // All eight counters are streamed through one reusable IntWritable.
    // The order here is the wire format and must match the matching
    // readFields implementation exactly.
    IntWritable intBuffer = new IntWritable();
    int[] counters = {
            inNoOutWifi, inNoOutStore,
            outNoInWifi, outNoInStore,
            inAndOutWifi, inAndOutStore,
            stayInWifi, stayInStore
    };
    for (int counter : counters) {
        intBuffer.set(counter);
        intBuffer.write(dataOutput);
    }

    // The three ratio metrics follow, again in fixed order.
    DoubleWritable doubleBuffer = new DoubleWritable();
    double[] ratios = {jumpRate, deepVisit, inStoreRate};
    for (double ratio : ratios) {
        doubleBuffer.set(ratio);
        doubleBuffer.write(dataOutput);
    }
}
 
开发者ID:cuiods,项目名称:WIFIProbe,代码行数:35,代码来源:CustomerFlowElement.java

示例2: create

import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    // A serialized double always occupies a fixed number of bytes.
    size.value = SIZE;
    // Let the Camel type converter coerce the arbitrary payload to Double,
    // then wrap it in the Hadoop writable.
    Double converted = typeConverter.convertTo(Double.class, value);
    DoubleWritable result = new DoubleWritable();
    result.set(converted);
    return result;
}
 
开发者ID:HydAu,项目名称:Camel,代码行数:8,代码来源:HdfsWritableFactories.java

示例3: testReadDouble

import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
@Test
public void testReadDouble() throws Exception {
    // Skip entirely on platforms where the HDFS tests cannot run.
    if (!canTest()) {
        return;
    }

    // Write a single NullWritable/DoubleWritable record into a local
    // sequence file that the consumer route will pick up.
    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, DoubleWritable.class);
    DoubleWritable payload = new DoubleWritable();
    payload.set(3.1415926535);
    writer.append(NullWritable.get(), payload);
    writer.sync();
    writer.close();

    // Expect exactly that one record to reach the mock endpoint.
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
开发者ID:HydAu,项目名称:Camel,代码行数:30,代码来源:HdfsConsumerTest.java

示例4: testReadDouble

import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
@Test
public void testReadDouble() throws Exception {
    // Bail out when the environment cannot host the HDFS tests.
    if (!canTest()) {
        return;
    }

    // Prepare a local sequence file holding one NullWritable/DoubleWritable
    // pair; the explicit FileSystem is required by this writer overload.
    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem localFs = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(localFs, conf, file, NullWritable.class, DoubleWritable.class);
    DoubleWritable payload = new DoubleWritable();
    payload.set(3.1415926535);
    writer.append(NullWritable.get(), payload);
    writer.sync();
    writer.close();

    // The route should forward exactly that single record to the mock.
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
开发者ID:HydAu,项目名称:Camel,代码行数:31,代码来源:HdfsConsumerTest.java

示例5: readNextKeyValuePairs

import org.apache.hadoop.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Reads the next (key, value) pair from the sorted input stream into the
 * supplied writables, injecting a synthetic zero entry when the matrix
 * contains zeros that are absent from the serialized (sparse) data.
 *
 * State machine over two flags:
 *   contain0s  - zeros still need to be accounted for;
 *   justFound0 - the synthetic zero entry was emitted on a prior call and
 *                the buffered post-zero pair should be replayed now.
 *
 * @param readKey   out-parameter receiving the key (matrix value)
 * @param readValue out-parameter receiving the count for that key
 * @return true if a pair was produced, false when the stream is exhausted
 * @throws IOException on read failures other than normal end-of-stream
 */
public boolean readNextKeyValuePairs(DoubleWritable readKey, IntWritable readValue)
	throws IOException 
{
	boolean ret = true;
	
	try {
		// Replay the pair that was buffered when the zero entry was injected.
		if(contain0s && justFound0)
		{
			readKey.set(keyAfterZero.get());
			readValue.set(valueAfterZero.get());
			contain0s=false;
		}else
		{
			readKey.readFields(currentStream);
			readValue.readFields(currentStream);
		}
	} catch(EOFException e) {
		// case in which zero is the maximum value in the matrix. 
		// The zero value from the last entry is not present in the input sorted matrix, but needs to be accounted for.
		if (contain0s && !justFound0 ) {
			justFound0=true;
			readKey.set(0);
			readValue.set((int)numZeros);
		}
		else {
			ret = false;
		}
	}
	
	// First non-negative key encountered while zeros are still pending:
	// buffer this pair and emit the synthetic zero entry instead, so the
	// output remains sorted (zero precedes the buffered key).
	if(contain0s && !justFound0 && readKey.get()>=0)
	{
		justFound0=true;
		keyAfterZero.set(readKey.get());
		valueAfterZero.set(readValue.get());
		readKey.set(0);
		readValue.set((int)numZeros);
	}
	
	return ret;
}
 
开发者ID:apache,项目名称:systemml,代码行数:41,代码来源:ReadWithZeros.java


注:本文中的org.apache.hadoop.io.DoubleWritable.set方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。