

Java DoubleWritable.set Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.DoubleWritable.set. If you are wondering what DoubleWritable.set does, how to use it, or what it looks like in practice, the curated code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.DoubleWritable.


The following shows 5 code examples of the DoubleWritable.set method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
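Before turning to the examples, here is a minimal standalone sketch of the method itself: set(double) replaces the value held by an existing DoubleWritable, which is why the examples below reuse a single instance instead of allocating a new object per value. The class name DoubleWritableSetDemo is ours, used only for illustration.

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableSetDemo {
    public static void main(String[] args) {
        // Writables are mutable: one instance can be reused by calling set(...)
        DoubleWritable writable = new DoubleWritable();
        writable.set(3.14);
        System.out.println(writable.get());   // 3.14

        // set(...) overwrites the previous value, which is why Hadoop code
        // typically allocates one Writable and resets it for each record
        writable.set(2.71);
        System.out.println(writable.get());   // 2.71
    }
}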

Example 1: write

import org.apache.hadoop.io.DoubleWritable; // import the package/class this method depends on
public void write(DataOutput dataOutput) throws IOException {
    // write the wifi probe id, substituting an empty string for null
    Text text = new Text(wifiProb == null ? "" : wifiProb);
    text.write(dataOutput);

    // reuse a single IntWritable for all integer counters
    IntWritable intWritable = new IntWritable();

    intWritable.set(inNoOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inNoOutStore);
    intWritable.write(dataOutput);

    intWritable.set(outNoInWifi);
    intWritable.write(dataOutput);
    intWritable.set(outNoInStore);
    intWritable.write(dataOutput);

    intWritable.set(inAndOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inAndOutStore);
    intWritable.write(dataOutput);

    intWritable.set(stayInWifi);
    intWritable.write(dataOutput);
    intWritable.set(stayInStore);
    intWritable.write(dataOutput);

    // reuse a single DoubleWritable for the three rate metrics
    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.set(jumpRate);
    doubleWritable.write(dataOutput);
    doubleWritable.set(deepVisit);
    doubleWritable.write(dataOutput);
    doubleWritable.set(inStoreRate);
    doubleWritable.write(dataOutput);
}
 
Developer: cuiods, Project: WIFIProbe, Lines of code: 35, Source file: CustomerFlowElement.java
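Example 1 only shows the serialization half of the Writable contract. A matching readFields would restore the same fields in the same order; the following is a hedged sketch of what it might look like, using the field names visible in the snippet above (the actual readFields in the WIFIProbe project may differ).

public void readFields(DataInput dataInput) throws IOException {
    // read fields back in exactly the order write(...) emitted them
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    IntWritable intWritable = new IntWritable();
    intWritable.readFields(dataInput); inNoOutWifi   = intWritable.get();
    intWritable.readFields(dataInput); inNoOutStore  = intWritable.get();
    intWritable.readFields(dataInput); outNoInWifi   = intWritable.get();
    intWritable.readFields(dataInput); outNoInStore  = intWritable.get();
    intWritable.readFields(dataInput); inAndOutWifi  = intWritable.get();
    intWritable.readFields(dataInput); inAndOutStore = intWritable.get();
    intWritable.readFields(dataInput); stayInWifi    = intWritable.get();
    intWritable.readFields(dataInput); stayInStore   = intWritable.get();

    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput); jumpRate    = doubleWritable.get();
    doubleWritable.readFields(dataInput); deepVisit   = doubleWritable.get();
    doubleWritable.readFields(dataInput); inStoreRate = doubleWritable.get();
}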

Example 2: create

import org.apache.hadoop.io.DoubleWritable; // import the package/class this method depends on
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    size.value = SIZE;
    DoubleWritable writable = new DoubleWritable();
    writable.set(typeConverter.convertTo(Double.class, value));
    return writable;
}
 
Developer: HydAu, Project: Camel, Lines of code: 8, Source file: HdfsWritableFactories.java

Example 3: testReadDouble

import org.apache.hadoop.io.DoubleWritable; // import the package/class this method depends on
@Test
public void testReadDouble() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, DoubleWritable.class);
    NullWritable keyWritable = NullWritable.get();
    DoubleWritable valueWritable = new DoubleWritable();
    double value = 3.1415926535;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer: HydAu, Project: Camel, Lines of code: 30, Source file: HdfsConsumerTest.java
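Example 3 writes the double into a SequenceFile and then lets a Camel hdfs2 route consume it. If you only want to verify that the value passed to set(...) ended up in the file, reading it back directly with Hadoop's SequenceFile.Reader is enough; a minimal sketch, assuming the same local path as the test and the older fs/path/conf Reader constructor:

// read the (NullWritable, DoubleWritable) pair back without going through Camel
Configuration conf = new Configuration();
Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
FileSystem fs = FileSystem.get(file.toUri(), conf);
SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
try {
    NullWritable key = NullWritable.get();
    DoubleWritable value = new DoubleWritable();
    while (reader.next(key, value)) {
        System.out.println(value.get());   // 3.1415926535
    }
} finally {
    reader.close();
}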

Example 4: testReadDouble

import org.apache.hadoop.io.DoubleWritable; // import the package/class this method depends on
@Test
public void testReadDouble() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, DoubleWritable.class);
    NullWritable keyWritable = NullWritable.get();
    DoubleWritable valueWritable = new DoubleWritable();
    double value = 3.1415926535;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer: HydAu, Project: Camel, Lines of code: 31, Source file: HdfsConsumerTest.java

Example 5: readNextKeyValuePairs

import org.apache.hadoop.io.DoubleWritable; // import the package/class this method depends on
public boolean readNextKeyValuePairs(DoubleWritable readKey, IntWritable readValue)
    throws IOException
{
    boolean ret = true;

    try {
        if (contain0s && justFound0) {
            // the zero entry was emitted on the previous call; now return the
            // pair that was buffered when the zero was injected
            readKey.set(keyAfterZero.get());
            readValue.set(valueAfterZero.get());
            contain0s = false;
        } else {
            readKey.readFields(currentStream);
            readValue.readFields(currentStream);
        }
    } catch(EOFException e) {
        // case in which zero is the maximum value in the matrix.
        // The zero value from the last entry is not present in the input sorted matrix, but needs to be accounted for.
        if (contain0s && !justFound0) {
            justFound0 = true;
            readKey.set(0);
            readValue.set((int) numZeros);
        } else {
            ret = false;
        }
    }

    // keys are sorted: once a non-negative key appears, the implicit zero entry
    // must be emitted first, so buffer the pair just read and return (0, numZeros)
    if (contain0s && !justFound0 && readKey.get() >= 0) {
        justFound0 = true;
        keyAfterZero.set(readKey.get());
        valueAfterZero.set(readValue.get());
        readKey.set(0);
        readValue.set((int) numZeros);
    }

    return ret;
}
 
Developer: apache, Project: systemml, Lines of code: 41, Source file: ReadWithZeros.java
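Example 5 pairs set(...) with readFields(DataInput): whatever one side serializes with write(DataOutput), the other side restores with readFields. A small self-contained round trip over an in-memory stream, independent of SystemML's ReadWithZeros class, illustrates the mechanism (the class name WritableRoundTripDemo is ours):

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import java.io.*;

public class WritableRoundTripDemo {
    public static void main(String[] args) throws IOException {
        // write a (double, int) pair the way a writer task would
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        DoubleWritable keyOut = new DoubleWritable();
        IntWritable valueOut = new IntWritable();
        keyOut.set(0.75);
        valueOut.set(42);
        keyOut.write(out);
        valueOut.write(out);
        out.close();

        // read it back with readFields, as readNextKeyValuePairs does from currentStream
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        DoubleWritable keyIn = new DoubleWritable();
        IntWritable valueIn = new IntWritable();
        keyIn.readFields(in);
        valueIn.readFields(in);
        System.out.println(keyIn.get() + " -> " + valueIn.get());   // 0.75 -> 42
    }
}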


Note: The org.apache.hadoop.io.DoubleWritable.set examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their original authors; copyright of the source code remains with those authors, and distribution and use should follow the license of the corresponding project. Please do not reproduce without permission.