

Java FloatWritable.set Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.FloatWritable.set. If you are wondering how FloatWritable.set is used in practice, or are looking for concrete examples of calling it, the curated code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.FloatWritable.


The following presents 11 code examples of the FloatWritable.set method, sorted by popularity by default.
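
Before looking at the project excerpts, here is a minimal standalone sketch, not taken from any of the projects below, of what FloatWritable.set does: it overwrites the float wrapped by an existing FloatWritable, which is why the excerpts reuse a single instance inside loops instead of allocating a new object per record. The class name FloatWritableSetDemo is made up purely for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableSetDemo {
    public static void main(String[] args) throws Exception {
        // set() mutates the wrapped value, so one instance can be reused for many records.
        FloatWritable writable = new FloatWritable();
        writable.set(3.14f);
        System.out.println(writable.get()); // prints 3.14

        // FloatWritable implements Writable, so the value round-trips through
        // Hadoop's DataOutput/DataInput serialization.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writable.write(new DataOutputStream(bytes));

        FloatWritable copy = new FloatWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get()); // prints 3.14
    }
}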

Example 1: forwardModel

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
protected void forwardModel() throws HiveException {
    final IntWritable topicIdx = new IntWritable();
    final Text word = new Text();
    final FloatWritable score = new FloatWritable();

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = topicIdx;
    forwardObjs[1] = word;
    forwardObjs[2] = score;

    for (int k = 0; k < topics; k++) {
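        // Emit one output row per (topic index, word, score), reusing the three
        // Writable wrappers prepared above (comment added for clarity).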
        topicIdx.set(k);

        final SortedMap<Float, List<String>> topicWords = model.getTopicWords(k);
        for (Map.Entry<Float, List<String>> e : topicWords.entrySet()) {
            score.set(e.getKey());
            List<String> words = e.getValue();
            for (int i = 0; i < words.size(); i++) {
                word.set(words.get(i));
                forward(forwardObjs);
            }
        }
    }

    logger.info("Forwarded topic words each of " + topics + " topics");
}
 
Developer: apache, Project: incubator-hivemall, Lines: 27, Source: ProbabilisticTopicModelBaseUDTF.java

Example 2: reduce

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
public void reduce(FloatWritable key, Iterator<Text> values,
    OutputCollector<FloatWritable, Text> output, Reporter reporter)
    throws IOException {
  while (values.hasNext() && count < topN) {
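    // The top-N mapper conventionally emits negated scores so the sort order is descending;
    // negating again here restores the original score. count and topN are fields of the
    // enclosing reducer, not shown in this excerpt (comment added for clarity).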
    key.set(-key.get());
    output.collect(key, values.next());
    count++;
  }
}
 
Developer: jorcox, Project: GeoCrawler, Lines: 10, Source: CrawlDbReader.java

Example 3: forwardAsIntFeature

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
private void forwardAsIntFeature(@Nonnull final FactorizationMachineModel model,
        final int factors) throws HiveException {
    final IntWritable f_idx = new IntWritable(0);
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = f_idx;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;
    // W0
    f_idx.set(0);
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);

    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);

    for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
        final float[] vi = model.getV(i, false);
        if (vi == null) {
            continue;
        }
        f_idx.set(i);
        // set Wi
        final float w = model.getW(i);
        f_Wi.set(w);
        // set Vif
        for (int f = 0; f < factors; f++) {
            float v = vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 37, Source: FactorizationMachineUDTF.java

Example 4: forwardAsStringFeature

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
private void forwardAsStringFeature(@Nonnull final FMStringFeatureMapModel model,
        final int factors) throws HiveException {
    final Text feature = new Text();
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = feature;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;
    // W0
    feature.set("0");
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);

    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);

    for (Map.Entry<String, Entry> e : Fastutil.fastIterable(model.getMap())) {
        String i = e.getKey();
        assert (i != null);
        // set i
        feature.set(i);
        Entry entry = e.getValue();
        // set Wi
        f_Wi.set(entry.W);
        // set Vif
        final float[] Vi = entry.Vf;
        for (int f = 0; f < factors; f++) {
            float v = Vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 37, Source: FactorizationMachineUDTF.java

Example 5: create

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    size.value = SIZE;
    FloatWritable writable = new FloatWritable();
    writable.set(typeConverter.convertTo(Float.class, value));
    return writable;
}
 
Developer: HydAu, Project: Camel, Lines: 8, Source: HdfsWritableFactories.java

Example 6: testReadFloat

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer: HydAu, Project: Camel, Lines: 30, Source: HdfsConsumerTest.java

Example 7: testReadFloat

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer: HydAu, Project: Camel, Lines: 31, Source: HdfsConsumerTest.java

Example 8: next

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public boolean next(FloatWritable key, NullWritable value)
        throws IOException {
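  // Test RecordReader: returns true for the first ten calls (keys 0..9, assuming index
  // starts at 0); index is a field of the enclosing reader, not shown in this excerpt
  // (comment added for clarity).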
  key.set(index++);
  return index <= 10;
}
 
Developer: naver, Project: hadoop, Lines: 7, Source: TestPipeApplication.java

Example 9: close

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bu = new FloatWritable();
        final FloatWritable Bi = new FloatWritable();
        final Object[] forwardObj;
        if (updateMeanRating) {
            assert useBiasClause;
            float meanRating = model.getMeanRating();
            FloatWritable mu = new FloatWritable(meanRating);
            forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi, mu};
        } else {
            if (useBiasClause) {
                forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi};
            } else {
                forwardObj = new Object[] {idx, Pu, Qi};
            }
        }
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bu.set(model.getUserBias(i));
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        logger.info("Forwarded the prediction model of " + numForwarded
                + " rows. [totalErrors=" + cvState.getTotalErrors() + ", lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 59, Source: OnlineMatrixFactorizationUDTF.java

Example 10: close

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }

        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bi = useBiasClause ? new FloatWritable() : null;
        final Object[] forwardObj = new Object[] {idx, Pu, Qi, Bi};

        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        LOG.info("Forwarded the prediction model of " + numForwarded + " rows. [lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 46, Source: BPRMatrixFactorizationUDTF.java

Example 11: forwardModel

import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
protected void forwardModel() throws HiveException {
    this._model = null;
    this._fieldList = null;
    this._sumVfX = null;

    final int factors = _factors;
    final IntWritable idx = new IntWritable();
    final FloatWritable Wi = new FloatWritable(0.f);
    final FloatWritable[] Vi = HiveUtils.newFloatArray(factors, 0.f);
    final List<FloatWritable> ViObj = Arrays.asList(Vi);

    final Object[] forwardObjs = new Object[4];
    String modelId = HadoopUtils.getUniqueTaskIdString();
    forwardObjs[0] = new Text(modelId);
    forwardObjs[1] = idx;
    forwardObjs[2] = Wi;
    forwardObjs[3] = null; // Vi

    // W0
    idx.set(0);
    Wi.set(_ffmModel.getW0());
    forward(forwardObjs);

    final Entry entryW = new Entry(_ffmModel._buf, 1);
    final Entry entryV = new Entry(_ffmModel._buf, _ffmModel._factor);
    final float[] Vf = new float[factors];

    for (Int2LongMap.Entry e : Fastutil.fastIterable(_ffmModel._map)) {
        // set i
        final int i = e.getIntKey();
        idx.set(i);

        final long offset = e.getLongValue();
        if (Entry.isEntryW(i)) {// set Wi
            entryW.setOffset(offset);
            float w = entryW.getW(); // read Wi from entryW, whose offset was just set
            if (w == 0.f) {
                continue; // skip w_i=0
            }
            Wi.set(w);
            forwardObjs[2] = Wi;
            forwardObjs[3] = null;
        } else {// set Vif
            entryV.setOffset(offset);
            entryV.getV(Vf);
            for (int f = 0; f < factors; f++) {
                Vi[f].set(Vf[f]);
            }
            forwardObjs[2] = null;
            forwardObjs[3] = ViObj;
        }

        forward(forwardObjs);
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 57, Source: FieldAwareFactorizationMachineUDTF.java


Note: The org.apache.hadoop.io.FloatWritable.set examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and distribution and use should follow the corresponding project's license. Do not republish without permission.