This article collects typical usage examples of the Java method org.apache.hadoop.io.FloatWritable.set. If you have been wondering what FloatWritable.set does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.FloatWritable.
Below, 11 code examples of the FloatWritable.set method are shown, sorted by popularity by default.
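Before diving into the examples, here is a minimal, self-contained sketch of the method itself (the class name and values below are illustrative, not taken from the examples): FloatWritable is Hadoop's mutable box type for a float, and set(float) overwrites the held value in place, which is why a single instance can be reused across many records.

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableSetDemo {
    public static void main(String[] args) {
        // Allocate once; the no-arg constructor defaults to 0.0f.
        FloatWritable fw = new FloatWritable();
        fw.set(3.14f); // overwrite the held value in place
        System.out.println(fw.get()); // prints 3.14

        // Reuse the same instance for the next value -- the typical
        // Hadoop idiom for avoiding per-record allocations.
        fw.set(-1.5f);
        System.out.println(fw.get()); // prints -1.5
    }
}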
Example 1: forwardModel
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
protected void forwardModel() throws HiveException {
    final IntWritable topicIdx = new IntWritable();
    final Text word = new Text();
    final FloatWritable score = new FloatWritable();
    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = topicIdx;
    forwardObjs[1] = word;
    forwardObjs[2] = score;
    for (int k = 0; k < topics; k++) {
        topicIdx.set(k);
        final SortedMap<Float, List<String>> topicWords = model.getTopicWords(k);
        for (Map.Entry<Float, List<String>> e : topicWords.entrySet()) {
            score.set(e.getKey());
            List<String> words = e.getValue();
            for (int i = 0; i < words.size(); i++) {
                word.set(words.get(i));
                forward(forwardObjs);
            }
        }
    }
    logger.info("Forwarded the topic words of each of the " + topics + " topics");
}
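A pattern worth noting here, and in most of the examples that follow: the Writable objects (topicIdx, word, score) are allocated once outside the loops and mutated with set() on every iteration, while forward() is repeatedly handed the same object array. This avoids creating a short-lived object per forwarded row when emitting large models; the trade-off is that any consumer that retains a value must copy it first.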
Example 2: reduce
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
public void reduce(FloatWritable key, Iterator<Text> values,
        OutputCollector<FloatWritable, Text> output, Reporter reporter)
        throws IOException {
    // Restore the key's original sign once, before emitting any values;
    // negating inside the loop would flip the sign back on every other record.
    key.set(-key.get());
    while (values.hasNext() && count < topN) {
        output.collect(key, values.next());
        count++;
    }
}
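This relies on a common Hadoop top-N idiom: the map side presumably emits the score negated as the key, so the framework's ascending key sort delivers the highest scores first; the reducer then negates the key once to restore the original value before emitting it.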
Example 3: forwardAsIntFeature
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
private void forwardAsIntFeature(@Nonnull final FactorizationMachineModel model,
        final int factors) throws HiveException {
    final IntWritable f_idx = new IntWritable(0);
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);
    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = f_idx;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;
    // W0
    f_idx.set(0);
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);
    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);
    for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
        final float[] vi = model.getV(i, false);
        if (vi == null) {
            continue;
        }
        f_idx.set(i);
        // set Wi
        final float w = model.getW(i);
        f_Wi.set(w);
        // set Vif
        for (int f = 0; f < factors; f++) {
            float v = vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
Example 4: forwardAsStringFeature
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
private void forwardAsStringFeature(@Nonnull final FMStringFeatureMapModel model,
        final int factors) throws HiveException {
    final Text feature = new Text();
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);
    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = feature;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;
    // W0
    feature.set("0");
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);
    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);
    for (Map.Entry<String, Entry> e : Fastutil.fastIterable(model.getMap())) {
        String i = e.getKey();
        assert (i != null);
        // set i
        feature.set(i);
        Entry entry = e.getValue();
        // set Wi
        f_Wi.set(entry.W);
        // set Vif
        final float[] Vi = entry.Vf;
        for (int f = 0; f < factors; f++) {
            float v = Vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
Example 5: create
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    size.value = SIZE;
    FloatWritable writable = new FloatWritable();
    writable.set(typeConverter.convertTo(Float.class, value));
    return writable;
}
Example 6: testReadFloat
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
Example 7: testReadFloat
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
Example 8: next
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public boolean next(FloatWritable key, NullWritable value)
        throws IOException {
    key.set(index++);
    return index <= 10;
}
Example 9: close
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bu = new FloatWritable();
        final FloatWritable Bi = new FloatWritable();
        final Object[] forwardObj;
        if (updateMeanRating) {
            assert useBiasClause;
            float meanRating = model.getMeanRating();
            FloatWritable mu = new FloatWritable(meanRating);
            forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi, mu};
        } else {
            if (useBiasClause) {
                forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi};
            } else {
                forwardObj = new Object[] {idx, Pu, Qi};
            }
        }
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bu.set(model.getUserBias(i));
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        logger.info("Forwarded the prediction model of " + numForwarded
                + " rows. [totalErrors=" + cvState.getTotalErrors() + ", lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
Example 10: close
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bi = useBiasClause ? new FloatWritable() : null;
        final Object[] forwardObj = new Object[] {idx, Pu, Qi, Bi};
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        LOG.info("Forwarded the prediction model of " + numForwarded + " rows. [lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
Example 11: forwardModel
import org.apache.hadoop.io.FloatWritable; // import the package/class this method depends on
@Override
protected void forwardModel() throws HiveException {
    this._model = null;
    this._fieldList = null;
    this._sumVfX = null;

    final int factors = _factors;
    final IntWritable idx = new IntWritable();
    final FloatWritable Wi = new FloatWritable(0.f);
    final FloatWritable[] Vi = HiveUtils.newFloatArray(factors, 0.f);
    final List<FloatWritable> ViObj = Arrays.asList(Vi);

    final Object[] forwardObjs = new Object[4];
    String modelId = HadoopUtils.getUniqueTaskIdString();
    forwardObjs[0] = new Text(modelId);
    forwardObjs[1] = idx;
    forwardObjs[2] = Wi;
    forwardObjs[3] = null; // Vi

    // W0
    idx.set(0);
    Wi.set(_ffmModel.getW0());
    forward(forwardObjs);

    final Entry entryW = new Entry(_ffmModel._buf, 1);
    final Entry entryV = new Entry(_ffmModel._buf, _ffmModel._factor);
    final float[] Vf = new float[factors];
    for (Int2LongMap.Entry e : Fastutil.fastIterable(_ffmModel._map)) {
        // set i
        final int i = e.getIntKey();
        idx.set(i);
        final long offset = e.getLongValue();
        if (Entry.isEntryW(i)) {// set Wi
            entryW.setOffset(offset);
            float w = entryW.getW(); // read through entryW, whose offset was just set
            if (w == 0.f) {
                continue; // skip w_i=0
            }
            Wi.set(w);
            forwardObjs[2] = Wi;
            forwardObjs[3] = null;
        } else {// set Vif
            entryV.setOffset(offset);
            entryV.getV(Vf);
            for (int f = 0; f < factors; f++) {
                Vi[f].set(Vf[f]);
            }
            forwardObjs[2] = null;
            forwardObjs[3] = ViObj;
        }
        forward(forwardObjs);
    }
}