本文整理汇总了Java中com.hazelcast.mapreduce.KeyValueSource.fromMap方法的典型用法代码示例。如果您正苦于以下问题:Java KeyValueSource.fromMap方法的具体用法?Java KeyValueSource.fromMap怎么用?Java KeyValueSource.fromMap使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.hazelcast.mapreduce.KeyValueSource
的用法示例。
在下文中一共展示了KeyValueSource.fromMap方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: execute
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
@Override
public void execute(HazelcastInstance hazelcastInstance)
        throws Exception {
    // Look up the shared JobTracker and the IMap that holds the salary records.
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IMap<String, SalaryYear> salaries = hazelcastInstance.getMap("salaries");

    // Wrap the map as a MapReduce input source and assemble the job pipeline.
    KeyValueSource<String, SalaryYear> input = KeyValueSource.fromMap(salaries);
    Job<String, SalaryYear> salaryJob = tracker.newJob(input);

    JobCompletableFuture<Integer> pending = salaryJob
            .mapper(new SalarySumMapper())
            .combiner(new SalarySumCombinerFactory())
            .reducer(new SalarySumReducerFactory())
            .submit(new SalarySumCollator());

    // Block until the distributed computation completes, then print the total.
    System.out.println("Salary sum: " + pending.get());
}
示例2: execute
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
/**
 * Seeds the "inputs" IMap with two random samples and runs an inverted
 * MapReduce job over it, printing the collated result.
 *
 * @param hazelcastInstance cluster member used to obtain the tracker and map
 * @throws Exception if the job submission or result retrieval fails
 */
public void execute(HazelcastInstance hazelcastInstance)
        throws Exception {
    JobTracker jobTracker = hazelcastInstance.getJobTracker("default");
    IMap<String, Double> map = hazelcastInstance.getMap("inputs");

    // Autoboxing replaces the Double(double) constructor, which has been
    // deprecated since Java 9 (Double.valueOf / autoboxing is the idiom).
    map.put("EventA", Math.random() * 99 + 1);
    map.put("EventB", Math.random() * 99 + 1);

    KeyValueSource<String, Double> source = KeyValueSource.fromMap(map);
    Job<String, Double> job = jobTracker.newJob(source);

    // Map -> combine -> reduce -> collate into a list of key/value tuples.
    JobCompletableFuture<List<KeyValueTuple>> future = job
            .mapper(new InvertedMapper())
            .combiner(new InvertedCombinerFactory())
            .reducer(new InvertedReducerFactory())
            .submit(new InvertedCollator());

    // Block for the result and pretty-print it.
    System.out.println("Result: " + ToStringPrettyfier.toString(future.get()));
}
示例3: predict
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
@Override
public Collection<Classification> predict(Collection<? extends Object> data) throws Exception {
    // Refresh the training-data snapshot before launching the distributed job.
    this.trainingdata = getTrainingdata();

    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    KeyValueSource<Map<String, Serializable>, Classification> input =
            KeyValueSource.fromMap(this.trainingdata);
    Job<Map<String, Serializable>, Classification> classificationJob = tracker.newJob(input);

    // All phases share the same algorithm options; the mapper also carries the
    // query data to classify.
    JobCompletableFuture<List<Classification>> pending = classificationJob
            .mapper(new DistanceBasedClassificationAlgorithmMapper(this.options,data))
            .combiner(new DistanceBasedClassificationAlgorithmCombinerFactory(this.options))
            .reducer(new DistanceBasedClassificationAlgorithmReducerFactory(this.options))
            .submit(new DistanceBasedClassificationAlgorithmCollator(this.options));

    // Block until the cluster-wide prediction finishes.
    return pending.get();
}
示例4: predict
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
// Runs a user-based collaborative-filtering prediction as a distributed
// MapReduce job over the training data map.
@Override
public Collection<Classification> predict(Collection<? extends Object> data) throws Exception {
// Refresh the training-data snapshot before launching the job.
this.trainingdata = getTrainingdata();
JobTracker jobTracker = hazelcastInstance.getJobTracker("default");
KeyValueSource<ByteHolder, ClassifiedFeature> source = KeyValueSource.fromMap(this.trainingdata);
Job<ByteHolder, ClassifiedFeature> job = jobTracker.newJob(source);
//
// NOTE(review): the mapper is the UserBasedCollaborativeFiltering variant but
// the combiner/reducer/collator are the DistanceBasedClassification factories —
// presumably shared on purpose, but this looks like it may be a copy-paste from
// the distance-based predict(); confirm the intended factories.
JobCompletableFuture<List<Classification>> future = job //
.mapper(new UserBasedCollaborativeFilteringRecommendationAlgorithmMapper(this.options,data)) //
.combiner(new DistanceBasedClassificationAlgorithmCombinerFactory(this.options)) //
.reducer(new DistanceBasedClassificationAlgorithmReducerFactory(this.options)) //
.submit(new DistanceBasedClassificationAlgorithmCollator(this.options));
// Block until the cluster-wide prediction finishes.
return future.get();
}
开发者ID:bgokden,项目名称:predictblty,代码行数:18,代码来源:UserBasedCollaborativeFilteringRecommendationAlgorithm.java
示例5: mapReduceAverage
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
/**
 * Computes the average of the values in the "numbers" IMap with a
 * MapReduce job, attaching a callback that reports completion.
 */
private static double mapReduceAverage(HazelcastInstance hazelcastInstance)
        throws Exception {
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IMap<String, String> numbers = hazelcastInstance.getMap("numbers");

    KeyValueSource<String, String> input = KeyValueSource.fromMap(numbers);
    Job<String, String> averagingJob = tracker.newJob(input);

    ICompletableFuture<Double> pending = averagingJob
            .mapper(new TokenizerMapper())
            .combiner(new NumberCountCombinerFactory())
            .reducer(new NumberCountAndOpReducerFactory())
            .submit(new AverageCollator());

    // Notify on completion, then block for the collated average.
    pending.andThen(finishInfoCallback());
    return pending.get();
}
示例6: mapReduce
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
/**
 * Runs a classic word-count MapReduce over the "articles" IMap and
 * returns the per-word counts.
 */
private static Map<String, Long> mapReduce(HazelcastInstance hazelcastInstance)
        throws Exception {
    // Look up the named JobTracker and the input map.
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IMap<String, String> articles = hazelcastInstance.getMap("articles");

    // Build the job from a KeyValueSource over the IMap.
    KeyValueSource<String, String> input = KeyValueSource.fromMap(articles);
    Job<String, String> wordCountJob = tracker.newJob(input);

    // mapper -> combiner -> reducer; no collator, so the raw Map is returned.
    ICompletableFuture<Map<String, Long>> pending = wordCountJob
            .mapper(new TokenizerMapper())
            .combiner(new WordCountCombinerFactory())
            .reducer(new WordCountReducerFactory())
            .submit();

    // Attach the completion callback, then block for the result.
    pending.andThen(buildCallback());
    return pending.get();
}
示例7: mapReduceCollate
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
/**
 * Runs a word-count MapReduce over the "articles" IMap and collates the
 * per-word counts into a single total via {@code WordCountCollator}.
 */
private static long mapReduceCollate(HazelcastInstance hazelcastInstance)
        throws Exception {
    // Look up the named JobTracker and the input map.
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IMap<String, String> articles = hazelcastInstance.getMap("articles");

    // Build the job from a KeyValueSource over the IMap.
    KeyValueSource<String, String> input = KeyValueSource.fromMap(articles);
    Job<String, String> collatedJob = tracker.newJob(input);

    // The collator folds the reduced Map<String, Long> down to one long.
    ICompletableFuture<Long> pending = collatedJob
            .mapper(new TokenizerMapper())
            .combiner(new WordCountCombinerFactory())
            .reducer(new WordCountReducerFactory())
            .submit(new WordCountCollator());

    // Block for the collated total.
    return pending.get();
}
示例8: main
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
public static void main(String[] args)
        throws Exception {
    // Spin up a three-node cluster for the demo.
    HazelcastInstance hazelcastInstance = buildCluster(3);
    try {
        // Load the sample text into the input map.
        fillMapWithData(hazelcastInstance);

        JobTracker tracker = hazelcastInstance.getJobTracker("default");
        IMap<String, String> articles = hazelcastInstance.getMap(MAP_NAME);
        KeyValueSource<String, String> input = KeyValueSource.fromMap(articles);
        Job<String, String> wordCountJob = tracker.newJob(input);

        ICompletableFuture<Map<String, Integer>> pending = wordCountJob
                .mapper(new TokenizerMapper())
                // Activate Combiner to add combining phase!
                // .combiner(new WordcountCombinerFactory())
                .reducer(new WordcountReducerFactory())
                .submit();

        // Block for the word counts and pretty-print them.
        System.out.println(ToStringPrettyfier.toString(pending.get()));
    } finally {
        // Always tear the cluster down, even if the job fails.
        Hazelcast.shutdownAll();
    }
}
示例9: mapReduce
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
@TimeStep(prob = 0.5)
public void mapReduce(ThreadState state) throws Exception {
    // Each worker thread uses its own tracker, keyed by thread name + test name.
    JobTracker tracker = targetInstance.getJobTracker(Thread.currentThread().getName() + name);
    KeyValueSource<Integer, Employee> input = KeyValueSource.fromMap(map);
    Job<Integer, Employee> employeeJob = tracker.newJob(input);

    ICompletableFuture<Map<Integer, Set<Employee>>> pending = employeeJob
            .mapper(new ModIdMapper(2))
            .combiner(new RangeIdCombinerFactory(10, 30))
            .reducer(new IdReducerFactory(10, 20, 30))
            .submit();

    // Every surviving employee id must be even, lie within [10, 30], and not
    // equal one of the reducer-filtered values 10/20/30.
    Map<Integer, Set<Employee>> grouped = pending.get();
    for (Set<Employee> bucket : grouped.values()) {
        for (Employee employee : bucket) {
            assertTrue(employee.getId() % 2 == 0);
            assertTrue(employee.getId() >= 10 && employee.getId() <= 30);
            assertTrue(employee.getId() != 10);
            assertTrue(employee.getId() != 20);
            assertTrue(employee.getId() != 30);
        }
    }
    state.operationCounter.mapReduce++;
}
示例10: main
import com.hazelcast.mapreduce.KeyValueSource; //导入方法依赖的package包/类
// Demo entry point: runs a word-count MapReduce with a collator (top-10 style
// result) and prints the outcome from an asynchronous callback.
public static void main(String[] args)
throws Exception {
// Prepare Hazelcast cluster
HazelcastInstance hazelcastInstance = buildCluster(3);
try {
// Read data
fillMapWithData(hazelcastInstance);
JobTracker tracker = hazelcastInstance.getJobTracker(TRACKER_NAME);
IMap<String, String> map = hazelcastInstance.getMap(MAP_NAME);
KeyValueSource<String, String> source = KeyValueSource.fromMap(map);
Job<String, String> job = tracker.newJob(source);
final JobCompletableFuture<List<Map.Entry<String, Integer>>> future = job
.mapper(new TokenizerMapper())
// Activate Combiner to add combining phase!
// .combiner(new WordcountCombinerFactory())
.reducer(new WordcountReducerFactory())
// .submit();
// add collator for sorting and top10
.submit(new WordcountCollator());
// Print the collated result asynchronously; failures are silently ignored here.
future.andThen(new ExecutionCallback<List<Map.Entry<String, Integer>>>() {
@Override public void onResponse(List<Map.Entry<String, Integer>> response) {
System.out.println(ToStringPrettyfier.toString(response));
}
@Override public void onFailure(Throwable t) {
}
});
//System.out.println(ToStringPrettyfier.toString(future.get()));
} finally {
// Shutdown cluster
// NOTE(review): shutdownAll() is commented out, so the cluster is never torn
// down — presumably left running so the async callback can print before the
// JVM exits, but confirm this is intentional (it leaks the cluster otherwise).
//Hazelcast.shutdownAll();
}
}