

Java RecommendedItemsWritable Class Code Examples

This article collects and summarizes typical usages of the Java class org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable. If you have been wondering what RecommendedItemsWritable is for and how to use it, the curated class examples below should help.


The RecommendedItemsWritable class belongs to the org.apache.mahout.cf.taste.hadoop package. Four code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
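As a quick orientation before the examples: RecommendedItemsWritable wraps a List<RecommendedItem> so that per-user recommendation lists can flow through Hadoop jobs as values. Below is a minimal sketch, assuming only the constructor and the getRecommendedItems() accessor that the examples themselves use; the item IDs and scores are made up.

import java.util.Arrays;
import java.util.List;

import org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable;
import org.apache.mahout.cf.taste.impl.recommender.GenericRecommendedItem;
import org.apache.mahout.cf.taste.recommender.RecommendedItem;

public class RecommendedItemsWritableSketch {
    public static void main(String[] args) {
        // Wrap a list of (itemID, score) pairs; the values here are made up.
        List<RecommendedItem> items = Arrays.<RecommendedItem>asList(
                new GenericRecommendedItem(42L, 4.5f),
                new GenericRecommendedItem(17L, 3.2f));
        RecommendedItemsWritable writable = new RecommendedItemsWritable(items);

        // Read the wrapped recommendations back, as Example 2 below does.
        for (RecommendedItem item : writable.getRecommendedItems()) {
            System.out.println(item.getItemID() + " -> " + item.getValue());
        }
    }
}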

Example 1: main

import org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable; // import the required package/class
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    Configuration conf1 = new Configuration();

    Job job1 = new Job(conf1, "wiki job five");
    job1.setNumReduceTasks(1);
    job1.setJarByClass(Step5.class);
    job1.setInputFormatClass(SequenceFileInputFormat.class);
    job1.setMapperClass(WikiMapper5.class);
    job1.setMapOutputKeyClass(VarLongWritable.class);
    job1.setMapOutputValueClass(VectorWritable.class);

    job1.setCombinerClass(WiKiCombiner5.class);
    job1.setReducerClass(WiKiReducer5.class);
    job1.setOutputKeyClass(VarLongWritable.class);
    job1.setOutputValueClass(RecommendedItemsWritable.class);
    // job1.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileInputFormat.addInputPath(job1, new Path(INPUT_PATH));

    FileOutputFormat.setOutputPath(job1, new Path(OUTPUT_PATH));
    if (!job1.waitForCompletion(true)) {
        System.exit(1); // exit with an error status if the job failed
    }
}
 
Developer ID: bytegriffin, Project: recsys-offline, Code lines: 25, Source: Step5.java
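Because the setOutputFormatClass(SequenceFileOutputFormat.class) line is commented out, this job writes plain text: one line per user, rendered by RecommendedItemsWritable.toString(). A hypothetical parser for such a line follows, assuming the usual TextOutputFormat layout of the user ID, a tab, and a bracketed itemID:score list; the sample line and class name are made up.

public class RecommendationLineParser {

    // Parses one output line of the job above. The "userID<TAB>[id:score,...]"
    // layout is an assumption based on RecommendedItemsWritable.toString().
    public static void parse(String line) {
        String[] keyValue = line.split("\t", 2);
        long userID = Long.parseLong(keyValue[0]);
        // Strip the surrounding brackets, then split the itemID:score pairs.
        String body = keyValue[1].substring(1, keyValue[1].length() - 1);
        for (String pair : body.split(",")) {
            String[] parts = pair.split(":");
            System.out.printf("user %d -> item %s (score %s)%n", userID, parts[0], parts[1]);
        }
    }

    public static void main(String[] args) {
        parse("123\t[42:4.5,17:3.2]"); // made-up sample line
    }
}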

Example 2: map

import org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable; // import the required package/class
protected void map(VarLongWritable key, RecommendedItemsWritable value, Context context) throws IOException,
		InterruptedException
{
	// <UserId, [item,value]>
	// The items in the recommendations are already sorted, which makes writing them easy.

	List<RecommendedItem> recommendations = value.getRecommendedItems();
	int rank = 1;
	Put put = new Put(RowKeys.getRecommendationKey(collection, recommender, key.get()));
	for (RecommendedItem el : recommendations)
	{
		byte[] data = nl.gridline.zieook.model.Recommend.getRecommendation(el.getItemID(), rank, el.getValue());
		put.add(RECOMMENDATION_COLUMN, Bytes.toBytes(rank), data);
		rank++;
	}
	context.write(new LongWritable(key.get()), put);

}
 
Developer ID: beeldengeluid, Project: zieook, Code lines: 19, Source: UserRecommendationsStoreMap.java
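This map() emits (LongWritable, Put) pairs, which implies an HBase TableOutputFormat in the driver. The zieook driver itself is not shown here; the following is only a sketch of how such a map-only job could be wired, with the table name, class name, and input path as assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

public class StoreRecommendationsDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // "recommendations" is a placeholder table name, not from the source project.
        conf.set(TableOutputFormat.OUTPUT_TABLE, "recommendations");

        Job job = new Job(conf, "store user recommendations");
        job.setJarByClass(StoreRecommendationsDriver.class);
        job.setInputFormatClass(SequenceFileInputFormat.class);
        SequenceFileInputFormat.addInputPath(job, new Path(args[0])); // recommender output

        job.setMapperClass(UserRecommendationsStoreMap.class);
        job.setOutputFormatClass(TableOutputFormat.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Put.class);
        job.setNumReduceTasks(0); // map-only: each Put is written straight to HBase

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}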

Example 3: reduce

import org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable; // import the required package/class
public void reduce(VarLongWritable key, Iterable<VectorWritable> values, Context context) throws IOException, InterruptedException {

    int userID = (int) key.get();
    Vector rev = null;
    for (VectorWritable vec : values) {
        rev = rev == null ? vec.get() : rev.plus(vec.get());
    }
    Queue<RecommendedItem> topItems = new PriorityQueue<RecommendedItem>(recommendationsPerUser + 1,
            Collections.reverseOrder(ByValueRecommendedItemComparator.getInstance()));
    Iterator<Vector.Element> recommendationVectorIterator = rev.nonZeroes().iterator();
    while (recommendationVectorIterator.hasNext()) {
        Vector.Element e = recommendationVectorIterator.next();
        int index = e.index();
        System.out.println("Vector.Element.index: " + index); // debug: confirms the index is the item ID
        if (!hasItem(userID, String.valueOf(index))) {
            float value = (float) e.get();
            if (topItems.size() < recommendationsPerUser) {
                // only the index is set here
                topItems.add(new GenericRecommendedItem(index, value));
            } else if (value > topItems.peek().getValue()) {
                topItems.add(new GenericRecommendedItem(index, value));
                topItems.poll();
            }
        }
    }
    List<RecommendedItem> recom = new ArrayList<RecommendedItem>(topItems.size());
    recom.addAll(topItems);
    Collections.sort(recom, ByValueRecommendedItemComparator.getInstance());
    context.write(key, new RecommendedItemsWritable(recom));
}
 
Developer ID: bytegriffin, Project: recsys-offline, Code lines: 30, Source: Step5.java
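The reducer filters candidates with hasItem(userID, itemID) to skip items the user has already interacted with, but that helper is not part of the snippet. A purely hypothetical stand-in backed by an in-memory map is sketched below; the real recsys-offline implementation may well consult HBase or a database instead.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Hypothetical stand-in for the hasItem() helper referenced in Example 3.
public class SeenItemsFilter {

    // userID -> item IDs the user has already interacted with,
    // e.g. loaded once in the reducer's setup() method.
    private final Map<Integer, Set<String>> userItems = new HashMap<Integer, Set<String>>();

    public void addItem(int userID, String itemID) {
        Set<String> items = userItems.get(userID);
        if (items == null) {
            items = new HashSet<String>();
            userItems.put(userID, items);
        }
        items.add(itemID);
    }

    public boolean hasItem(int userID, String itemID) {
        Set<String> items = userItems.get(userID);
        return items != null && items.contains(itemID);
    }
}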

Example 4: writeRecommendedItems

import org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable; // import the required package/class
/**
 * Find the top entries in recommendationVector, map them to the real item IDs, and write back the result.
 */
private void writeRecommendedItems(VarLongWritable userID, Vector recommendationVector, Context context)
		throws IOException, InterruptedException
{
	Queue<RecommendedItem> topItems = new PriorityQueue<RecommendedItem>(recommendationsPerUser + 1,
			Collections.reverseOrder(ByValueRecommendedItemComparator.getInstance()));

	Iterator<Vector.Element> recommendationVectorIterator = recommendationVector.iterateNonZero();
	while (recommendationVectorIterator.hasNext())
	{
		Vector.Element element = recommendationVectorIterator.next();
		int index = element.index();

		long itemID = indexItemIDMap.get(index);
		if (itemsToRecommendFor == null || itemsToRecommendFor.contains(itemID))
		{
			float value = (float) element.get();
			if (!Float.isNaN(value))
			{
				if (topItems.size() < recommendationsPerUser)
				{
					topItems.add(new GenericRecommendedItem(itemID, value));
				}
				else if (value > topItems.peek().getValue())
				{
					topItems.add(new GenericRecommendedItem(itemID, value));
					topItems.poll();
				}
			}
		}
	}

	if (!topItems.isEmpty())
	{
		List<RecommendedItem> recommendations = new ArrayList<RecommendedItem>(topItems.size());
		recommendations.addAll(topItems);
		Collections.sort(recommendations, ByValueRecommendedItemComparator.getInstance());
		context.write(userID, new RecommendedItemsWritable(recommendations));
	}
}
 
Developer ID: beeldengeluid, Project: zieook, Code lines: 43, Source: AggregateAndRecommendReducer.java
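Examples 3 and 4 share the same top-N selection trick: a priority queue of capacity recommendationsPerUser + 1 ordered so the weakest retained item sits at the head, where it can be evicted cheaply whenever a better candidate arrives. Stripped of the Mahout types, the pattern looks like the self-contained demo below; all scores are made up.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Queue;

public class TopNDemo {
    public static void main(String[] args) {
        int n = 3;
        double[] scores = {4.5, 1.2, 3.3, 5.0, 2.8, 4.1};

        // Min-heap of at most n elements: the weakest retained score is at the head.
        Queue<Double> topItems = new PriorityQueue<Double>(n + 1);
        for (double score : scores) {
            if (topItems.size() < n) {
                topItems.add(score);
            } else if (score > topItems.peek()) {
                topItems.add(score);
                topItems.poll(); // evict the weakest of the n + 1 candidates
            }
        }

        // Sort the survivors into descending order for output.
        List<Double> result = new ArrayList<Double>(topItems);
        Collections.sort(result, Collections.reverseOrder());
        System.out.println(result); // prints [5.0, 4.5, 4.1]
    }
}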


Note: The org.apache.mahout.cf.taste.hadoop.RecommendedItemsWritable class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their original authors, who retain the copyright; please consult each project's license before using or redistributing the code, and do not reproduce this article without permission.