

Java MapWritable.keySet Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.MapWritable.keySet. If you are unsure what MapWritable.keySet does, what it is for, or how to call it, the curated examples below should help. You can also explore further usage examples of org.apache.hadoop.io.MapWritable itself.


Seven code examples of the MapWritable.keySet method are shown below, sorted by popularity.
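Before the project examples, here is a minimal, self-contained sketch (the class and variable names are ours, not from any project below) showing what keySet() gives you: a java.util.Set<Writable> view of the map's keys, which callers typically cast to the concrete key type such as Text.

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class KeySetDemo {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("host"), new Text("node-1"));
        map.put(new Text("port"), new Text("8020"));

        // keySet() exposes the keys as raw Writables; cast to the
        // concrete key type (here Text) when you need its contents.
        for (Writable key : map.keySet()) {
            Text name = (Text) key;
            System.out.println(name + " = " + map.get(key));
        }
    }
}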

Example 1: convertToMap

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import com.google.common.collect.Maps; // Guava
import java.util.Map;
private static Map<String, Map<String, String>> convertToMap(MapWritable inputMap) {
	Map<String, Map<String, String>> mapResult = Maps.newHashMap();
	
	for (Writable attributeText : inputMap.keySet()) {
		// Each value is itself a MapWritable: attribute -> (rule -> value).
		MapWritable partialInsideMap = (MapWritable) inputMap.get(attributeText);
		Map<String, String> partialOutputMap = Maps.newHashMap();
		
		for (Writable rule : partialInsideMap.keySet()) {
			Text regola = (Text) rule;                       // "regola" is Italian for rule
			Text valore = (Text) partialInsideMap.get(rule); // "valore" is Italian for value
			
			partialOutputMap.put(regola.toString(), valore.toString());
		}
		
		mapResult.put(((Text) attributeText).toString(), partialOutputMap);
	}
	
	return mapResult;
}
 
Author: disheng, Project: alfred-mpi, Lines: 20, Source: MapWritableConverter.java
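A quick illustration of what convertToMap produces. This fragment is hypothetical (the input values are ours), and since the method is private it would have to run inside MapWritableConverter itself:

MapWritable inner = new MapWritable();
inner.put(new Text("rule-1"), new Text("value-1"));

MapWritable outer = new MapWritable();
outer.put(new Text("title"), inner);

// Unwraps the nested MapWritable into plain java.util.Maps of Strings.
Map<String, Map<String, String>> plain = convertToMap(outer);
// plain now equals {title={rule-1=value-1}}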

Example 2: reduce

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import java.io.IOException;
@Override
public void reduce(Text key, Iterable<MapWritable> listOfMaps, Context context) throws IOException, InterruptedException {

	for (MapWritable partialResultMap : listOfMaps) {
		for (Writable attributeText : partialResultMap.keySet()) {
			MapWritable partialInsideMap = (MapWritable) partialResultMap.get(attributeText);
			MapWritable partialOutputMap = new MapWritable();
			
			// Copy keys and values into fresh Text objects so the accumulated
			// result does not share instances with the input map.
			for (Writable rule : partialInsideMap.keySet()) {
				Text regola = (Text) rule;
				Text valore = (Text) partialInsideMap.get(rule);
				
				partialOutputMap.put(new Text(regola.toString()), new Text(valore.toString()));
			}
			
			// result is a class-level MapWritable field, merged across all input maps.
			result.put((Text) attributeText, partialOutputMap);
		}
	}
	
	Text resultWrite = new Text(MapWritableConverter.toJsonText(result));
	
	context.write(key, resultWrite);
}
 
Author: disheng, Project: alfred-mpi, Lines: 24, Source: XPathApplierTextReducer.java

Example 3: reduce

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import java.io.IOException;
@Override
public void reduce(Text key, Iterable<MapWritable> listOfMaps, Context context) throws IOException, InterruptedException {

	for (MapWritable partialResultMap : listOfMaps) {
		for (Writable attributeText : partialResultMap.keySet()) {
			MapWritable partialInsideMap = (MapWritable) partialResultMap.get(attributeText);
			MapWritable partialOutputMap = new MapWritable();
			
			for (Writable rule : partialInsideMap.keySet()) {
				Text regola = (Text) rule;
				Text valore = (Text) partialInsideMap.get(rule);
				
				partialOutputMap.put(new Text(regola.toString()), new Text(valore.toString()));
			}
			
			// result is a class-level MapWritable field, merged across all input maps.
			result.put((Text) attributeText, partialOutputMap);
		}
	}
	
	// Unlike XPathApplierTextReducer (Example 2), this variant emits the
	// MapWritable itself rather than its JSON rendering.
	context.write(key, result);
}
 
Author: disheng, Project: alfred-mpi, Lines: 22, Source: XPathApplierReducer.java

Example 4: mapWritableToString

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.Writable;
import org.json.simple.JSONObject; // json-simple, implied by toJSONString()
/**
 * Method to convert a MapWritable into a JSON string
 * 
 */
@SuppressWarnings("unchecked")
public static String mapWritableToString(MapWritable map)
{
  // Convert to JSON and then write to a String - ensures JSON read-in compatibility
  JSONObject jsonObj = new JSONObject();
  for (Writable key : map.keySet())
  {
    jsonObj.put(key.toString(), map.get(key).toString());
  }

  return jsonObj.toJSONString();
}
 
Author: apache, Project: incubator-pirk, Lines: 17, Source: StringUtils.java
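A hypothetical call (the input values are ours), assuming the json-simple dependency that toJSONString() implies. Note that every value is serialized via toString(), so an IntWritable 42 becomes the string "42":

MapWritable map = new MapWritable();
map.put(new Text("query"), new Text("select *"));
map.put(new Text("count"), new IntWritable(42));

String json = StringUtils.mapWritableToString(map);
// e.g. {"query":"select *","count":"42"} -- key order is unspecified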

Example 5: write

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import java.io.IOException;
@Override
public void write(NullWritable key, Writable value) throws IOException {

  log.info("SolrRecordWriter -> write");

  if (solr == null) {
    solr = SolrOperations.getSolrServer(conf); // solr and conf are class-level fields
  }

  SolrInputDocument doc = new SolrInputDocument();
  if (value instanceof MapWritable) {
    MapWritable valueMap = (MapWritable) value;

    for (Writable keyWritable : valueMap.keySet()) {
      String fieldName = keyWritable.toString();
      // Look the value up by the original key object; wrapping fieldName in a
      // new Text (as the original code did) only works when every key is a Text.
      Object fieldValue = valueMap.get(keyWritable);
      // TODO: convert the object to the proper Solr schema field type
      doc.addField(fieldName, fieldValue.toString());
    }
  }
  else if (value instanceof SolrInputDocument) {
    // The original compared class names against org.bigsolr.hadoop.SolrInputRecord;
    // the cast below implies SolrInputRecord is a SolrInputDocument.
    doc = (SolrInputDocument) value;
  }
  else {
    // Fail the task instead of calling System.exit(0) as the original did,
    // which would kill the JVM while reporting success.
    log.error("SolrRecordWriter write() Class for Value is not Supported: " + value.getClass().getName());
    throw new IOException("Unsupported value class: " + value.getClass().getName());
  }

  try {
    solr.add(doc);
    //solr.commit(true,true);
  } catch (SolrServerException e) {
    log.error("SolrRecordWriter -- solr.add(doc) failed");
    throw new IOException(e);
  }
}
 
Author: mrt, Project: bigsolr, Lines: 38, Source: SolrRecordWriter.java

Example 6: es2Json

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.elasticsearch.hadoop.mr.WritableArrayWritable;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import java.io.IOException;
public JSONObject es2Json(MapWritable valueWritable) throws IOException, InterruptedException {
    JSONObject obj = new JSONObject();
    for (Writable keyWritable : valueWritable.keySet()) {
        String key = ((Text) keyWritable).toString();
        Writable valWritable = valueWritable.get(keyWritable);
        
        // Unwrap each Writable into the corresponding plain Java type;
        // nested MapWritables are converted recursively.
        if (valWritable instanceof Text) {
            obj.put(key, valWritable.toString());
        } else if (valWritable instanceof IntWritable) {
            obj.put(key, ((IntWritable) valWritable).get());
        } else if (valWritable instanceof FloatWritable) {
            obj.put(key, ((FloatWritable) valWritable).get());
        } else if (valWritable instanceof LongWritable) {
            obj.put(key, ((LongWritable) valWritable).get());
        } else if (valWritable instanceof DoubleWritable) {
            obj.put(key, ((DoubleWritable) valWritable).get());
        } else if (valWritable instanceof BooleanWritable) {
            obj.put(key, ((BooleanWritable) valWritable).get());
        } else if (valWritable instanceof MapWritable) {
            obj.put(key, es2Json((MapWritable) valWritable));
        } else if (valWritable instanceof WritableArrayWritable) {
            WritableArrayWritable waw = (WritableArrayWritable) valWritable;
            Writable[] writable = waw.get();
            
            // Arrays are mapped element by element onto a JSONArray.
            JSONArray array = new JSONArray();
            for (int i = 0; i < writable.length; ++i) {
                Object o = writable[i];
                if (o instanceof MapWritable) {
                    array.add(es2Json((MapWritable) o));
                } else if (o instanceof Text) {
                    array.add(o.toString());
                } else if (o instanceof IntWritable) {
                    array.add(((IntWritable) o).get());
                } else if (o instanceof FloatWritable) {
                    array.add(((FloatWritable) o).get());
                } else if (o instanceof LongWritable) {
                    array.add(((LongWritable) o).get());
                } else if (o instanceof DoubleWritable) {
                    array.add(((DoubleWritable) o).get());
                }
            }
            obj.put(key, array);
        }
    }
    return obj;
}
 
Author: chaopengio, Project: elasticsearch-mapreduce, Lines: 47, Source: Es2Json.java
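A sketch of how such a converter might be driven from a mapper. This is hypothetical (the mapper class is ours, and we assume Es2Json has a no-arg constructor); it relies on values arriving as MapWritable, as they do when reading from Elasticsearch via elasticsearch-hadoop's MapReduce input format:

public static class ToJsonMapper extends Mapper<Text, MapWritable, Text, Text> {
    private final Es2Json converter = new Es2Json(); // assumed no-arg constructor

    @Override
    protected void map(Text key, MapWritable value, Context context)
            throws IOException, InterruptedException {
        // Convert the Writable row to JSON and emit it as text.
        context.write(key, new Text(converter.es2Json(value).toJSONString()));
    }
}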

Example 7: reduce

import org.apache.hadoop.io.MapWritable; // the class this method depends on
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import java.io.IOException;
import java.util.HashMap;
@Override
public void reduce(Text key, Iterable<MapWritable> values, Context context)
	throws IOException, InterruptedException
{
	// Merge the per-map value counts for this attribute into one histogram.
	
	int maps = 0;
	int vals = 0;
			
	HashMap<Text, Integer> myMap = new HashMap<Text, Integer>();
	
	for (MapWritable m : values)
	{
		maps++;
		for (Writable valName : m.keySet())
		{
			Text val = (Text) valName;
			Integer count = ((IntWritable) (m.get(valName))).get();
			if (myMap.containsKey(val))
			{
				myMap.put(val, myMap.get(val) + count);
			}
			else
			{
				myMap.put(val, count);
				vals++;
			}
		}
	}
	
	// s_logger is a class-level logger field.
	s_logger.debug("Reducer/combiner got " + maps +
				   " maps, with a total of " + vals +
				   " distinct values for attribute `" + key + "`");
	
	// Re-emit the merged histogram under the same key as a
	// MapWritable of Text -> IntWritable.
	MapWritable output = new MapWritable();
	for (Text t : myMap.keySet())
	{
		s_logger.debug("Outputting count " + myMap.get(t) + " for attribute " + t);
		output.put(t, new IntWritable(myMap.get(t)));
	}
			
	context.write(key, output);
}
 
Author: haiqinwang, Project: SmileWide, Lines: 50, Source: AttributeValueHistogramReducer.java
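The log message says "Reducer/combiner", and the reducer's input and output types are both (Text, MapWritable), so the same class can plausibly double as a combiner. A hypothetical driver fragment (the configuration and job name are ours):

Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "attribute-value-histogram");
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(MapWritable.class);
job.setReducerClass(AttributeValueHistogramReducer.class);
// Safe because the reducer's input and output types match:
job.setCombinerClass(AttributeValueHistogramReducer.class);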


Note: the org.apache.hadoop.io.MapWritable.keySet examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community-contributed open-source projects; copyright in the code remains with the original authors. Consult each project's license before distributing or reusing the code; do not repost without permission.