This article collects typical usage examples of the Java method org.apache.hadoop.io.MapWritable.keySet. If you are struggling with questions such as: what exactly does MapWritable.keySet do, how is it used, and where can I find examples of it, then the hand-picked code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.MapWritable.
Seven code examples of MapWritable.keySet are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java examples.
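Before the individual examples, here is a minimal self-contained sketch (class name and sample keys are ours, not taken from any example below) of the pattern all seven share: MapWritable implements java.util.Map&lt;Writable, Writable&gt;, so keySet() returns a Set&lt;Writable&gt; whose elements are cast back to their concrete types.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class KeySetDemo {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("clicks"), new IntWritable(42));
        map.put(new Text("views"), new IntWritable(100));
        // keySet() yields Writable keys; cast to the concrete type as needed
        for (Writable key : map.keySet()) {
            Text name = (Text) key;
            IntWritable count = (IntWritable) map.get(key);
            System.out.println(name + " -> " + count.get());
        }
    }
}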
Example 1: convertToMap
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
private static Map<String, Map<String, String>> convertToMap(MapWritable inputMap) {
    Map<String, Map<String, String>> mapResult = Maps.newHashMap();
    for (Writable attributeText : inputMap.keySet()) {
        MapWritable partialInsideMap = (MapWritable) inputMap.get(attributeText);
        Map<String, String> partialOutputMap = Maps.newHashMap();
        // Unwrap each inner entry (regola/valore are Italian for rule/value)
        for (Writable rule : partialInsideMap.keySet()) {
            Text regola = (Text) rule;
            Text valore = (Text) partialInsideMap.get(rule);
            partialOutputMap.put(regola.toString(), valore.toString());
        }
        mapResult.put(((Text) attributeText).toString(), partialOutputMap);
    }
    return mapResult;
}
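A hypothetical call site for Example 1 (input values are ours; since convertToMap is private, this would have to live in the same class, with Guava's Maps and the Hadoop imports already present there):

MapWritable inner = new MapWritable();
inner.put(new Text("minSupport"), new Text("0.5"));

MapWritable input = new MapWritable();
input.put(new Text("age"), inner);

Map<String, Map<String, String>> plain = convertToMap(input);
// plain is {age={minSupport=0.5}}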
Example 2: reduce
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
@Override
public void reduce(Text key, Iterable<MapWritable> listOfMaps, Context context) throws IOException, InterruptedException {
    for (MapWritable partialResultMap : listOfMaps) {
        for (Writable attributeText : partialResultMap.keySet()) {
            MapWritable partialInsideMap = (MapWritable) partialResultMap.get(attributeText);
            MapWritable partialOutputMap = new MapWritable();
            for (Writable rule : partialInsideMap.keySet()) {
                Text regola = (Text) rule;
                Text valore = (Text) partialInsideMap.get(rule);
                // Deep-copy keys and values before storing them past this iteration
                partialOutputMap.put(new Text(regola.toString()), new Text(valore.toString()));
            }
            // result is a field declared elsewhere in the reducer
            result.put((Text) attributeText, partialOutputMap);
        }
    }
    // Serialize the accumulated map to JSON before emitting
    Text resultWrite = new Text(MapWritableConverter.toJsonText(result));
    context.write(key, resultWrite);
}
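A note on the new Text(...) copies above: Hadoop's reduce-side iterator reuses Writable instances across iterations, so examples like this defensively copy keys and values before storing them beyond the current iteration (partialOutputMap.put(regola, valore) without the copies would risk entries being overwritten). Whether the copy is strictly necessary depends on how the concrete Writable's readFields repopulates the reused object; the defensive copy is the safe default.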
Example 3: reduce
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
@Override
public void reduce(Text key, Iterable<MapWritable> listOfMaps, Context context) throws IOException, InterruptedException {
    for (MapWritable partialResultMap : listOfMaps) {
        for (Writable attributeText : partialResultMap.keySet()) {
            MapWritable partialInsideMap = (MapWritable) partialResultMap.get(attributeText);
            MapWritable partialOutputMap = new MapWritable();
            for (Writable rule : partialInsideMap.keySet()) {
                Text regola = (Text) rule;
                Text valore = (Text) partialInsideMap.get(rule);
                partialOutputMap.put(new Text(regola.toString()), new Text(valore.toString()));
            }
            result.put((Text) attributeText, partialOutputMap);
        }
    }
    // Unlike Example 2, the accumulated MapWritable is emitted directly rather than as JSON text
    context.write(key, result);
}
Example 4: mapWritableToString
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
/**
 * Method to convert a MapWritable into a JSON string
 */
@SuppressWarnings("unchecked")
public static String mapWritableToString(MapWritable map)
{
    // Convert to JSON and then write to a String - ensures JSON read-in compatibility
    JSONObject jsonObj = new JSONObject();
    for (Writable key : map.keySet())
    {
        jsonObj.put(key.toString(), map.get(key).toString());
    }
    return jsonObj.toJSONString();
}
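A hypothetical usage sketch for Example 4 (sample entries are ours; assumes the json-simple library that the example's JSONObject and toJSONString() come from, plus the Hadoop imports from the opening sketch):

MapWritable map = new MapWritable();
map.put(new Text("status"), new Text("ok"));
map.put(new Text("count"), new IntWritable(3));

String json = mapWritableToString(map);
// e.g. {"count":"3","status":"ok"} -- key order is not guaranteed, and every
// value is stringified via toString(), so numbers come out as JSON strings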
Example 5: write
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
@Override
public void write(NullWritable key, Writable value) throws IOException {
    log.info("SolrRecordWriter -> write");
    if (solr == null) {
        solr = SolrOperations.getSolrServer(conf);
    }
    SolrInputDocument doc = new SolrInputDocument();
    if (value.getClass().getName().equals("org.apache.hadoop.io.MapWritable")) {
        MapWritable valueMap = (MapWritable) value;
        for (Writable keyWritable : valueMap.keySet()) {
            String fieldName = keyWritable.toString();
            Object fieldValue = valueMap.get(keyWritable);
            // Need to add proper conversion of object to Schema field type
            doc.addField(fieldName, fieldValue.toString());
        }
    } else if (value.getClass().getName().equals("org.bigsolr.hadoop.SolrInputRecord")) {
        doc = (SolrInputDocument) value;
    } else {
        log.error("SolrRecordWriter write() Class for Value is not Supported: " + value.getClass().getName());
        System.exit(0);
    }
    try {
        solr.add(doc);
        //solr.commit(true,true);
    } catch (SolrServerException e) {
        log.error("SolrRecordWriter-- solr.add(doc) failed");
        throw new IOException(e);
    }
}
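A small design note on Example 5: dispatching on fully-qualified class-name strings is brittle. Below is a sketch of the same branching with instanceof (assuming org.bigsolr.hadoop.SolrInputRecord extends SolrInputDocument, as the original cast implies), throwing instead of calling System.exit(0) so the task can fail and be retried by the framework:

if (value instanceof MapWritable) {
    MapWritable valueMap = (MapWritable) value;
    for (Writable keyWritable : valueMap.keySet()) {
        doc.addField(keyWritable.toString(), valueMap.get(keyWritable).toString());
    }
} else if (value instanceof SolrInputRecord) {
    doc = (SolrInputDocument) value;
} else {
    throw new IOException("Unsupported value class: " + value.getClass().getName());
}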
Example 6: es2Json
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
public JSONObject es2Json(MapWritable valueWritable) throws IOException, InterruptedException {
    JSONObject obj = new JSONObject();
    for (Writable keyWritable : valueWritable.keySet()) {
        String key = ((Text) keyWritable).toString();
        Writable valWritable = valueWritable.get(keyWritable);
        if (valWritable instanceof Text) {
            obj.put(key, valWritable.toString());
        } else if (valWritable instanceof IntWritable) {
            obj.put(key, ((IntWritable) valWritable).get());
        } else if (valWritable instanceof FloatWritable) {
            obj.put(key, ((FloatWritable) valWritable).get());
        } else if (valWritable instanceof LongWritable) {
            obj.put(key, ((LongWritable) valWritable).get());
        } else if (valWritable instanceof DoubleWritable) {
            obj.put(key, ((DoubleWritable) valWritable).get());
        } else if (valWritable instanceof BooleanWritable) {
            obj.put(key, ((BooleanWritable) valWritable).get());
        } else if (valWritable instanceof MapWritable) {
            // Recurse into nested documents
            obj.put(key, es2Json((MapWritable) valWritable));
        } else if (valWritable instanceof WritableArrayWritable) {
            // Arrays: unwrap each element by type into a JSONArray
            WritableArrayWritable waw = (WritableArrayWritable) valWritable;
            Writable[] writable = waw.get();
            JSONArray array = new JSONArray();
            for (int i = 0; i < writable.length; ++i) {
                Object o = writable[i];
                if (o instanceof MapWritable) {
                    array.add(es2Json((MapWritable) o));
                } else if (o instanceof Text) {
                    array.add(o.toString());
                } else if (o instanceof IntWritable) {
                    array.add(((IntWritable) o).get());
                } else if (o instanceof FloatWritable) {
                    array.add(((FloatWritable) o).get());
                } else if (o instanceof LongWritable) {
                    array.add(((LongWritable) o).get());
                } else if (o instanceof DoubleWritable) {
                    array.add(((DoubleWritable) o).get());
                }
            }
            obj.put(key, array);
        }
    }
    return obj;
}
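A hypothetical round-trip for Example 6 (field names and values are ours; es2Json is an instance method, so this assumes a call from within the enclosing class):

MapWritable geo = new MapWritable();
geo.put(new Text("lat"), new DoubleWritable(40.7));

MapWritable docMap = new MapWritable();
docMap.put(new Text("name"), new Text("nyc"));
docMap.put(new Text("geo"), geo);

JSONObject json = es2Json(docMap);
// renders as {"name":"nyc","geo":{"lat":40.7}} -- nested MapWritables become nested JSON objects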
Example 7: reduce
import org.apache.hadoop.io.MapWritable; // import the package/class the method depends on
@Override
public void reduce(Text key, Iterable<MapWritable> values, Context context)
        throws IOException, InterruptedException
{
    // Collect and merge the incoming counts into a local map
    int maps = 0;
    int vals = 0;
    HashMap<Text, Integer> myMap = new HashMap<Text, Integer>();
    for (MapWritable m : values)
    {
        maps++;
        for (Writable valName : m.keySet())
        {
            Text val = (Text) valName;
            Integer count = ((IntWritable) (m.get(valName))).get();
            if (myMap.containsKey(val))
            {
                myMap.put(val, myMap.get(val) + count);
            }
            else
            {
                myMap.put(val, count);
                vals++;
            }
        }
    }
    s_logger.debug("Reducer/combiner got " + maps +
            " maps, with a total of " + vals +
            " distinct values for attribute `" + key + "`");
    // Emit: key unchanged, value is myMap repackaged as a MapWritable of Text -> IntWritable
    MapWritable output = new MapWritable();
    for (Text t : myMap.keySet())
    {
        s_logger.debug("Outputting count " + myMap.get(t) + " for attribute " + t);
        output.put(t, new IntWritable(myMap.get(t)));
    }
    context.write(key, output);
}
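A worked example of what this reducer computes: if it receives, for key "color", the two maps {red=2} and {red=1, blue=4}, it emits the single MapWritable {red=3, blue=4}, logging maps as 2 and vals (the number of distinct values) as 2. Because its input and output value types are both MapWritable, the same class can run as either the combiner or the reducer, which is what the "Reducer/combiner" log message suggests.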