本文整理汇总了Java中org.apache.hadoop.mapred.InputSplit.readFields方法的典型用法代码示例。如果您正苦于以下问题:Java InputSplit.readFields方法的具体用法?Java InputSplit.readFields怎么用?Java InputSplit.readFields使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.mapred.InputSplit
的用法示例。
在下文中一共展示了InputSplit.readFields方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; //导入方法依赖的package包/类
/**
 * Reconstructs a Hadoop {@link InputSplit} from its Base64-encoded Writable payload.
 *
 * @param base64    Base64 encoding of the bytes the split wrote via {@code write(DataOutput)}
 * @param className fully-qualified name of the concrete InputSplit implementation
 * @return a new split instance populated via {@code readFields}
 * @throws IOException                  if the split fails to read its fields
 * @throws ReflectiveOperationException if the class cannot be found, lacks a
 *                                      no-arg constructor, or cannot be instantiated
 */
public static InputSplit deserializeInputSplit(String base64, String className) throws IOException, ReflectiveOperationException {
    Constructor<?> constructor;
    try {
        // getDeclaredConstructor() never returns null — it throws
        // NoSuchMethodException when no default constructor exists. The old
        // `if (constructor == null)` check was dead code, so the intended
        // error message could never be produced; translate the exception
        // instead, preserving it as the cause.
        constructor = Class.forName(className).getDeclaredConstructor();
    } catch (NoSuchMethodException e) {
        throw new ReflectiveOperationException(
            "Class " + className + " does not implement a default constructor.", e);
    }
    // The no-arg constructor may be package-private or private.
    constructor.setAccessible(true);
    InputSplit split = (InputSplit) constructor.newInstance();
    // InputSplit is a Writable: decode the payload and let the split rehydrate itself.
    ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(base64));
    split.readFields(byteArrayDataInput);
    return split;
}
示例2: readFields
import org.apache.hadoop.mapred.InputSplit; //导入方法依赖的package包/类
// Deserializes this composite split in the exact order its write() emitted it:
// split class, split payload, input-format class, mapper class. Do not reorder.
// NOTE(review): assumes `conf` is already populated before readFields is
// called — verify against the deserialization framework that drives this.
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
// Recreate the concrete split type first, then let the instance read its own fields.
inputSplitClass = (Class<? extends InputSplit>) readClass(in);
inputSplit = (InputSplit) ReflectionUtils
.newInstance(inputSplitClass, conf);
inputSplit.readFields(in);
// Trailing metadata: the InputFormat and Mapper classes paired with this split.
inputFormatClass = (Class<? extends InputFormat>) readClass(in);
mapperClass = (Class<? extends Mapper>) readClass(in);
}
示例3: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; //导入方法依赖的package包/类
/**
 * Reconstructs a Hadoop {@link InputSplit} from a {@code SerializedInputSplit}
 * message carrying the split's class name and its raw Writable bytes.
 *
 * @param split the serialized form (class name + byte payload)
 * @return a new split instance populated via {@code readFields}
 * @throws IOException                  if the split fails to read its fields
 * @throws ReflectiveOperationException if the class cannot be found, lacks a
 *                                      no-arg constructor, or cannot be instantiated
 */
public static InputSplit deserializeInputSplit(SerializedInputSplit split) throws IOException, ReflectiveOperationException {
    Constructor<?> constructor;
    try {
        // getDeclaredConstructor() never returns null — it throws
        // NoSuchMethodException instead, so the old null check was dead code
        // and its error message unreachable. Rethrow with the intended
        // message, keeping the original exception as the cause.
        constructor = Class.forName(split.getInputSplitClass()).getDeclaredConstructor();
    } catch (NoSuchMethodException e) {
        throw new ReflectiveOperationException(
            "Class " + split.getInputSplitClass() + " does not implement a default constructor.", e);
    }
    // The no-arg constructor may be non-public.
    constructor.setAccessible(true);
    InputSplit deserializedSplit = (InputSplit) constructor.newInstance();
    // Rehydrate the split from its serialized Writable bytes.
    deserializedSplit.readFields(ByteStreams.newDataInput(split.getInputSplit().toByteArray()));
    return deserializedSplit;
}
示例4: readFields
import org.apache.hadoop.mapred.InputSplit; //导入方法依赖的package包/类
// Deserializes this wrapper in the order its write() emitted: the concrete
// InputSplit class name (UTF), the split's own fields, then the InputFormat
// class name. Statement order must mirror the writer exactly.
// NOTE(review): assumes `conf` is set before this is invoked — verify caller.
@Override
public void readFields(DataInput in) throws IOException {
String inputSplitClassName = in.readUTF();
try {
// Instantiate the concrete split type via the Hadoop configuration's classloader.
inputSplit = (InputSplit) ReflectionUtil.newInstance(conf
.getClassByName(inputSplitClassName), conf);
} catch (Exception e) {
// Wrap any reflection failure in an IOException, preserving the cause,
// since readFields' contract only allows IOException.
throw new IOException(
"Cannot create an instance of InputSplit class = "
+ inputSplitClassName + ":" + e.getMessage(), e);
}
// Let the freshly created split read its own serialized state.
inputSplit.readFields(in);
inputFormatClassName = in.readUTF();
}
示例5: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; //导入方法依赖的package包/类
/**
 * Reconstructs a list of Hadoop {@link InputSplit}s from their Base64-encoded
 * Writable payloads. All splits must share the same concrete class.
 *
 * @param base64    Base64 encodings of each split's serialized bytes
 * @param className fully-qualified name of the concrete InputSplit implementation
 * @return new split instances in the same order as {@code base64}; empty list for empty input
 * @throws IOException                  if a split fails to read its fields
 * @throws ReflectiveOperationException if the class cannot be found, lacks a
 *                                      no-arg constructor, or cannot be instantiated
 */
public static List<InputSplit> deserializeInputSplit(List<String> base64, String className) throws IOException, ReflectiveOperationException {
    Constructor<?> constructor;
    try {
        // getDeclaredConstructor() never returns null — it throws
        // NoSuchMethodException, so the old `if (constructor == null)` branch
        // was dead code and its message unreachable. Translate the exception
        // instead, preserving the cause. Resolved once, outside the loop.
        constructor = Class.forName(className).getDeclaredConstructor();
    } catch (NoSuchMethodException e) {
        throw new ReflectiveOperationException(
            "Class " + className + " does not implement a default constructor.", e);
    }
    // The no-arg constructor may be non-public.
    constructor.setAccessible(true);
    List<InputSplit> splits = new ArrayList<>(base64.size());
    for (String str : base64) {
        InputSplit split = (InputSplit) constructor.newInstance();
        // Decode each payload and let the split rehydrate itself via Writable.
        ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(str));
        split.readFields(byteArrayDataInput);
        splits.add(split);
    }
    return splits;
}