This article collects typical usage examples of the Java method org.apache.hadoop.mapred.InputSplit.readFields. If you are wondering how InputSplit.readFields is used in practice, or are looking for concrete examples of the method, the curated code samples here may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.mapred.InputSplit.
The following presents 5 code examples of the InputSplit.readFields method, sorted by popularity by default.
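As background for the examples below, readFields is the deserialization half of Hadoop's Writable contract: write(DataOutput) serializes a split into bytes, and readFields(DataInput) repopulates a freshly constructed instance from those same bytes. A minimal round-trip sketch (the roundTrip helper is illustrative only and does not appear in any of the examples below):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;

// Serialize a split with write(), then rebuild an equal copy with readFields().
// e.g. roundTrip(new FileSplit(new Path("/tmp/part-00000"), 0L, 1024L, new String[0]), new Configuration())
public static InputSplit roundTrip(InputSplit original, Configuration conf) throws IOException {
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  original.write(out);                                                       // Writable serialization
  InputSplit copy = ReflectionUtils.newInstance(original.getClass(), conf);  // needs a (possibly non-public) no-arg constructor
  copy.readFields(ByteStreams.newDataInput(out.toByteArray()));              // Writable deserialization
  return copy;
}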
Example 1: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; // import the package/class the method depends on
public static InputSplit deserializeInputSplit(String base64, String className) throws IOException, ReflectiveOperationException {
  Constructor<?> constructor = Class.forName(className).getDeclaredConstructor();
  if (constructor == null) {
    throw new ReflectiveOperationException("Class " + className + " does not implement a default constructor.");
  }
  constructor.setAccessible(true);                 // allow calling a non-public no-arg constructor
  InputSplit split = (InputSplit) constructor.newInstance();
  ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(base64));
  split.readFields(byteArrayDataInput);            // let the split repopulate itself from the decoded bytes
  return split;
}
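For context, a matching serializer just runs the Writable contract in the other direction. The serializeInputSplit helper below is a hypothetical sketch (it is not part of the project this example was taken from), reusing the same Guava ByteStreams and commons-codec Base64 utilities:

import java.io.IOException;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.mapred.InputSplit;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;

public static String serializeInputSplit(InputSplit split) throws IOException {
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  split.write(out);                                    // write() is the counterpart of readFields()
  return Base64.encodeBase64String(out.toByteArray()); // produces the String that deserializeInputSplit expects
}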
Example 2: readFields
import org.apache.hadoop.mapred.InputSplit; // import the package/class the method depends on
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
  inputSplit.readFields(in);                                        // the wrapped split reads its own fields
  inputFormatClass = (Class<? extends InputFormat>) readClass(in);
  mapperClass = (Class<? extends Mapper>) readClass(in);
}
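A wrapper like this is normally symmetric: whatever readFields reads, write must have written in the same order. A rough sketch of the matching write() side, assuming readClass simply reads back a class-name string written with Text.writeString (the readClass helper is not shown above, so this is an assumption):

private void writeClass(DataOutput out, Class<?> clazz) throws IOException {
  Text.writeString(out, clazz.getName());   // assumption: readClass() resolves this name via the Configuration
}

public void write(DataOutput out) throws IOException {
  writeClass(out, inputSplitClass);
  inputSplit.write(out);                    // delegate to the wrapped split's own Writable implementation
  writeClass(out, inputFormatClass);
  writeClass(out, mapperClass);
}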
Example 3: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; // import the package/class the method depends on
public static InputSplit deserializeInputSplit(SerializedInputSplit split) throws IOException, ReflectiveOperationException {
  Constructor<?> constructor = Class.forName(split.getInputSplitClass()).getDeclaredConstructor();
  if (constructor == null) {
    throw new ReflectiveOperationException("Class " + split.getInputSplitClass() + " does not implement a default constructor.");
  }
  constructor.setAccessible(true);
  InputSplit deserializedSplit = (InputSplit) constructor.newInstance();
  deserializedSplit.readFields(ByteStreams.newDataInput(split.getInputSplit().toByteArray()));
  return deserializedSplit;
}
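Note the pattern shared by examples 1 and 3: the fully qualified class name travels alongside the serialized split rather than inside it. Writable serialization is not self-describing, so the reader must be told which concrete InputSplit class to instantiate (via its no-argument constructor) before readFields can be called on the raw bytes.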
Example 4: readFields
import org.apache.hadoop.mapred.InputSplit; // import the package/class the method depends on
@Override
public void readFields(DataInput in) throws IOException {
  String inputSplitClassName = in.readUTF();
  try {
    inputSplit = (InputSplit) ReflectionUtil.newInstance(
        conf.getClassByName(inputSplitClassName), conf);
  } catch (Exception e) {
    throw new IOException("Cannot create an instance of InputSplit class = "
        + inputSplitClassName + ":" + e.getMessage(), e);
  }
  inputSplit.readFields(in);                 // the wrapped split reads the remaining bytes itself
  inputFormatClassName = in.readUTF();
}
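The byte layout read here implies a symmetric writer: a UTF class name, the split's own bytes, then the input-format class name. A hedged sketch of what that write() might look like (the method is not shown in the source, so treat it as an assumption):

@Override
public void write(DataOutput out) throws IOException {
  out.writeUTF(inputSplit.getClass().getName()); // class name first, so readFields() knows what to instantiate
  inputSplit.write(out);                         // then the wrapped split's own bytes
  out.writeUTF(inputFormatClassName);            // finally the input format class name
}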
Example 5: deserializeInputSplit
import org.apache.hadoop.mapred.InputSplit; // import the package/class the method depends on
public static List<InputSplit> deserializeInputSplit(List<String> base64, String className) throws IOException, ReflectiveOperationException {
  Constructor<?> constructor = Class.forName(className).getDeclaredConstructor();
  if (constructor == null) {
    throw new ReflectiveOperationException("Class " + className + " does not implement a default constructor.");
  }
  constructor.setAccessible(true);
  List<InputSplit> splits = new ArrayList<>();
  for (String str : base64) {
    InputSplit split = (InputSplit) constructor.newInstance();
    ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(str));
    split.readFields(byteArrayDataInput);     // each string holds one Base64-encoded split
    splits.add(split);
  }
  return splits;
}
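A hypothetical usage sketch for this list-based variant, assuming the splits come from a file-based InputFormat and were encoded with the same commons-codec Base64 class (none of this calling code appears in the original project; jobConf and inputFormat are assumed to exist):

List<String> encoded = new ArrayList<>();
for (InputSplit s : inputFormat.getSplits(jobConf, 1)) {       // mapred API: getSplits returns InputSplit[]
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  s.write(out);
  encoded.add(Base64.encodeBase64String(out.toByteArray()));
}
List<InputSplit> restored =
    deserializeInputSplit(encoded, FileSplit.class.getName()); // every split must share this concrete class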