This page collects typical usage examples of the Java class org.apache.hadoop.io.serializer.SerializationFactory. If you are wondering what SerializationFactory is for, how to use it, or where to find working examples, the curated snippets below should help.
The SerializationFactory class belongs to the org.apache.hadoop.io.serializer package. Eleven code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
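Before the examples, here is a minimal, self-contained round trip showing the typical SerializationFactory workflow (a sketch using the stock WritableSerialization; the class name, the Text payload, and the variable names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationFactoryRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration(); // default conf registers WritableSerialization
    SerializationFactory factory = new SerializationFactory(conf);

    // Serialize a Text value into an in-memory buffer.
    Serializer<Text> serializer = factory.getSerializer(Text.class);
    DataOutputBuffer out = new DataOutputBuffer();
    serializer.open(out);
    serializer.serialize(new Text("hello"));
    serializer.close();

    // Read it back through the matching Deserializer.
    Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    deserializer.open(in);
    Text copy = deserializer.deserialize(null); // null requests a fresh instance
    deserializer.close();
    System.out.println(copy); // prints "hello"
  }
}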
Example 1: ReduceContextImpl
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
public ReduceContextImpl(Configuration conf, TaskAttemptID taskid,
RawKeyValueIterator input,
Counter inputKeyCounter,
Counter inputValueCounter,
RecordWriter<KEYOUT,VALUEOUT> output,
OutputCommitter committer,
StatusReporter reporter,
RawComparator<KEYIN> comparator,
Class<KEYIN> keyClass,
Class<VALUEIN> valueClass
) throws InterruptedException, IOException{
super(conf, taskid, output, committer, reporter);
this.input = input;
this.inputKeyCounter = inputKeyCounter;
this.inputValueCounter = inputValueCounter;
this.comparator = comparator;
this.serializationFactory = new SerializationFactory(conf);
this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
this.keyDeserializer.open(buffer);
this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
this.valueDeserializer.open(buffer);
hasMore = input.next();
this.keyClass = keyClass;
this.valueClass = valueClass;
this.conf = conf;
this.taskid = taskid;
}
Example 2: copy
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
/**
* Make a copy of the writable object using serialization to a buffer
* @param src the object to copy from
* @param dst the object to copy into, which is destroyed
* @return dst param (the copy)
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <T> T copy(Configuration conf,
T src, T dst) throws IOException {
CopyInCopyOutBuffer buffer = CLONE_BUFFERS.get();
buffer.outBuffer.reset();
SerializationFactory factory = getFactory(conf);
Class<T> cls = (Class<T>) src.getClass();
Serializer<T> serializer = factory.getSerializer(cls);
serializer.open(buffer.outBuffer);
serializer.serialize(src);
buffer.moveData();
Deserializer<T> deserializer = factory.getDeserializer(cls);
deserializer.open(buffer.inBuffer);
dst = deserializer.deserialize(dst);
return dst;
}
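This method has the same signature as Hadoop's ReflectionUtils.copy, which is where this pattern lives upstream. A minimal call sketch (assuming the method is invoked through that class; the Text values are illustrative, and the caller must handle the IOException):

Configuration conf = new Configuration();
Text original = new Text("payload");
Text clone = ReflectionUtils.copy(conf, original, new Text());
// 'clone' is an independent deep copy; mutating it leaves 'original' untouched.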
Example 3: copy
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
/**
* Make a copy of the writable object using serialization to a buffer
* @param src the object to copy from
* @param dst the object to copy into, which is destroyed
* @return dst param (the copy)
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <T> T copy(Configuration conf,
T src, T dst) throws IOException {
CopyInCopyOutBuffer buffer = cloneBuffers.get();
buffer.outBuffer.reset();
SerializationFactory factory = getFactory(conf);
Class<T> cls = (Class<T>) src.getClass();
Serializer<T> serializer = factory.getSerializer(cls);
serializer.open(buffer.outBuffer);
serializer.serialize(src);
buffer.moveData();
Deserializer<T> deserializer = factory.getDeserializer(cls);
deserializer.open(buffer.inBuffer);
dst = deserializer.deserialize(dst);
return dst;
}
Example 4: KeyValueWriter
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
public KeyValueWriter(Configuration conf, OutputStream output,
Class<K> kyClass, Class<V> valClass
) throws IOException {
keyClass = kyClass;
valueClass = valClass;
dataBuffer = new DataOutputBuffer();
SerializationFactory serializationFactory
= new SerializationFactory(conf);
keySerializer
= (Serializer<K>)serializationFactory.getSerializer(keyClass);
keySerializer.open(dataBuffer);
valueSerializer
= (Serializer<V>)serializationFactory.getSerializer(valueClass);
valueSerializer.open(dataBuffer);
outputStream = new DataOutputStream(output);
}
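The constructor above only wires both serializers to a shared scratch buffer. A write method along the following lines typically completes the picture (a sketch of the assumed companion method, not necessarily the project's actual code):

public void write(K key, V value) throws IOException {
  dataBuffer.reset();                 // reuse the scratch buffer for each record
  keySerializer.serialize(key);       // key bytes land at the front of dataBuffer
  int keyLength = dataBuffer.getLength();
  valueSerializer.serialize(value);   // value bytes are appended after the key
  int valueLength = dataBuffer.getLength() - keyLength;
  WritableUtils.writeVInt(outputStream, keyLength);
  WritableUtils.writeVInt(outputStream, valueLength);
  outputStream.write(dataBuffer.getData(), 0, dataBuffer.getLength());
}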
Example 5: serDeser
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
private <K> K serDeser(K conf) throws Exception {
SerializationFactory factory = new SerializationFactory(CONF);
Serializer<K> serializer =
factory.getSerializer(GenericsUtil.getClass(conf));
Deserializer<K> deserializer =
factory.getDeserializer(GenericsUtil.getClass(conf));
DataOutputBuffer out = new DataOutputBuffer();
serializer.open(out);
serializer.serialize(conf);
serializer.close();
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
deserializer.open(in);
K after = deserializer.deserialize(null);
deserializer.close();
return after;
}
Example 6: ValuesIterator
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
public ValuesIterator (RawKeyValueIterator in,
RawComparator<KEY> comparator,
Class<KEY> keyClass,
Class<VALUE> valClass, Configuration conf,
Progressable reporter)
throws IOException {
this.in = in;
this.comparator = comparator;
this.reporter = reporter;
SerializationFactory serializationFactory = new SerializationFactory(conf);
this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
this.keyDeserializer.open(keyIn);
this.valDeserializer = serializationFactory.getDeserializer(valClass);
this.valDeserializer.open(this.valueIn);
readNextKey();
key = nextKey;
nextKey = null; // force new instance creation
hasNext = more;
}
Example 7: copy
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
/**
* Make a copy of the writable object using serialization to a buffer
* @param src the object to copy from
* @param dst the object to copy into, which is destroyed
* @return dst param (the copy)
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <T> T copy(Configuration conf,
T src, T dst) throws IOException {
CopyInCopyOutBuffer buffer = cloneBuffers.get();
buffer.outBuffer.reset();
SerializationFactory factory = getFactory(conf);
Class<T> cls = (Class<T>) src.getClass();
Serializer<T> serializer = factory.getSerializer(cls);
serializer.open(buffer.outBuffer);
serializer.serialize(src);
buffer.moveData();
Deserializer<T> deserializer = factory.getDeserializer(cls);
deserializer.open(buffer.inBuffer);
dst = deserializer.deserialize(dst);
return dst;
}
Example 8: deserialize
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
/** Deserializes the object from the given DataInput using
* the available Hadoop serializations.
* @throws IOException */
public static <T> T deserialize(Configuration conf, DataInput in,
T obj, Class<T> objClass) throws IOException {
SerializationFactory serializationFactory = new SerializationFactory(getOrCreateConf(conf));
Deserializer<T> deserializer = serializationFactory.getDeserializer(
objClass);
int length = WritableUtils.readVInt(in);
byte[] arr = new byte[length];
in.readFully(arr);
List<ByteBuffer> list = new ArrayList<>();
list.add(ByteBuffer.wrap(arr));
try (ByteBufferInputStream is = new ByteBufferInputStream(list)) {
deserializer.open(is);
T newObj = deserializer.deserialize(obj);
return newObj;
} finally {
if (deserializer != null)
deserializer.close();
}
}
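The reader above expects a vint length prefix followed by the serialized bytes. A matching writer would look roughly like this (a sketch under that layout assumption; getOrCreateConf is the helper already used by deserialize above):

public static <T> void serialize(Configuration conf, DataOutput out, T obj, Class<T> objClass)
throws IOException {
  SerializationFactory serializationFactory = new SerializationFactory(getOrCreateConf(conf));
  Serializer<T> serializer = serializationFactory.getSerializer(objClass);
  DataOutputBuffer buffer = new DataOutputBuffer();
  try {
    serializer.open(buffer);
    serializer.serialize(obj);
  } finally {
    serializer.close();
  }
  WritableUtils.writeVInt(out, buffer.getLength()); // length prefix read back by deserialize
  out.write(buffer.getData(), 0, buffer.getLength());
}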
Example 9: ReduceContext
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
public ReduceContext(Configuration conf, TaskAttemptID taskid,
RawKeyValueIterator input,
Counter inputKeyCounter,
Counter inputValueCounter,
RecordWriter<KEYOUT,VALUEOUT> output,
OutputCommitter committer,
StatusReporter reporter,
RawComparator<KEYIN> comparator,
Class<KEYIN> keyClass,
Class<VALUEIN> valueClass
) throws InterruptedException, IOException{
super(conf, taskid, output, committer, reporter);
this.input = input;
this.inputKeyCounter = inputKeyCounter;
this.inputValueCounter = inputValueCounter;
this.comparator = comparator;
SerializationFactory serializationFactory = new SerializationFactory(conf);
this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
this.keyDeserializer.open(buffer);
this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
this.valueDeserializer.open(buffer);
hasMore = input.next();
}
Example 10: getSerialization
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
/**
* Gets serializer for specified class.
*
* @param cls Class.
* @param jobConf Job configuration.
* @return Appropriate serializer.
*/
@SuppressWarnings("unchecked")
private HadoopSerialization getSerialization(Class<?> cls, Configuration jobConf) throws IgniteCheckedException {
A.notNull(cls, "cls");
SerializationFactory factory = new SerializationFactory(jobConf);
Serialization<?> serialization = factory.getSerialization(cls);
if (serialization == null)
throw new IgniteCheckedException("Failed to find serialization for: " + cls.getName());
if (serialization.getClass() == WritableSerialization.class)
return new HadoopWritableSerialization((Class<? extends Writable>)cls);
return new HadoopSerializationWrapper(serialization, cls);
}
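getSerialization returns null when no registered Serialization accepts the class, which is what triggers the IgniteCheckedException above. Additional serializations are registered through the standard io.serializations configuration key (a sketch; the Avro entries ship with Hadoop):

Configuration jobConf = new Configuration();
jobConf.setStrings("io.serializations",
    "org.apache.hadoop.io.serializer.WritableSerialization",
    "org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization",
    "org.apache.hadoop.io.serializer.avro.AvroReflectSerialization");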
Example 11: getSplitDetails
import org.apache.hadoop.io.serializer.SerializationFactory; // import the required package/class
@SuppressWarnings("unchecked")
private static <T> T getSplitDetails(FSDataInputStream inFile, long offset, Configuration configuration)
throws IOException {
inFile.seek(offset);
String className = StringInterner.weakIntern(Text.readString(inFile));
Class<T> cls;
try {
cls = (Class<T>) configuration.getClassByName(className);
} catch (ClassNotFoundException ce) {
IOException wrap = new IOException("Split class " + className +
" not found");
wrap.initCause(ce);
throw wrap;
}
SerializationFactory factory = new SerializationFactory(configuration);
Deserializer<T> deserializer =
(Deserializer<T>) factory.getDeserializer(cls);
deserializer.open(inFile);
T split = deserializer.deserialize(null);
return split;
}
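The on-disk layout this reader assumes is the split's class name written with Text.writeString, followed by the serialized split body. The writing side would look roughly like this (a sketch of the assumed counterpart; in Hadoop proper, JobSplitWriter performs this step):

static <T> void writeSplitDetails(FSDataOutputStream out, T split, Configuration conf)
    throws IOException {
  Text.writeString(out, split.getClass().getName()); // read back via Text.readString
  SerializationFactory factory = new SerializationFactory(conf);
  @SuppressWarnings("unchecked")
  Serializer<T> serializer = factory.getSerializer((Class<T>) split.getClass());
  serializer.open(out);
  serializer.serialize(split); // body consumed by getSplitDetails
}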