This article collects typical usage examples of the Java class org.apache.hadoop.io.DataInputBuffer. If you are wondering what DataInputBuffer is for, how to use it, or what it looks like in practice, the curated code samples below may help.
The DataInputBuffer class belongs to the org.apache.hadoop.io package. Fifteen code examples of the class are shown below, sorted by popularity by default.
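
Almost every example below follows the same round trip: serialize into a DataOutputBuffer, then hand the resulting byte array to DataInputBuffer.reset() and deserialize from it without copying. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the class name DataInputBufferRoundTrip is chosen for illustration and does not appear in the examples.

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataInputBufferRoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize a Writable into an in-memory, growable byte buffer.
    Text before = new Text("hello");
    DataOutputBuffer out = new DataOutputBuffer();
    before.write(out);

    // Point a DataInputBuffer at the same backing array; no data is copied.
    // Use getLength(), not getData().length, since the backing array may be
    // larger than the valid data.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());

    // Deserialize and check the round trip.
    Text after = new Text();
    after.readFields(in);
    System.out.println(after); // prints "hello"
  }
}

The examples use both reset(byte[], int) and reset(byte[], int, int); the two-argument form is equivalent to passing a start offset of 0.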

Example 1: createFakeCredentials

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
    throws IOException {
  Credentials creds = new Credentials();
  byte[] password = new byte[20];
  Text kind = new Text();
  Text service = new Text();
  Text alias = new Text();
  for (int i = 0; i < nTok; ++i) {
    byte[] identifier = ("idef" + i).getBytes();
    r.nextBytes(password);
    kind.set("kind" + i);
    service.set("service" + i);
    alias.set("token" + i);
    Token token = new Token(identifier, password, kind, service);
    creds.addToken(alias, token);
  }
  DataOutputBuffer buf = new DataOutputBuffer();
  creds.writeTokenStorageToStream(buf);
  DataInputBuffer ret = new DataInputBuffer();
  ret.reset(buf.getData(), 0, buf.getLength());
  return ret;
}

Example 2: Scanner

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
/**
 * Constructor
 *
 * @param reader
 *          The TFile reader object.
 * @param begin
 *          Begin location of the scan.
 * @param end
 *          End location of the scan.
 * @throws IOException
 */
Scanner(Reader reader, Location begin, Location end) throws IOException {
  this.reader = reader;
  // ensure the TFile index is loaded throughout the life of scanner.
  reader.checkTFileDataIndex();
  beginLocation = begin;
  endLocation = end;
  valTransferBuffer = new BytesWritable();
  // TODO: remember the longest key in a TFile, and use it to replace
  // MAX_KEY_SIZE.
  keyBuffer = new byte[MAX_KEY_SIZE];
  keyDataInputStream = new DataInputBuffer();
  valueBufferInputStream = new ChunkDecoder();
  valueDataInputStream = new DataInputStream(valueBufferInputStream);
  if (beginLocation.compareTo(endLocation) >= 0) {
    currentLocation = new Location(endLocation);
  } else {
    currentLocation = new Location(0, 0);
    initBlock(beginLocation.getBlockIndex());
    inBlockAdvance(beginLocation.getRecordIndex());
  }
}

Example 3: testDelegationTokenIdentiferSerializationRoundTrip

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner,
    Text renewer, Text realUser) throws IOException {
  TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
      owner, renewer, realUser);
  DataOutputBuffer out = new DataOutputBuffer();
  dtid.writeImpl(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  try {
    TestDelegationTokenIdentifier dtid2 =
        new TestDelegationTokenIdentifier();
    dtid2.readFields(in);
    assertTrue(dtid.equals(dtid2));
    return true;
  } catch (IOException e) {
    return false;
  }
}

Example 4: testWritableComparatorJavaSerialization

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();
  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();
  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}

Example 5: testSerialization

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
/**
 * A utility that tests serialization/deserialization.
 * @param conf configuration to use, "io.serializations" is read to
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer
      = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer
      = factory.getDeserializer(GenericsUtil.getClass(before));
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}

Example 6: testWriteRead

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test
public void testWriteRead() {
  mfDatasetInputSplit.addDataset("dataSet1");
  mfDatasetInputSplit.addDataset("dataSet2");
  DataOutputBuffer dob = new DataOutputBuffer();
  DataInputBuffer dib = new DataInputBuffer();
  MainframeDatasetInputSplit mfReader = new MainframeDatasetInputSplit();
  try {
    mfDatasetInputSplit.write(dob);
    dib.reset(dob.getData(), dob.getLength());
    mfReader.readFields(dib);
    Assert.assertNotNull("MFReader get data from tester", mfReader);
    Assert.assertEquals(2, mfReader.getLength());
    Assert.assertEquals("dataSet1", mfReader.getNextDataset());
    Assert.assertEquals("dataSet2", mfReader.getNextDataset());
  } catch (IOException ioe) {
    Assert.fail("No IOException should be thrown!");
  } catch (InterruptedException ie) {
    Assert.fail("No InterruptedException should be thrown!");
  }
}
Developer: aliyun | Project: aliyun-maxcompute-data-collectors | Lines: 22 | Source: TestMainframeDatasetInputSplit.java

Example 7: testAMRMTokenIdentifier

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test
public void testAMRMTokenIdentifier() throws IOException {
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(1, 1), 1);
  int masterKeyId = 1;
  AMRMTokenIdentifier token = new AMRMTokenIdentifier(appAttemptId, masterKeyId);
  AMRMTokenIdentifier anotherToken = new AMRMTokenIdentifier();
  byte[] tokenContent = token.getBytes();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(tokenContent, tokenContent.length);
  anotherToken.readFields(dib);
  // verify the whole record equals with original record
  Assert.assertEquals("Token is not the same after serialization " +
      "and deserialization.", token, anotherToken);
  Assert.assertEquals("ApplicationAttemptId from proto is not the same with original token",
      anotherToken.getApplicationAttemptId(), appAttemptId);
  Assert.assertEquals("masterKeyId from proto is not the same with original token",
      anotherToken.getKeyId(), masterKeyId);
}

Example 8: testLocalResourceStatusSerDe

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
      LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
      new LocalResourceStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}

Example 9: testLocalizerStatusSerDe

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
      LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
      new LocalizerStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}

Example 10: testLocalizerHeartbeatResponseSerDe

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
@Test(timeout=10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb =
      (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD =
      LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD =
      new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}

Example 11: testIFileStream

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
public void testIFileStream() throws Exception {
  final int DLEN = 100;
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}

Example 12: serDeser

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
      factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
      factory.getDeserializer(GenericsUtil.getClass(conf));
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}

Example 13: append

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
public void append(DataInputBuffer key, DataInputBuffer value)
    throws IOException {
  int keyLength = key.getLength() - key.getPosition();
  if (keyLength < 0) {
    throw new IOException("Negative key-length not allowed: " + keyLength +
        " for " + key);
  }
  int valueLength = value.getLength() - value.getPosition();
  if (valueLength < 0) {
    throw new IOException("Negative value-length not allowed: " +
        valueLength + " for " + value);
  }
  WritableUtils.writeVInt(out, keyLength);
  WritableUtils.writeVInt(out, valueLength);
  out.write(key.getData(), key.getPosition(), keyLength);
  out.write(value.getData(), value.getPosition(), valueLength);
  // Update bytes written
  decompressedBytesWritten += keyLength + valueLength +
      WritableUtils.getVIntSize(keyLength) +
      WritableUtils.getVIntSize(valueLength);
  ++numRecordsWritten;
}

Example 14: nextRawValue

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
public void nextRawValue(DataInputBuffer value) throws IOException {
  final byte[] valBytes = (value.getData().length < currentValueLength)
      ? new byte[currentValueLength << 1]
      : value.getData();
  int i = readData(valBytes, 0, currentValueLength);
  if (i != currentValueLength) {
    throw new IOException("Asked for " + currentValueLength + " Got: " + i);
  }
  value.reset(valBytes, currentValueLength);
  // Record the bytes read
  bytesRead += currentValueLength;
  ++recNo;
  ++numRecordsRead;
}

Example 15: write

import org.apache.hadoop.io.DataInputBuffer; // import the required package/class
/**
 * Write the given K,V to the cache.
 * Write to memcache if space is available, else write to the filecache
 * @param key
 * @param value
 * @throws IOException
 */
public void write(DataInputBuffer key, DataInputBuffer value)
    throws IOException {
  assert (key != null && value != null);
  if (fileCache.isActive()) {
    fileCache.write(key, value);
    return;
  }
  if (memCache.reserveSpace(key, value)) {
    memCache.write(key, value);
  } else {
    fileCache.activate();
    fileCache.write(key, value);
  }
}