This article collects typical usage examples of the Java class org.apache.hadoop.io.file.tfile.TFile.Reader. If you are unsure what the Reader class is for or how to use it, the curated examples below may help.
The Reader class belongs to the org.apache.hadoop.io.file.tfile.TFile package. A total of 13 code examples of the Reader class are shown below, sorted by popularity by default.
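Before the individual examples, here is a minimal, self-contained sketch of the write-then-read round trip that the examples below build on. It is an illustration only, not code taken from the examples: the output path, keys, values, and block size are made up, and it uses a fresh Configuration and the local FileSystem rather than the fs, conf, and path fields defined in the test classes the examples come from.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.file.tfile.TFile;
import org.apache.hadoop.io.file.tfile.TFile.Reader;
import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
import org.apache.hadoop.io.file.tfile.TFile.Writer;

public class TFileReaderSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("/tmp/demo.tfile"); // hypothetical output path

    // write a small sorted TFile (gzip compression, memcmp key ordering)
    FSDataOutputStream fout = fs.create(path);
    Writer writer = new Writer(fout, 64 * 1024, TFile.COMPRESSION_GZ,
        TFile.COMPARATOR_MEMCMP, conf);
    writer.append("key1".getBytes(), "value1".getBytes());
    writer.append("key2".getBytes(), "value2".getBytes());
    writer.close();
    fout.close();

    // read it back: the Reader takes the open stream, the file length, and the conf
    FSDataInputStream fin = fs.open(path);
    Reader reader = new Reader(fin, fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    try {
      while (!scanner.atEnd()) {
        byte[] key = new byte[scanner.entry().getKeyLength()];
        byte[] value = new byte[scanner.entry().getValueLength()];
        scanner.entry().getKey(key);
        scanner.entry().getValue(value);
        System.out.println(new String(key) + " => " + new String(value));
        scanner.advance();
      }
    } finally {
      scanner.close();
      reader.close();
      fin.close();
    }
  }
}

The pattern to notice, and the one every example below repeats, is that a Reader is constructed from an open input stream plus the file length obtained from the FileSystem, and entries are consumed through a Scanner that is closed before the Reader.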
Example 1: unsortedWithSomeCodec
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");

  // write an unsorted TFile with the given compression codec
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();

  // read every record back through a scanner
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader =
      new Reader(fs.open(uTfile), fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();

  fs.delete(uTfile, true);
}
Example 2: testFailureNegativeOffset
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // a negative offset into the key buffer must be rejected
    scanner.entry().getKey(buf, -1);
    fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  }
  scanner.close();
  reader.close();
}
Example 3: testFailureGetNonExistentMetaBlock
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());

  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();

  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  // the existing metablock can be read back ...
  DataInputStream mb = reader.getMetaBlock("testX");
  Assert.assertNotNull(mb);
  mb.close();
  // ... but asking for a non-existent one must fail
  try {
    DataInputStream mbBad = reader.getMetaBlock("testY");
    Assert.fail("Error on handling non-existent metablocks.");
  } catch (Exception e) {
    // noop, expecting exceptions
  }
  reader.close();
}
Example 4: testFailureReadValueManyTimes
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] vbuf = new byte[BUF_SIZE];
  int vlen = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf);
  Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
  try {
    // the value of an entry may only be read once
    scanner.entry().getValue(vbuf);
    Assert.fail("Cannot get the value multiple times.");
  } catch (Exception e) {
    // noop, expecting exceptions
  }
  scanner.close();
  reader.close();
}
Example 5: testFailureOpenEmptyFile
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create an absolutely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // noop, expecting exceptions
  }
}
Example 6: testFailureOpenRandomFile
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a file filled with random bytes
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB of data
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
Example 7: testFailureNegativeOffset_2
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // lowerBound with a negative key offset must be rejected
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
Example 8: testFailureNegativeLength_2
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // lowerBound with a negative key length must be rejected
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
Example 9: readValueBeforeKey
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // read the value first ...
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);

    // ... then the key of the same entry
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 10: readValueWithoutKey
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // read the value of the current entry without ever touching its key
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
        + (recordIndex + 1));
  }
  scanner.close();
  reader.close();
}
Example 11: testFailureScannerWithKeys
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
@Test
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    // a key-bounded scanner is only allowed on sorted TFiles
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    Assert.fail("Failed to catch creating scanner with keys on unsorted file.");
  } catch (RuntimeException e) {
    // noop, expecting exceptions
  } finally {
    reader.close();
  }
}
Example 12: testFailureNegativeOffset
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // a negative offset into the key buffer must be rejected
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  }
  scanner.close();
  reader.close();
}
Example 13: testFailureScannerWithKeys
import org.apache.hadoop.io.file.tfile.TFile.Reader; // import the required package/class
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    // a key-bounded scanner is only allowed on sorted TFiles
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    Assert.fail("Failed to catch creating scanner with keys on unsorted file.");
  } catch (RuntimeException e) {
    // noop, expecting exceptions
  } finally {
    reader.close();
  }
}