This article collects typical usage examples of the Java method org.apache.hadoop.fi.FiTestUtil.nextRandomInt. If you are wondering what FiTestUtil.nextRandomInt does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.fi.FiTestUtil.
Two code examples of FiTestUtil.nextRandomInt are shown below, sorted by popularity by default.
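Before the full examples, here is a minimal sketch of calling FiTestUtil.nextRandomInt on its own. It assumes nextRandomInt(min, max) returns a uniformly distributed int in the half-open range [min, max), which is why the examples below pass an exclusive upper bound such as MAX_N_PACKET + 1; the class name NextRandomIntSketch and the bounds used here are illustrative only.

import org.apache.hadoop.fi.FiTestUtil;

public class NextRandomIntSketch {
  public static void main(String[] args) {
    // Assumed semantics: a random int in [5, 11), i.e. 5..10 inclusive,
    // mirroring how the tests below compute a random packet count.
    final int nPackets = FiTestUtil.nextRandomInt(5, 10 + 1);
    // A random last-packet size in [1, 1024], mirroring nextRandomInt(1, PACKET_SIZE + 1).
    final int lastPacketSize = FiTestUtil.nextRandomInt(1, 1024 + 1);
    System.out.println("nPackets=" + nPackets + ", lastPacketSize=" + lastPacketSize);
  }
}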
Example 1: writeSeveralPackets
import org.apache.hadoop.fi.FiTestUtil; // import the package/class the method depends on
/**
 * 1. create a file with dfs
 * 2. write MIN_N_PACKET to MAX_N_PACKET packets
 * 3. close the file
 * 4. open the same file
 * 5. read the bytes and compare the results
 */
private static void writeSeveralPackets(String methodName) throws IOException {
  final Random r = FiTestUtil.RANDOM.get();
  final int nPackets = FiTestUtil.nextRandomInt(MIN_N_PACKET, MAX_N_PACKET + 1);
  final int lastPacketSize = FiTestUtil.nextRandomInt(1, PACKET_SIZE + 1);
  final int size = (nPackets - 1)*PACKET_SIZE + lastPacketSize;
  FiTestUtil.LOG.info("size=" + size + ", nPackets=" + nPackets
      + ", lastPacketSize=" + lastPacketSize);

  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf
      ).numDataNodes(REPLICATION + 2).build();
  final FileSystem dfs = cluster.getFileSystem();
  try {
    final Path p = new Path("/" + methodName + "/foo");
    final FSDataOutputStream out = createFile(dfs, p);
    final long seed = r.nextLong();
    final Random ran = new Random(seed);
    ran.nextBytes(bytes);
    out.write(bytes, 0, size);
    out.close();

    final FSDataInputStream in = dfs.open(p);
    int totalRead = 0;
    int nRead = 0;
    while ((nRead = in.read(toRead, totalRead, size - totalRead)) > 0) {
      totalRead += nRead;
    }
    Assert.assertEquals("Cannot read file.", size, totalRead);
    for (int i = 0; i < size; i++) {
      Assert.assertTrue("File content differ.", bytes[i] == toRead[i]);
    }
  } finally {
    dfs.close();
    cluster.shutdown();
  }
}
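Note that conf, bytes, toRead, createFile, PACKET_SIZE, MIN_N_PACKET, MAX_N_PACKET and REPLICATION are fields and helpers of the enclosing test class and are not part of the snippet above. For a concrete sense of the size arithmetic, here is a small, self-contained sketch with hypothetical values standing in for those constants:

public class PacketSizeArithmetic {
  public static void main(String[] args) {
    // Hypothetical stand-ins; the real constants live in the enclosing test class.
    final int packetSize = 1024;     // stands in for PACKET_SIZE
    final int nPackets = 3;          // a draw from [MIN_N_PACKET, MAX_N_PACKET]
    final int lastPacketSize = 100;  // a draw from [1, packetSize]
    // Every packet except the last is full, so the total number of bytes written is:
    final int size = (nPackets - 1) * packetSize + lastPacketSize;
    System.out.println("size=" + size);  // 2 * 1024 + 100 = 2148
  }
}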
Example 2: writeSeveralPackets
import org.apache.hadoop.fi.FiTestUtil; // import the package/class the method depends on
/**
 * 1. create a file with dfs
 * 2. write MIN_N_PACKET to MAX_N_PACKET packets
 * 3. close the file
 * 4. open the same file
 * 5. read the bytes and compare the results
 */
private static void writeSeveralPackets(String methodName)
    throws IOException {
  final Random r = FiTestUtil.RANDOM.get();
  final int nPackets =
      FiTestUtil.nextRandomInt(MIN_N_PACKET, MAX_N_PACKET + 1);
  final int lastPacketSize = FiTestUtil.nextRandomInt(1, PACKET_SIZE + 1);
  final int size = (nPackets - 1) * PACKET_SIZE + lastPacketSize;
  FiTestUtil.LOG.info(
      "size=" + size + ", nPackets=" + nPackets + ", lastPacketSize=" +
          lastPacketSize);

  final MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(REPLICATION + 2).build();
  final FileSystem dfs = cluster.getFileSystem();
  try {
    final Path p = new Path("/" + methodName + "/foo");
    final FSDataOutputStream out = createFile(dfs, p);
    final long seed = r.nextLong();
    final Random ran = new Random(seed);
    ran.nextBytes(bytes);
    out.write(bytes, 0, size);
    out.close();

    final FSDataInputStream in = dfs.open(p);
    int totalRead = 0;
    int nRead = 0;
    while ((nRead = in.read(toRead, totalRead, size - totalRead)) > 0) {
      totalRead += nRead;
    }
    Assert.assertEquals("Cannot read file.", size, totalRead);
    for (int i = 0; i < size; i++) {
      Assert.assertTrue("File content differ.", bytes[i] == toRead[i]);
    }
  } finally {
    dfs.close();
    cluster.shutdown();
  }
}