This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.web.WebHdfsTestUtil. If you are wondering what the WebHdfsTestUtil class does, how it is used, or where to find usage examples, the curated class code examples below may help.
The WebHdfsTestUtil class belongs to the org.apache.hadoop.hdfs.web package. Fifteen code examples of the WebHdfsTestUtil class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
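The examples share one pattern: a test enables WebHDFS on a MiniDFSCluster and then asks WebHdfsTestUtil for a WebHdfsFileSystem, either for the current test user (getWebHdfsFileSystem) or on behalf of a specific UserGroupInformation (getWebHdfsFileSystemAs). The sketch below is not one of the 15 examples; it merely summarizes that pattern under the assumption of a JUnit test class, and the method name and the user name "testUser" are placeholders. Note that, depending on the Hadoop version, the scheme constant is either WebHdfsFileSystem.SCHEME or WebHdfsConstants.WEBHDFS_SCHEME, as the examples below show.
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
@Test
public void sketchWebHdfsSetup() throws Exception {
  // Start a mini cluster with WebHDFS enabled.
  Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    // WebHDFS file system for the current test user.
    WebHdfsFileSystem webhdfs =
        WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    // WebHDFS file system that issues requests as a specific (placeholder) user.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("testUser");
    WebHdfsFileSystem webfs =
        WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, WebHdfsConstants.WEBHDFS_SCHEME);
    assertTrue(webfs.exists(new Path("/")));
  } finally {
    cluster.shutdown();
  }
}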
Example 1: testAuditWebHdfs
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfs() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
  InputStream istream = webfs.open(file);
  int val = istream.read();
  istream.close();
  verifyAuditLogsRepeat(true, 3);
  assertTrue("failed to read from file", val >= 0);
}
Example 2: testAuditWebHdfsStat
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that stat via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsStat() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
  FileStatus st = webfs.getFileStatus(file);
  verifyAuditLogs(true);
  assertTrue("failed to stat file", st != null && st.isFile());
}
Example 3: testAuditWebHdfsDenied
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that denied access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsDenied() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0600));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  try {
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    fail("open+read must not succeed, got " + val);
  } catch(AccessControlException E) {
    System.out.println("got access denied, as expected.");
  }
  verifyAuditLogsRepeat(false, 2);
}
Example 4: testWriteReadUsingWebHdfs
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
@Test
public void testWriteReadUsingWebHdfs() throws Exception {
  int fileLength = blockSize * dataBlocks + cellSize + 123;
  final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
  FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
      WebHdfsConstants.WEBHDFS_SCHEME);
  Path srcPath = new Path("/testWriteReadUsingWebHdfs");
  DFSTestUtil.writeFile(fs, srcPath, new String(expected));
  StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);
  byte[] smallBuf = new byte[1024];
  byte[] largeBuf = new byte[fileLength + 100];
  // TODO: HDFS-8797
  //StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);
  StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
  StripedFileTestUtil.verifySeek(fs, srcPath, fileLength);
  StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
  // webhdfs doesn't support bytebuffer read
}
Example 5: testAuditWebHdfs
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfs() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
  InputStream istream = webfs.open(file);
  int val = istream.read();
  istream.close();
  verifyAuditLogsRepeat(true, 3);
  assertTrue("failed to read from file", val >= 0);
}
Example 6: testAuditWebHdfsStat
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that stat via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsStat() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
  FileStatus st = webfs.getFileStatus(file);
  verifyAuditLogs(true);
  assertTrue("failed to stat file", st != null && st.isFile());
}
Example 7: testAuditWebHdfsDenied
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that denied access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsDenied() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0600));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  try {
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    fail("open+read must not succeed, got " + val);
  } catch(AccessControlException E) {
    System.out.println("got access denied, as expected.");
  }
  verifyAuditLogsRepeat(false, 2);
}
Example 8: testAuditWebHdfs
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfs() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
  InputStream istream = webfs.open(file);
  int val = istream.read();
  istream.close();
  verifyAuditLogsRepeat(true, 3);
  assertTrue("failed to read from file", val >= 0);
}
Example 9: testAuditWebHdfsStat
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that stat via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsStat() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
  FileStatus st = webfs.getFileStatus(file);
  verifyAuditLogs(true);
  assertTrue("failed to stat file", st != null && st.isFile());
}
Example 10: testAuditWebHdfsDenied
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that denied access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsDenied() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0600));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  try {
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    fail("open+read must not succeed, got " + val);
  } catch(AccessControlException E) {
    System.out.println("got access denied, as expected.");
  }
  verifyAuditLogsRepeat(false, 2);
}
Example 11: testAuditWebHdfs
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/**
* test that access via webhdfs puts proper entry in audit log
*/
@Test
public void testAuditWebHdfs() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short) 0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs =
      WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
  InputStream istream = webfs.open(file);
  int val = istream.read();
  istream.close();
  verifyAuditLogsRepeat(true, 3);
  assertTrue("failed to read from file", val >= 0);
}
Example 12: testAuditWebHdfsStat
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/**
* test that stat via webhdfs puts proper entry in audit log
*/
@Test
public void testAuditWebHdfsStat() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short) 0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs =
      WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
  FileStatus st = webfs.getFileStatus(file);
  verifyAuditLogs(true);
  assertTrue("failed to stat file", st != null && st.isFile());
}
Example 13: testAuditWebHdfsDenied
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/**
* test that denied access via webhdfs puts proper entry in audit log
*/
@Test
public void testAuditWebHdfsDenied() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short) 0600));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  try {
    WebHdfsFileSystem webfs =
        WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    fail("open+read must not succeed, got " + val);
  } catch (AccessControlException E) {
    System.out.println("got access denied, as expected.");
  }
  verifyAuditLogsRepeat(false, 2);
}
Example 14: beforeClassSetup
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
@BeforeClass
public static void beforeClassSetup() throws Exception {
  Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  conf.set(FsPermission.UMASK_LABEL, "000");
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 0);
  cluster = new MiniDFSCluster.Builder(conf).build();
  webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
  dfs = cluster.getFileSystem();
}
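Example 14 only shows the setup side: it enables WebHDFS, sets an open umask, and removes the path-component length limit before building the MiniDFSCluster. The matching teardown is not part of this excerpt; a plausible counterpart, assuming the static cluster, webhdfs and dfs fields declared elsewhere in the test class, could look like this:
@AfterClass
public static void afterClassTeardown() throws Exception {
  // Hypothetical teardown mirroring beforeClassSetup: close the file systems
  // and shut down the mini cluster so subsequent tests start from a clean state.
  if (webhdfs != null) {
    webhdfs.close();
  }
  if (dfs != null) {
    dfs.close();
  }
  if (cluster != null) {
    cluster.shutdown();
  }
}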
Example 15: testAuditWebHdfsOpen
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; // import the required package/class
/** test that open via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsOpen() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short)0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
  webfs.open(file);
  verifyAuditLogsCheckPattern(true, 3, webOpenPattern);
}