This article collects typical usage examples of the Java class org.apache.hadoop.test.TestDir. If you are wondering what TestDir is for, how to use it, or where to find examples of it, the selected code samples below should help.
The TestDir class belongs to the org.apache.hadoop.test package. A total of 15 code examples are shown below, ordered by popularity.
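Before the examples, here is a minimal sketch of the pattern they all share: a JUnit test method is annotated with @TestDir, and the per-test directory that the framework creates is then retrieved through TestDirHelper.getTestDir(). This is only an illustrative sketch; the class name TestDirUsageSketch is made up, and extending HTestCase as the base class that activates the @TestDir rule is an assumption rather than something taken from the examples below.
import java.io.File;

import org.apache.hadoop.test.HTestCase;   // assumed helper base class that wires up the @TestDir rule
import org.apache.hadoop.test.TestDir;
import org.apache.hadoop.test.TestDirHelper;
import org.junit.Assert;
import org.junit.Test;

public class TestDirUsageSketch extends HTestCase {   // hypothetical class name

  @Test
  @TestDir   // asks the test framework to create a fresh directory for this test method
  public void usesManagedTestDir() throws Exception {
    // TestDirHelper exposes the directory created for the annotated test
    File testDir = TestDirHelper.getTestDir();
    Assert.assertTrue(testDir.isDirectory());
  }
}
The real examples below show the annotation in its original context inside the HttpFS test suites.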
Example 1: testOperationDoAs
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testOperationDoAs() throws Exception {
  createHttpFSServer();
  UserGroupInformation ugi = UserGroupInformation.createProxyUser(HadoopUsersConfTestHelper.getHadoopUsers()[0],
      UserGroupInformation.getCurrentUser());
  ugi.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      // "operation" is a member of the enclosing test class, not shown in this excerpt
      operation(operation);
      return null;
    }
  });
}
Example 2: testWithXAttrs
import org.apache.hadoop.test.TestDir; // import the required package/class
/**
 * Ensure that GETXATTRS, SETXATTR, REMOVEXATTR fail.
 */
@Test
@TestDir
@TestJetty
@TestHdfs
public void testWithXAttrs() throws Exception {
  final String name1 = "user.a1";
  final byte[] value1 = new byte[]{0x31, 0x32, 0x33};
  final String dir = "/noXAttr";
  final String path = dir + "/file";
  // startMiniDFS(), createHttpFSServer() and nnConf are members of the enclosing test class, not shown here
  startMiniDFS();
  createHttpFSServer();
  FileSystem fs = FileSystem.get(nnConf);
  fs.mkdirs(new Path(dir));
  OutputStream os = fs.create(new Path(path));
  os.write(1);
  os.close();
  /* GETXATTRS, SETXATTR, REMOVEXATTR fail */
  getStatus(path, "GETXATTRS");
  putCmd(path, "SETXATTR", TestHttpFSServer.setXAttrParam(name1, value1));
  putCmd(path, "REMOVEXATTR", "xattr.name=" + name1);
}
Example 3: testValidHttpFSAccess
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testValidHttpFSAccess() throws Exception {
  createHttpFSServer();
  KerberosTestUtils.doAsClient(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      URL url = new URL(TestJettyHelper.getJettyURL(),
          "/webhdfs/v1/?op=GETHOMEDIRECTORY");
      AuthenticatedURL aUrl = new AuthenticatedURL();
      AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
      HttpURLConnection conn = aUrl.openConnection(url, aToken);
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
      return null;
    }
  });
}
Example 4: testHdfsAccess
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testHdfsAccess() throws Exception {
  createHttpFSServer(false);
  String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
  URL url = new URL(TestJettyHelper.getJettyURL(),
      MessageFormat.format("/webhdfs/v1/?user.name={0}&op=liststatus", user));
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
  BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
  reader.readLine();
  reader.close();
}
Example 5: testGlobFilter
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testGlobFilter() throws Exception {
  createHttpFSServer(false);
  FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
  fs.mkdirs(new Path("/tmp"));
  fs.create(new Path("/tmp/foo.txt")).close();
  String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
  URL url = new URL(TestJettyHelper.getJettyURL(),
      MessageFormat.format("/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
  BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
  reader.readLine();
  reader.close();
}
Example 6: testOpenOffsetLength
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testOpenOffsetLength() throws Exception {
  createHttpFSServer(false);
  byte[] array = new byte[]{0, 1, 2, 3};
  FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
  fs.mkdirs(new Path("/tmp"));
  OutputStream os = fs.create(new Path("/tmp/foo"));
  os.write(array);
  os.close();
  String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
  URL url = new URL(TestJettyHelper.getJettyURL(),
      MessageFormat.format("/webhdfs/v1/tmp/foo?user.name={0}&op=open&offset=1&length=2", user));
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  InputStream is = conn.getInputStream();
  Assert.assertEquals(1, is.read());
  Assert.assertEquals(2, is.read());
  Assert.assertEquals(-1, is.read());
}
Example 7: testPutNoOperation
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
@TestJetty
@TestHdfs
public void testPutNoOperation() throws Exception {
  createHttpFSServer(false);
  String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
  URL url = new URL(TestJettyHelper.getJettyURL(),
      MessageFormat.format("/webhdfs/v1/foo?user.name={0}", user));
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setDoInput(true);
  conn.setDoOutput(true);
  conn.setRequestMethod("PUT");
  Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
}
Example 8: lifecycle
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
public void lifecycle() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  System.setProperty("TestServerWebApp1.home.dir", dir);
  System.setProperty("TestServerWebApp1.config.dir", dir);
  System.setProperty("TestServerWebApp1.log.dir", dir);
  System.setProperty("TestServerWebApp1.temp.dir", dir);
  ServerWebApp server = new ServerWebApp("TestServerWebApp1") {
  };
  assertEquals(server.getStatus(), Server.Status.UNDEF);
  server.contextInitialized(null);
  assertEquals(server.getStatus(), Server.Status.NORMAL);
  server.contextDestroyed(null);
  assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
}
Example 9: testResolveAuthority
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
public void testResolveAuthority() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  System.setProperty("TestServerWebApp3.home.dir", dir);
  System.setProperty("TestServerWebApp3.config.dir", dir);
  System.setProperty("TestServerWebApp3.log.dir", dir);
  System.setProperty("TestServerWebApp3.temp.dir", dir);
  System.setProperty("testserverwebapp3.http.hostname", "localhost");
  System.setProperty("testserverwebapp3.http.port", "14000");
  ServerWebApp server = new ServerWebApp("TestServerWebApp3") {
  };
  InetSocketAddress address = server.resolveAuthority();
  Assert.assertEquals("localhost", address.getHostName());
  Assert.assertEquals(14000, address.getPort());
}
Example 10: noKerberosKeytabProperty
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosKeytabProperty() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", " ");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
Example 11: noKerberosPrincipalProperty
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosPrincipalProperty() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", " ");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
Example 12: kerberosInitializationFailure
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H02.*")
@TestDir
public void kerberosInitializationFailure() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", "[email protected]");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
Example 13: serviceHadoopConf
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
public void serviceHadoopConf() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
  // the assertion assumes a Hadoop configuration defining foo=FOO was placed in the test dir
  // by setup code that is not part of this excerpt
  Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "FOO");
  server.destroy();
}
Example 14: NameNodeNotinWhitelists
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestException(exception = FileSystemAccessException.class, msgRegExp = "H05.*")
@TestDir
public void NameNodeNotinWhitelists() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.name.node.whitelist", "NN");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
  fsAccess.validateNamenode("NNx");
}
Example 15: loadingSysPropConfig
import org.apache.hadoop.test.TestDir; // import the required package/class
@Test
@TestDir
public void loadingSysPropConfig() throws Exception {
  try {
    System.setProperty("testserver.a", "sysprop");
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    File configFile = new File(dir, "testserver-site.xml");
    Writer w = new FileWriter(configFile);
    w.write("<configuration><property><name>testserver.a</name><value>site</value></property></configuration>");
    w.close();
    Server server = new Server("testserver", dir, dir, dir, dir);
    server.init();
    assertEquals(server.getConfig().get("testserver.a"), "sysprop");
  } finally {
    System.getProperties().remove("testserver.a");
  }
}