本文整理汇总了Java中org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op方法的典型用法代码示例。如果您正苦于以下问题:Java HttpOpParam.Op方法的具体用法?Java HttpOpParam.Op怎么用?Java HttpOpParam.Op使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hdfs.web.resources.HttpOpParam
的用法示例。
在下文中一共展示了HttpOpParam.Op方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: validateResponse
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Validates the HTTP response of a completed WebHDFS request.
 *
 * If the status code matches the code expected for {@code op}, returns
 * {@code null}. Otherwise attempts to parse the JSON error body: a body
 * containing a RemoteException is rethrown (optionally unwrapped to an
 * IOException), any other non-null body is returned to the caller, and an
 * unreadable or empty body produces an IOException describing the mismatch.
 *
 * @param op the operation whose expected status code is checked
 * @param conn the connection the request was issued on
 * @param unwrapException if true, convert a RemoteException to a local
 *        IOException before throwing
 * @return the parsed JSON body on an unexpected-but-parseable response
 *         without a RemoteException, or null when the status code matched
 * @throws IOException on an unexpected status code or a server-side error
 */
private static Map<?, ?> validateResponse(final HttpOpParam.Op op,
final HttpURLConnection conn, boolean unwrapException) throws IOException {
final int code = conn.getResponseCode();
if (code != op.getExpectedHttpResponseCode()) {
// Unexpected status: try to read the JSON error body for details.
final Map<?, ?> m;
try {
m = jsonParse(conn, true);
} catch(Exception e) {
// Error body unreadable; report the raw status/message instead,
// preserving the parse failure as the cause.
throw new IOException("Unexpected HTTP response: code=" + code + " != "
+ op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
+ ", message=" + conn.getResponseMessage(), e);
}
if (m == null) {
// No body at all — same mismatch report, no cause to attach.
throw new IOException("Unexpected HTTP response: code=" + code + " != "
+ op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
+ ", message=" + conn.getResponseMessage());
} else if (m.get(RemoteException.class.getSimpleName()) == null) {
// Parseable body without a RemoteException: hand it back as-is.
return m;
}
// Body carries a server-side RemoteException; rethrow it, unwrapped
// to a plain IOException when the caller asked for that.
final RemoteException re = JsonUtil.toRemoteException(m);
throw unwrapException? toIOException(re): re;
}
return null;
}
示例2: getAuthParameters
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Builds the authentication query parameters for a WebHDFS request.
 *
 * When security is enabled and the operation does not itself require
 * Kerberos authentication, the caller's delegation token is used.
 * Otherwise the request is identified by user name, adding a doAs
 * parameter when the current UGI is a proxy user.
 *
 * @param op the operation the parameters are being built for
 * @return the authentication parameters to append to the request query
 * @throws IOException if fetching the delegation token fails
 */
Param<?,?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
  final List<Param<?,?>> params = Lists.newArrayList();
  // Token operations authenticate with Kerberos directly, so skip the
  // delegation token for them.
  Token<?> delegation = null;
  if (UserGroupInformation.isSecurityEnabled() && !op.getRequireAuth()) {
    delegation = getDelegationToken();
  }
  if (delegation == null) {
    // No token: identify the caller by name, unwrapping a proxy-user
    // UGI into doAs + real user when present.
    UserGroupInformation effective = ugi;
    final UserGroupInformation real = effective.getRealUser();
    if (real != null) {
      params.add(new DoAsParam(effective.getShortUserName()));
      effective = real;
    }
    params.add(new UserParam(effective.getShortUserName()));
  } else {
    params.add(new DelegationParam(delegation.encodeToUrlString()));
  }
  return params.toArray(new Param<?,?>[0]);
}
示例3: listStatus
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Lists the statuses of the entries under the given path via the
 * LISTSTATUS WebHDFS operation.
 *
 * @param f the directory (or file) to list
 * @return one qualified FileStatus per JSON entry in the response
 * @throws IOException if the remote call fails
 */
@Override
public FileStatus[] listStatus(final Path f) throws IOException {
  statistics.incrementReadOps(1);
  final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;
  final Map<?, ?> json = run(op, f);
  // Response shape: { "FileStatuses": { "FileStatus": [ ... ] } }
  final Map<?, ?> wrapper =
      (Map<?, ?>)json.get(FileStatus.class.getSimpleName() + "es");
  final Object[] entries =
      (Object[])wrapper.get(FileStatus.class.getSimpleName());
  final FileStatus[] result = new FileStatus[entries.length];
  int index = 0;
  for (Object entry : entries) {
    // Convert each JSON map and qualify it relative to the listed path.
    result[index++] =
        makeQualified(JsonUtil.toFileStatus((Map<?, ?>)entry, false), f);
  }
  return result;
}
示例4: write
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Wraps the connection's output stream in a buffered FSDataOutputStream
 * whose close() also validates the server's response.
 *
 * @param op the operation the stream is writing data for
 * @param conn the open connection whose output stream is wrapped
 * @param bufferSize buffer size for the intermediate BufferedOutputStream
 * @return a stream that, on close, flushes the data, checks the HTTP
 *         response, and disconnects the connection
 * @throws IOException if obtaining the output stream fails
 */
FSDataOutputStream write(final HttpOpParam.Op op,
final HttpURLConnection conn, final int bufferSize) throws IOException {
return new FSDataOutputStream(
new BufferedOutputStream(conn.getOutputStream(), bufferSize),
statistics) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Even when close() itself fails, still check the server's
// response — and always disconnect afterwards, regardless of
// whether validation throws.
try {
validateResponse(op, conn, true);
} finally {
conn.disconnect();
}
}
}
};
}
示例5: toUrl
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Builds the namenode URL for the given operation, path, and parameters.
 *
 * @param op the WebHDFS operation
 * @param fspath the target path; null maps to the filesystem root
 * @param parameters additional query parameters, sorted into the query
 * @return the full request URL
 * @throws IOException if URL construction fails
 */
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  // Build the request path; a null Path addresses the root.
  final String pathSuffix;
  if (fspath == null) {
    pathSuffix = "/";
  } else {
    pathSuffix = makeQualified(fspath).toUri().getPath();
  }
  final String path = PATH_PREFIX + pathSuffix;
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  // Token get/renew operations authenticate with Kerberos, so the
  // delegation token is left out of their query string.
  final boolean isTokenOp = op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN);
  final URL url = isTokenOp
      ? getNamenodeURL(path, query)
      : getNamenodeURL(path, addDt2Query(query));
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
示例6: create
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Creates a file at the given path via the CREATE WebHDFS operation.
 *
 * @param f the path to create
 * @param permission permission for the new file (umask applied)
 * @param overwrite whether an existing file may be replaced
 * @param bufferSize write buffer size, also used for the output stream
 * @param replication replication factor for the new file
 * @param blockSize block size for the new file
 * @param progress progress callback (unused by this implementation)
 * @return an output stream for writing the file's contents
 * @throws IOException if the remote call fails
 */
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  final Runner runner = new Runner(op, f,
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize));
  return runner.run().write(bufferSize);
}
示例7: toUrl
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Test helper: builds the request URL for an operation via the given
 * filesystem and logs it for debugging.
 *
 * @param webhdfs the filesystem used to construct the URL
 * @param op the WebHDFS operation
 * @param fspath the target path
 * @param parameters additional query parameters
 * @return the constructed request URL
 * @throws IOException if URL construction fails
 */
public static URL toUrl(final WebHdfsFileSystem webhdfs,
    final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  final URL requestUrl = webhdfs.toUrl(op, fspath, parameters);
  WebHdfsTestUtil.LOG.info("url=" + requestUrl);
  return requestUrl;
}
示例8: testJsonParseClosesInputStream
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Verifies that WebHdfsFileSystem.jsonParse closes the connection's
 * input stream, using a spy connection whose stream records close().
 */
@Test
public void testJsonParseClosesInputStream() throws Exception {
final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fileSystem;
Path file = getTestRootPath(fSys, "test/hadoop/file");
createFile(file);
// Issue a real GETHOMEDIRECTORY request to obtain a live connection.
final HttpOpParam.Op op = GetOpParam.Op.GETHOMEDIRECTORY;
final URL url = webhdfs.toUrl(op, file);
final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod(op.getType().toString());
conn.connect();
// Wrap the connection's stream so close() flips the shared
// closedInputStream flag (a field of the enclosing test class).
InputStream myIn = new InputStream(){
private HttpURLConnection localConn = conn;
@Override
public void close() throws IOException {
closedInputStream = true;
localConn.getInputStream().close();
}
@Override
public int read() throws IOException {
return localConn.getInputStream().read();
}
};
// Make jsonParse see the instrumented stream via a Mockito spy.
final HttpURLConnection spyConn = spy(conn);
doReturn(myIn).when(spyConn).getInputStream();
try {
Assert.assertFalse(closedInputStream);
WebHdfsFileSystem.jsonParse(spyConn, false);
// jsonParse must have closed the stream it was handed.
Assert.assertTrue(closedInputStream);
} catch(IOException ioe) {
junit.framework.TestCase.fail();
}
conn.disconnect();
}
示例9: toUrl
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Builds the namenode URL for the given operation, path, and parameters,
 * including the authentication parameters from getAuthParameters.
 *
 * @param op the WebHDFS operation
 * @param fspath the target path; null maps to the filesystem root
 * @param parameters additional query parameters, sorted into the query
 * @return the full request URL
 * @throws IOException if URL construction fails
 */
URL toUrl(final HttpOpParam.Op op, final Path fspath,
final Param<?,?>... parameters) throws IOException {
//initialize URI path and query
// getRawPath() keeps the URI's percent-encoded form, so already-encoded
// special characters in the path are not decoded before the request.
final String path = PATH_PREFIX
+ (fspath == null? "/": makeQualified(fspath).toUri().getRawPath());
final String query = op.toQueryString()
+ Param.toSortedString("&", getAuthParameters(op))
+ Param.toSortedString("&", parameters);
final URL url = getNamenodeURL(path, query);
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
return url;
}
示例10: getDelegationToken
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Fetches a delegation token from the namenode for the given renewer
 * and stamps it with this filesystem's namenode service address.
 *
 * @param renewer the user allowed to renew the token
 * @return the delegation token returned by the namenode
 * @throws IOException if the remote call fails
 */
@Override
public Token<DelegationTokenIdentifier> getDelegationToken(
    final String renewer) throws IOException {
  final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
  final Map<?, ?> response = run(op, null, new RenewerParam(renewer));
  final Token<DelegationTokenIdentifier> result =
      JsonUtil.toDelegationToken(response);
  // Bind the token to this namenode so renew/cancel target the right
  // service.
  SecurityUtil.setTokenService(result, nnAddr);
  return result;
}
示例11: append
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Appends to an existing file via the APPEND WebHDFS operation.
 *
 * @param f the file to append to
 * @param bufferSize write buffer size, also used for the output stream
 * @param progress progress callback (unused by this implementation)
 * @return an output stream positioned at the end of the file
 * @throws IOException if the remote call fails
 */
@Override
public FSDataOutputStream append(final Path f, final int bufferSize,
    final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PostOpParam.Op.APPEND;
  final Runner runner = new Runner(op, f, new BufferSizeParam(bufferSize));
  return runner.run().write(bufferSize);
}
示例12: cancelDelegationToken
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Cancels the given delegation token at the namenode via the
 * CANCELDELEGATIONTOKEN WebHDFS operation.
 *
 * @param token the token to cancel, sent in URL-encoded form
 * @throws IOException if the remote call fails
 */
private synchronized void cancelDelegationToken(final Token<?> token
    ) throws IOException {
  final HttpOpParam.Op op = PutOpParam.Op.CANCELDELEGATIONTOKEN;
  run(op, null, new TokenArgumentParam(token.encodeToUrlString()));
}
示例13: getContentSummary
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Retrieves the content summary of a path via the GETCONTENTSUMMARY
 * WebHDFS operation.
 *
 * @param p the path to summarize
 * @return the content summary parsed from the JSON response
 * @throws IOException if the remote call fails
 */
@Override
public ContentSummary getContentSummary(final Path p) throws IOException {
  statistics.incrementReadOps(1);
  final HttpOpParam.Op op = GetOpParam.Op.GETCONTENTSUMMARY;
  final Map<?, ?> response = run(op, p);
  return JsonUtil.toContentSummary(response);
}
示例14: getFileChecksum
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Retrieves the MD5-of-MD5-of-CRC32 checksum of a file via the
 * GETFILECHECKSUM WebHDFS operation.
 *
 * @param p the file to checksum
 * @return the checksum parsed from the JSON response
 * @throws IOException if the remote call fails
 */
@Override
public MD5MD5CRC32FileChecksum getFileChecksum(final Path p
    ) throws IOException {
  statistics.incrementReadOps(1);
  final HttpOpParam.Op op = GetOpParam.Op.GETFILECHECKSUM;
  final Map<?, ?> response = run(op, p);
  return JsonUtil.toMD5MD5CRC32FileChecksum(response);
}
示例15: rename
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; //导入方法依赖的package包/类
/**
 * Renames a path via the RENAME WebHDFS operation with the given rename
 * options.
 *
 * @param src the source path
 * @param dst the destination path, qualified before being sent
 * @param options rename options forwarded to the namenode
 * @throws IOException if the remote call fails
 */
@SuppressWarnings("deprecation")
@Override
public void rename(final Path src, final Path dst,
    final Options.Rename... options) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.RENAME;
  final String destination = makeQualified(dst).toUri().getPath();
  run(op, src, new DestinationParam(destination),
      new RenameOptionSetParam(options));
}