

Java HttpOpParam.Op Code Examples

This article collects typical usage examples of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op in Java. If you are wondering what exactly HttpOpParam.Op is, how it is used, or where to find concrete usage examples, the curated code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hdfs.web.resources.HttpOpParam.


The following presents 15 code examples that use HttpOpParam.Op, ordered by popularity by default.
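
As a quick orientation, HttpOpParam.Op is the common interface behind the concrete operation enums (GetOpParam.Op, PutOpParam.Op, PostOpParam.Op, and so on), and the examples below mostly rely on the three accessors shown here. A minimal sketch, assuming a Hadoop release in which these classes are available on the classpath:

import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.hdfs.web.resources.HttpOpParam;

public class OpDemo {
  public static void main(String[] args) {
    // Any concrete operation enum constant can be handled through the interface.
    final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;

    System.out.println(op.getType());                     // HTTP method of the operation, e.g. GET
    System.out.println(op.getExpectedHttpResponseCode()); // expected status code, e.g. 200
    System.out.println(op.toQueryString());               // query fragment, e.g. op=LISTSTATUS
  }
}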

Example 1: validateResponse

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
private static Map<?, ?> validateResponse(final HttpOpParam.Op op,
    final HttpURLConnection conn, boolean unwrapException) throws IOException {
  final int code = conn.getResponseCode();
  if (code != op.getExpectedHttpResponseCode()) {
    final Map<?, ?> m;
    try {
      m = jsonParse(conn, true);
    } catch(Exception e) {
      throw new IOException("Unexpected HTTP response: code=" + code + " != "
          + op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
          + ", message=" + conn.getResponseMessage(), e);
    }

    if (m == null) {
      throw new IOException("Unexpected HTTP response: code=" + code + " != "
          + op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
          + ", message=" + conn.getResponseMessage());
    } else if (m.get(RemoteException.class.getSimpleName()) == null) {
      return m;
    }

    final RemoteException re = JsonUtil.toRemoteException(m);
    throw unwrapException? toIOException(re): re;
  }
  return null;
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 27, Source: WebHdfsFileSystem.java
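
The RemoteException key checked above is the standard WebHDFS error wrapper. Per the WebHDFS REST API, a failed request returns a JSON body of roughly this shape (the exception class and message here are illustrative):

{
  "RemoteException": {
    "exception"    : "FileNotFoundException",
    "javaClassName": "java.io.FileNotFoundException",
    "message"      : "File does not exist: /foo/bar"
  }
}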

Example 2: getAuthParameters

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
Param<?,?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
  List<Param<?,?>> authParams = Lists.newArrayList();    
  // Skip adding delegation token for token operations because these
  // operations require authentication.
  Token<?> token = null;
  if (UserGroupInformation.isSecurityEnabled() && !op.getRequireAuth()) {
    token = getDelegationToken();
  }
  if (token != null) {
    authParams.add(new DelegationParam(token.encodeToUrlString()));
  } else {
    UserGroupInformation userUgi = ugi;
    UserGroupInformation realUgi = userUgi.getRealUser();
    if (realUgi != null) { // proxy user
      authParams.add(new DoAsParam(userUgi.getShortUserName()));
      userUgi = realUgi;
    }
    authParams.add(new UserParam(userUgi.getShortUserName()));
  }
  return authParams.toArray(new Param<?,?>[0]);
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 22, Source: WebHdfsFileSystem.java
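
For reference, a minimal sketch (assuming the same Hadoop version as the example above; the user names are illustrative) of how two of the parameter types used here serialize into WebHDFS query keys. DelegationParam, which serializes to the delegation key, is left out because it needs a real encoded token:

import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;

public class AuthParamDemo {
  public static void main(String[] args) {
    // Each Param renders as key=value, which is how the query string
    // in toUrl() (Examples 5 and 9) is assembled.
    System.out.println(new UserParam("alice")); // user.name=alice
    System.out.println(new DoAsParam("bob"));   // doas=bob
  }
}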

Example 3: listStatus

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public FileStatus[] listStatus(final Path f) throws IOException {
  statistics.incrementReadOps(1);

  final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;
  final Map<?, ?> json  = run(op, f);
  final Map<?, ?> rootmap = (Map<?, ?>)json.get(FileStatus.class.getSimpleName() + "es");
  final Object[] array = (Object[])rootmap.get(FileStatus.class.getSimpleName());

  //convert FileStatus
  final FileStatus[] statuses = new FileStatus[array.length];
  for(int i = 0; i < array.length; i++) {
    final Map<?, ?> m = (Map<?, ?>)array[i];
    statuses[i] = makeQualified(JsonUtil.toFileStatus(m, false), f);
  }
  return statuses;
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 18, Source: WebHdfsFileSystem.java
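
The "FileStatuses"/"FileStatus" keys unwrapped above come from the WebHDFS LISTSTATUS response body, which per the REST API looks roughly like this (values are illustrative and most per-file fields are elided):

{
  "FileStatuses": {
    "FileStatus": [
      {"pathSuffix": "a.txt", "type": "FILE",      "length": 24930, ...},
      {"pathSuffix": "data",  "type": "DIRECTORY", "length": 0,     ...}
    ]
  }
}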

Example 4: write

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
FSDataOutputStream write(final HttpOpParam.Op op,
    final HttpURLConnection conn, final int bufferSize) throws IOException {
  return new FSDataOutputStream(
      new BufferedOutputStream(conn.getOutputStream(), bufferSize),
      statistics) {
    @Override
    public void close() throws IOException {
      try {
        super.close();
      } finally {
        try {
          validateResponse(op, conn, true);
        } finally {
          conn.disconnect();
        }
      }
    }
  };
}
 
Developer ID: hopshadoop, Project: hops, Lines: 20, Source: WebHdfsFileSystem.java

Example 5: toUrl

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  //initialize URI path and query
  final String path = PATH_PREFIX
      + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  final URL url;
  if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
    // Skip adding delegation token for getting or renewing delegation token,
    // because these operations require kerberos authentication.
    url = getNamenodeURL(path, query);
  } else {
    url = getNamenodeURL(path, addDt2Query(query));
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
 
Developer ID: Seagate, Project: hadoop-on-lustre, Lines: 23, Source: WebHdfsFileSystem.java
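
For orientation, PATH_PREFIX in this client is "/webhdfs/v1", so a call such as toUrl(GetOpParam.Op.GETFILESTATUS, new Path("/user/alice")) yields a URL of roughly this shape (host, port, and user name are illustrative assumptions; 50070 is only the classic default NameNode HTTP port):

  http://namenode.example.com:50070/webhdfs/v1/user/alice?op=GETFILESTATUS&user.name=alice

with a delegation=<token> parameter appended by addDt2Query when a delegation token is available.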

Example 6: create

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
 
Developer ID: huiyi-learning, Project: hardfs, Lines: 17, Source: WebHdfsFileSystem.java
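
A minimal, hypothetical client-side usage sketch that exercises this create() path (the namenode host, port, and file path are assumptions; 50070 is only the classic default NameNode HTTP port):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsCreateDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The webhdfs:// scheme resolves to WebHdfsFileSystem; host and port are illustrative.
    FileSystem fs = FileSystem.get(URI.create("webhdfs://namenode.example.com:50070"), conf);
    try (FSDataOutputStream out = fs.create(new Path("/tmp/webhdfs-demo.txt"))) {
      out.writeBytes("hello over WebHDFS\n"); // written through the stream returned by create()
    }
  }
}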

Example 7: toUrl

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
public static URL toUrl(final WebHdfsFileSystem webhdfs,
    final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  final URL url = webhdfs.toUrl(op, fspath, parameters);
  WebHdfsTestUtil.LOG.info("url=" + url);
  return url;
}
 
Developer ID: naver, Project: hadoop, Lines: 8, Source: WebHdfsTestUtil.java

Example 8: testJsonParseClosesInputStream

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Test
public void testJsonParseClosesInputStream() throws Exception {
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fileSystem;
  Path file = getTestRootPath(fSys, "test/hadoop/file");
  createFile(file);
  final HttpOpParam.Op op = GetOpParam.Op.GETHOMEDIRECTORY;
  final URL url = webhdfs.toUrl(op, file);
  final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();

  InputStream myIn = new InputStream(){
    private HttpURLConnection localConn = conn;
    @Override
    public void close() throws IOException {
      closedInputStream = true;
      localConn.getInputStream().close();
    }
    @Override
    public int read() throws IOException {
      return localConn.getInputStream().read();
    }
  };
  final HttpURLConnection spyConn = spy(conn);
  doReturn(myIn).when(spyConn).getInputStream();

  try {
    Assert.assertFalse(closedInputStream);
    WebHdfsFileSystem.jsonParse(spyConn, false);
    Assert.assertTrue(closedInputStream);
  } catch(IOException ioe) {
    junit.framework.TestCase.fail();
  }
  conn.disconnect();
}
 
Developer ID: naver, Project: hadoop, Lines: 36, Source: TestFSMainOperationsWebHdfs.java

Example 9: toUrl

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  //initialize URI path and query
  final String path = PATH_PREFIX
      + (fspath == null? "/": makeQualified(fspath).toUri().getRawPath());
  final String query = op.toQueryString()
      + Param.toSortedString("&", getAuthParameters(op))
      + Param.toSortedString("&", parameters);
  final URL url = getNamenodeURL(path, query);
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 15, Source: WebHdfsFileSystem.java

Example 10: getDelegationToken

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public Token<DelegationTokenIdentifier> getDelegationToken(
    final String renewer) throws IOException {
  final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
  final Map<?, ?> m = run(op, null, new RenewerParam(renewer));
  final Token<DelegationTokenIdentifier> token =
      JsonUtil.toDelegationToken(m);
  SecurityUtil.setTokenService(token, nnAddr);
  return token;
}
 
Developer ID: hopshadoop, Project: hops, Lines: 11, Source: WebHdfsFileSystem.java

Example 11: append

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public FSDataOutputStream append(final Path f, final int bufferSize,
    final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PostOpParam.Op.APPEND;
  return new Runner(op, f, new BufferSizeParam(bufferSize))
    .run()
    .write(bufferSize);
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 11, Source: WebHdfsFileSystem.java

Example 12: cancelDelegationToken

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
private synchronized void cancelDelegationToken(final Token<?> token
    ) throws IOException {
  final HttpOpParam.Op op = PutOpParam.Op.CANCELDELEGATIONTOKEN;
  TokenArgumentParam dtargParam = new TokenArgumentParam(
      token.encodeToUrlString());
  run(op, null, dtargParam);
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 8, Source: WebHdfsFileSystem.java

Example 13: getContentSummary

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public ContentSummary getContentSummary(final Path p) throws IOException {
  statistics.incrementReadOps(1);

  final HttpOpParam.Op op = GetOpParam.Op.GETCONTENTSUMMARY;
  final Map<?, ?> m = run(op, p);
  return JsonUtil.toContentSummary(m);
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 9, Source: WebHdfsFileSystem.java

Example 14: getFileChecksum

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@Override
public MD5MD5CRC32FileChecksum getFileChecksum(final Path p
    ) throws IOException {
  statistics.incrementReadOps(1);

  final HttpOpParam.Op op = GetOpParam.Op.GETFILECHECKSUM;
  final Map<?, ?> m = run(op, p);
  return JsonUtil.toMD5MD5CRC32FileChecksum(m);
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 10, Source: WebHdfsFileSystem.java

Example 15: rename

import org.apache.hadoop.hdfs.web.resources.HttpOpParam; // import of the package/class the method depends on
@SuppressWarnings("deprecation")
@Override
public void rename(final Path src, final Path dst,
    final Options.Rename... options) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.RENAME;
  run(op, src, new DestinationParam(makeQualified(dst).toUri().getPath()),
      new RenameOptionSetParam(options));
}
 
Developer ID: hopshadoop, Project: hops, Lines: 10, Source: WebHdfsFileSystem.java


Note: The org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors; consult each project's license before redistributing or using the code. Do not republish this article without permission.