当前位置: 首页>>代码示例>>Java>>正文


Java Param类代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.web.resources.Param的典型用法代码示例。如果您正苦于以下问题:Java Param类的具体用法?Java Param怎么用?Java Param使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


Param类属于org.apache.hadoop.hdfs.web.resources包,在下文中一共展示了Param类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: initWebHdfs

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Wires WebHDFS into the NameNode HTTP server when it is enabled:
 * configures the user-name validation pattern, installs the configured
 * authentication filter on the WebHDFS path, and registers the Jersey
 * resource packages that serve the REST API.
 *
 * @param conf configuration supplying the user pattern and filter class
 * @throws IOException if building the filter parameters fails
 */
private void initWebHdfs(Configuration conf) throws IOException {
  if (!WebHdfsFileSystem.isEnabled(conf, HttpServer2.LOG)) {
    return; // WebHDFS is disabled; nothing to wire up
  }
  // Apply the user-name validation pattern from the configuration file.
  UserParam.setUserPattern(conf.get(
      DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
      DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));

  // Install the authentication filter on every WebHDFS URL. The filter's
  // registered name is simply its class name.
  final String filterClass = conf.get(
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT);
  final String webHdfsPathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
  final Map<String, String> filterParams = getAuthFilterParams(conf);
  HttpServer2.defineFilter(httpServer.getWebAppContext(), filterClass,
      filterClass, filterParams, new String[] { webHdfsPathSpec });
  HttpServer2.LOG.info("Added filter '" + filterClass + "' (class="
      + filterClass + ")");

  // Expose the WebHDFS REST resources and their parameter types to Jersey.
  final String resourcePackages = NamenodeWebHdfsMethods.class
      .getPackage().getName() + ";" + Param.class.getPackage().getName();
  httpServer.addJerseyResourcePackage(resourcePackages, webHdfsPathSpec);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:27,代码来源:NameNodeHttpServer.java

示例2: initWebHdfs

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Wires WebHDFS into the NameNode HTTP server: configures the user-name
 * validation pattern, installs the configured authentication filter on
 * the WebHDFS path, and registers the Jersey resource packages that
 * serve the REST API.
 *
 * @param conf configuration supplying the user pattern and filter class
 * @throws IOException if building the filter parameters fails
 */
private void initWebHdfs(Configuration conf) throws IOException {
  // Apply the user-name validation pattern from the configuration file.
  UserParam.setUserPattern(conf.get(
      HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
      HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));

  // Install the authentication filter on every WebHDFS URL. The filter's
  // registered name is simply its class name.
  final String filterClass = conf.get(
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT);
  final String webHdfsPathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
  final Map<String, String> filterParams = getAuthFilterParams(conf);
  HttpServer2.defineFilter(httpServer.getWebAppContext(), filterClass,
      filterClass, filterParams, new String[] { webHdfsPathSpec });
  HttpServer2.LOG.info("Added filter '" + filterClass + "' (class="
      + filterClass + ")");

  // Expose the WebHDFS REST resources and their parameter types to Jersey.
  final String resourcePackages = NamenodeWebHdfsMethods.class
      .getPackage().getName() + ";" + Param.class.getPackage().getName();
  httpServer.addJerseyResourcePackage(resourcePackages, webHdfsPathSpec);
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:25,代码来源:NameNodeHttpServer.java

示例3: init

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Common pre-processing for datanode WebHDFS requests: traces the call,
 * validates that a namenode id was supplied, clears the response content
 * type, and, when security is enabled, attaches the client's delegation
 * token to the UGI so subsequent RPCs to the namenode can authenticate.
 *
 * @param ugi caller's user information; receives the delegation token
 * @param delegation URL-encoded delegation token query parameter
 * @param nnId namenode identifier used to resolve the token service
 * @param path file system path parameter (used only for tracing here)
 * @param op the WebHDFS operation being served (used only for tracing)
 * @param parameters remaining query parameters (used only for tracing)
 * @throws IOException if the delegation token cannot be deserialized
 * @throws IllegalArgumentException if {@code nnId} is null
 */
private void init(final UserGroupInformation ugi,
    final DelegationParam delegation, final String nnId,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) throws IOException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
        + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
  }
  if (nnId == null) {
    throw new IllegalArgumentException(NamenodeAddressParam.NAME
        + " is not specified.");
  }

  // Clear the content type; each op handler sets its own later.
  response.setContentType(null);
  
  if (UserGroupInformation.isSecurityEnabled()) {
    // Attach the client-supplied delegation token for namenode RPC.
    final Token<DelegationTokenIdentifier> token = deserializeToken
            (delegation.getValue(), nnId);
    ugi.addToken(token);
  }
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:24,代码来源:DatanodeWebHdfsMethods.java

示例4: getAuthParameters

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Builds the authentication query parameters for a WebHDFS request:
 * a delegation-token parameter when a token is available, otherwise
 * user (and, for proxy users, doas) parameters.
 *
 * @param op the operation being issued; token operations never carry a
 *           delegation token because they must authenticate directly
 * @return the authentication parameters to append to the query string
 * @throws IOException if fetching the delegation token fails
 */
Param<?,?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
  final List<Param<?,?>> params = Lists.newArrayList();
  // Token operations require real authentication, so no delegation token
  // is attached for them.
  final Token<?> delegation =
      (UserGroupInformation.isSecurityEnabled() && !op.getRequireAuth())
          ? getDelegationToken() : null;
  if (delegation != null) {
    params.add(new DelegationParam(delegation.encodeToUrlString()));
  } else {
    UserGroupInformation effective = ugi;
    final UserGroupInformation real = effective.getRealUser();
    if (real != null) {
      // Proxy-user case: send the effective user as doas, then fall back
      // to the real (authenticating) user for the user parameter.
      params.add(new DoAsParam(effective.getShortUserName()));
      effective = real;
    }
    params.add(new UserParam(effective.getShortUserName()));
  }
  return params.toArray(new Param<?,?>[0]);
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:22,代码来源:WebHdfsFileSystem.java

示例5: init

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Common pre-processing for datanode WebHDFS requests: traces the call,
 * validates that the namenode RPC address was supplied, clears the
 * response content type, and, when security is enabled, reconstructs the
 * client's delegation token and attaches it to the UGI for namenode RPC.
 *
 * @param ugi caller's user information; receives the delegation token
 * @param delegation URL-encoded delegation token query parameter
 * @param nnRpcAddr namenode RPC address used as the token's service
 * @param path file system path parameter (used only for tracing here)
 * @param op the WebHDFS operation being served (used only for tracing)
 * @param parameters remaining query parameters (used only for tracing)
 * @throws IOException if the delegation token cannot be decoded
 * @throws IllegalArgumentException if {@code nnRpcAddr} is null
 */
private void init(final UserGroupInformation ugi,
    final DelegationParam delegation, final InetSocketAddress nnRpcAddr,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) throws IOException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
        + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
  }
  if (nnRpcAddr == null) {
    throw new IllegalArgumentException(NamenodeRpcAddressParam.NAME
        + " is not specified.");
  }

  // Clear the content type; each op handler sets its own later.
  response.setContentType(null);
  
  if (UserGroupInformation.isSecurityEnabled()) {
    // Rebuild the delegation token from its URL form, bind it to the
    // namenode's RPC service address, and tag it with the HDFS kind
    // before handing it to the UGI for RPC authentication.
    final Token<DelegationTokenIdentifier> token = 
        new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(delegation.getValue());
    SecurityUtil.setTokenService(token, nnRpcAddr);
    token.setKind(DelegationTokenIdentifier.HDFS_DELEGATION_KIND);
    ugi.addToken(token);
  }
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:27,代码来源:DatanodeWebHdfsMethods.java

示例6: init

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Common pre-processing for datanode WebHDFS requests: traces the call,
 * validates that the namenode RPC address was supplied, clears the
 * response content type, and, when security is enabled, reconstructs the
 * client's delegation token and attaches it to the UGI for namenode RPC.
 *
 * @param ugi caller's user information; receives the delegation token
 * @param delegation URL-encoded delegation token query parameter
 * @param nnRpcAddr namenode RPC address used as the token's service
 * @param path file system path parameter (used only for tracing here)
 * @param op the WebHDFS operation being served (used only for tracing)
 * @param parameters remaining query parameters (used only for tracing)
 * @throws IOException if the delegation token cannot be decoded
 * @throws IllegalArgumentException if {@code nnRpcAddr} is null
 */
private void init(final UserGroupInformation ugi,
    final DelegationParam delegation, final InetSocketAddress nnRpcAddr,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) throws IOException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path +
        ", ugi=" + ugi + Param.toSortedString(", ", parameters));
  }
  if (nnRpcAddr == null) {
    throw new IllegalArgumentException(
        NamenodeRpcAddressParam.NAME + " is not specified.");
  }

  // Clear the content type; each op handler sets its own later.
  response.setContentType(null);
  
  if (UserGroupInformation.isSecurityEnabled()) {
    // Rebuild the delegation token from its URL form, bind it to the
    // namenode's RPC service address, and tag it with the HDFS kind
    // before handing it to the UGI for RPC authentication.
    final Token<DelegationTokenIdentifier> token =
        new Token<>();
    token.decodeFromUrlString(delegation.getValue());
    SecurityUtil.setTokenService(token, nnRpcAddr);
    token.setKind(DelegationTokenIdentifier.HDFS_DELEGATION_KIND);
    ugi.addToken(token);
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:27,代码来源:DatanodeWebHdfsMethods.java

示例7: toUrl

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Builds the namenode URL for a WebHDFS operation, appending the acting
 * user and any extra query parameters. A delegation token is appended
 * for all operations except getting/renewing a delegation token, which
 * must authenticate with Kerberos directly.
 *
 * @param op the WebHDFS operation
 * @param fspath file system path; {@code null} means the root path
 * @param parameters additional query parameters
 * @return the fully assembled namenode URL
 * @throws IOException if URL construction fails
 */
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  // Path component: default to the root when no file path is given.
  final String uriPath = PATH_PREFIX
      + (fspath == null ? "/" : makeQualified(fspath).toUri().getPath());
  // Query component: operation, acting user, then caller parameters.
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  final boolean isTokenOp = op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN);
  // Token operations skip the delegation token and rely on Kerberos.
  final URL url = isTokenOp
      ? getNamenodeURL(uriPath, query)
      : getNamenodeURL(uriPath, addDt2Query(query));
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre,代码行数:23,代码来源:WebHdfsFileSystem.java

示例8: init

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Common pre-processing for datanode WebHDFS requests: traces the call,
 * clears the response content type, and, when security is enabled,
 * reconstructs the client's delegation token — binding its service to
 * the namenode RPC address taken from the datanode's configuration —
 * and attaches it to the UGI for namenode RPC.
 *
 * @param ugi caller's user information; receives the delegation token
 * @param delegation URL-encoded delegation token query parameter
 * @param path file system path parameter (used only for tracing here)
 * @param op the WebHDFS operation being served (used only for tracing)
 * @param parameters remaining query parameters (used only for tracing)
 * @throws IOException if the delegation token cannot be decoded
 */
private void init(final UserGroupInformation ugi, final DelegationParam delegation,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) throws IOException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
        + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
  }

  // Clear the content type; each op handler sets its own later.
  response.setContentType(null);
  
  if (UserGroupInformation.isSecurityEnabled()) {
    // Look up the datanode from the servlet context to learn the
    // namenode RPC address, then rebuild and bind the delegation token
    // before handing it to the UGI for RPC authentication.
    final DataNode datanode = (DataNode)context.getAttribute("datanode");
    final InetSocketAddress nnRpcAddr = NameNode.getAddress(datanode.getConf());
    final Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(delegation.getValue());
    SecurityUtil.setTokenService(token, nnRpcAddr);
    token.setKind(DelegationTokenIdentifier.HDFS_DELEGATION_KIND);
    ugi.addToken(token);
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre,代码行数:23,代码来源:DatanodeWebHdfsMethods.java

示例9: toUrl

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Test helper: delegates WebHDFS URL construction to the given file
 * system instance and logs the resulting URL for debugging.
 *
 * @param webhdfs the file system that builds the URL
 * @param op the WebHDFS operation
 * @param fspath file system path to encode into the URL
 * @param parameters additional query parameters
 * @return the URL produced by {@code webhdfs}
 * @throws IOException if URL construction fails
 */
public static URL toUrl(final WebHdfsFileSystem webhdfs,
    final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  final URL result = webhdfs.toUrl(op, fspath, parameters);
  WebHdfsTestUtil.LOG.info("url=" + result);
  return result;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:8,代码来源:WebHdfsTestUtil.java

示例10: testWebHdfsOffsetAndLength

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Verifies that an OPEN request carrying offset and length query
 * parameters returns exactly the requested byte range of the file.
 *
 * Fix: the HttpURLConnection was never disconnected, leaking the
 * underlying socket for the remainder of the test JVM's life; it is now
 * released in a finally block.
 *
 * @throws Exception if cluster setup or the HTTP round trip fails
 */
@Test
public void testWebHdfsOffsetAndLength() throws Exception{
  MiniDFSCluster cluster = null;
  final Configuration conf = WebHdfsTestUtil.createConf();
  final int OFFSET = 42;
  final int LENGTH = 512;
  final String PATH = "/foo";
  byte[] CONTENTS = new byte[1024];
  RANDOM.nextBytes(CONTENTS);
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    final WebHdfsFileSystem fs =
        WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
    // Write the fixture file, closing the stream via try-with-resources.
    try (OutputStream os = fs.create(new Path(PATH))) {
      os.write(CONTENTS);
    }
    InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
    URL url = new URL("http", addr.getHostString(), addr
        .getPort(), WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" +
        Param.toSortedString("&", new OffsetParam((long) OFFSET),
                             new LengthParam((long) LENGTH))
    );
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try {
      conn.setInstanceFollowRedirects(true);
      Assert.assertEquals(LENGTH, conn.getContentLength());
      byte[] subContents = new byte[LENGTH];
      byte[] realContents = new byte[LENGTH];
      System.arraycopy(CONTENTS, OFFSET, subContents, 0, LENGTH);
      IOUtils.readFully(conn.getInputStream(), realContents);
      Assert.assertArrayEquals(subContents, realContents);
    } finally {
      conn.disconnect(); // release the underlying socket
    }
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:37,代码来源:TestWebHDFS.java

示例11: testWebHdfsOffsetAndLength

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Verifies that an OPEN request carrying offset and length query
 * parameters returns exactly the requested byte range of the file.
 *
 * Fix: the HttpURLConnection was never disconnected, leaking the
 * underlying socket for the remainder of the test JVM's life; it is now
 * released in a finally block.
 *
 * @throws Exception if cluster setup or the HTTP round trip fails
 */
@Test
public void testWebHdfsOffsetAndLength() throws Exception{
  MiniDFSCluster cluster = null;
  final Configuration conf = WebHdfsTestUtil.createConf();
  final int OFFSET = 42;
  final int LENGTH = 512;
  final String PATH = "/foo";
  byte[] CONTENTS = new byte[1024];
  RANDOM.nextBytes(CONTENTS);
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    final WebHdfsFileSystem fs =
        WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    // Write the fixture file, closing the stream via try-with-resources.
    try (OutputStream os = fs.create(new Path(PATH))) {
      os.write(CONTENTS);
    }
    InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
    URL url = new URL("http", addr.getHostString(), addr
        .getPort(), WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" +
        Param.toSortedString("&", new OffsetParam((long) OFFSET),
                             new LengthParam((long) LENGTH))
    );
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try {
      conn.setInstanceFollowRedirects(true);
      Assert.assertEquals(LENGTH, conn.getContentLength());
      byte[] subContents = new byte[LENGTH];
      byte[] realContents = new byte[LENGTH];
      System.arraycopy(CONTENTS, OFFSET, subContents, 0, LENGTH);
      IOUtils.readFully(conn.getInputStream(), realContents);
      Assert.assertArrayEquals(subContents, realContents);
    } finally {
      conn.disconnect(); // release the underlying socket
    }
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:37,代码来源:TestWebHDFS.java

示例12: toUrl

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Builds the namenode URL for a WebHDFS operation from three pieces:
 * the raw URI path, the authentication parameters for the operation,
 * and any caller-supplied query parameters.
 *
 * @param op the WebHDFS operation
 * @param fspath file system path; {@code null} means the root path
 * @param parameters additional query parameters
 * @return the fully assembled namenode URL
 * @throws IOException if auth parameters or the URL cannot be built
 */
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  // Path component: default to the root when no file path is given.
  final String uriPath = PATH_PREFIX
      + (fspath == null ? "/" : makeQualified(fspath).toUri().getRawPath());
  // Query component: operation, auth parameters, then extra parameters.
  final String authQuery = Param.toSortedString("&", getAuthParameters(op));
  final String extraQuery = Param.toSortedString("&", parameters);
  final String query = op.toQueryString() + authQuery + extraQuery;
  final URL url = getNamenodeURL(uriPath, query);
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:15,代码来源:WebHdfsFileSystem.java

示例13: init

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Common pre-processing for NameNode WebHDFS requests: traces the
 * request and resets the response content type so each operation
 * handler can set its own.
 *
 * @param ugi caller's user information (used only for tracing here)
 * @param delegation delegation token parameter (unused in this method)
 * @param username user query parameter (used only for tracing)
 * @param doAsUser proxy-user query parameter (used only for tracing)
 * @param path file system path parameter (used only for tracing)
 * @param op the WebHDFS operation being served (used only for tracing)
 * @param parameters remaining query parameters (used only for tracing)
 */
private void init(final UserGroupInformation ugi,
    final DelegationParam delegation,
    final UserParam username, final DoAsParam doAsUser,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) {
  if (LOG.isTraceEnabled()) {
    final String msg = "HTTP " + op.getValue().getType() + ": " + op + ", "
        + path + ", ugi=" + ugi + ", " + username + ", " + doAsUser
        + Param.toSortedString(", ", parameters);
    LOG.trace(msg);
  }

  // Clear the content type; the handler for the specific op sets it later.
  response.setContentType(null);
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:15,代码来源:NamenodeWebHdfsMethods.java

示例14: redirectURI

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Builds the datanode URI that the namenode redirects a WebHDFS client
 * to. Chooses a datanode for the request, then assembles the query
 * string with one of three delegation strategies: plain user/doas
 * parameters when security is off, the client's own token when one was
 * provided, or a freshly generated token otherwise.
 *
 * @param namenode the namenode handling the original request
 * @param ugi caller's user information (used when generating a token)
 * @param delegation client-supplied delegation token parameter, if any
 * @param username user query parameter (forwarded when security is off)
 * @param doAsUser proxy-user query parameter (forwarded when security is off)
 * @param path file system path of the request
 * @param op the WebHDFS operation being redirected
 * @param openOffset read offset used when choosing a datanode for OPEN
 * @param blocksize block size hint used when choosing a datanode
 * @param parameters remaining query parameters, appended sorted
 * @return the datanode URI to redirect the client to
 * @throws URISyntaxException if the assembled URI is malformed
 * @throws IOException if datanode selection or token generation fails
 */
private URI redirectURI(final NameNode namenode,
    final UserGroupInformation ugi, final DelegationParam delegation,
    final UserParam username, final DoAsParam doAsUser,
    final String path, final HttpOpParam.Op op, final long openOffset,
    final long blocksize,
    final Param<?, ?>... parameters) throws URISyntaxException, IOException {
  final Configuration conf = (Configuration)context.getAttribute(JspHelper.CURRENT_CONF);
  final DatanodeInfo dn = chooseDatanode(namenode, path, op, openOffset,
      blocksize, conf);

  final String delegationQuery;
  if (!UserGroupInformation.isSecurityEnabled()) {
    // Security disabled: just forward the user identity parameters.
    delegationQuery = Param.toSortedString("&", doAsUser, username);
  } else if (delegation.getValue() != null) {
    // Client already holds a token; pass it through unchanged.
    delegationQuery = "&" + delegation;
  } else {
    // No token supplied: mint one for the authenticated principal.
    final Token<? extends TokenIdentifier> t = generateDelegationToken(
        namenode, ugi, request.getUserPrincipal().getName());
    delegationQuery = "&" + new DelegationParam(t.encodeToUrlString());
  }
  final String query = op.toQueryString() + delegationQuery
      + "&" + new NamenodeRpcAddressParam(namenode)
      + Param.toSortedString("&", parameters);
  final String uripath = WebHdfsFileSystem.PATH_PREFIX + path;

  final URI uri = new URI("http", null, dn.getHostName(), dn.getInfoPort(),
      uripath, query, null);
  if (LOG.isTraceEnabled()) {
    LOG.trace("redirectURI=" + uri);
  }
  return uri;
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:36,代码来源:NamenodeWebHdfsMethods.java

示例15: getAuthParameters

import org.apache.hadoop.hdfs.web.resources.Param; //导入依赖的package包/类
/**
 * Builds the authentication query parameters for a WebHDFS request.
 * When security is on and the operation is not itself a token
 * get/renew, the cached delegation token (read under this object's
 * lock) is encoded into a delegation parameter; when no token is used,
 * a doas parameter is added for proxy users. A user parameter is always
 * appended last.
 *
 * @param op the operation being issued; token operations never carry a
 *           delegation token because they must authenticate directly
 * @return the authentication parameters to append to the query string
 * @throws IOException if encoding the delegation token fails
 */
Param<?, ?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
  List<Param<?, ?>> authParams = Lists.newArrayList();
  // Skip adding delegation token for token operations because these
  // operations require authentication.
  boolean hasToken = false;
  if (UserGroupInformation.isSecurityEnabled() &&
      op != GetOpParam.Op.GETDELEGATIONTOKEN &&
      op != PutOpParam.Op.RENEWDELEGATIONTOKEN) {
    // Read and encode the cached token under the lock so a concurrent
    // token refresh cannot race with the encoding.
    synchronized (this) {
      hasToken = (delegationToken != null);
      if (hasToken) {
        final String encoded = delegationToken.encodeToUrlString();
        authParams.add(new DelegationParam(encoded));
      } // else we are talking to an insecure cluster
    }
  }
  UserGroupInformation userUgi = ugi;
  if (!hasToken) {
    UserGroupInformation realUgi = userUgi.getRealUser();
    if (realUgi != null) { // proxy user
      authParams.add(new DoAsParam(userUgi.getShortUserName()));
      userUgi = realUgi;
    }
  }
  authParams.add(new UserParam(userUgi.getShortUserName()));
  return authParams.toArray(new Param<?, ?>[0]);
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:28,代码来源:WebHdfsFileSystem.java


注:本文中的org.apache.hadoop.hdfs.web.resources.Param类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。