
Java PostOpParam.getValue Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hdfs.web.resources.PostOpParam.getValue. If you are wondering what PostOpParam.getValue does, how to call it, or where to find usage examples, the curated code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hdfs.web.resources.PostOpParam.


Six code examples of the PostOpParam.getValue method are shown below, sorted by popularity by default.
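
Before diving into the examples, here is a minimal, self-contained sketch of how the method behaves: the raw "op" query parameter of a WebHDFS POST request is wrapped in a PostOpParam, and getValue() returns the corresponding PostOpParam.Op enum constant (APPEND, CONCAT, ...), which all the handlers below switch on. The class name PostOpParamDemo and the hard-coded string "APPEND" are illustrative assumptions for this sketch, not part of the Hadoop API.

import org.apache.hadoop.hdfs.web.resources.PostOpParam;

// Hypothetical demo class (not part of Hadoop). The string "APPEND" stands in
// for the raw "op" query parameter of a WebHDFS POST request.
public class PostOpParamDemo {
  public static void main(String[] args) {
    final PostOpParam op = new PostOpParam("APPEND");
    switch (op.getValue()) { // getValue() yields a PostOpParam.Op constant
      case APPEND:
        System.out.println("dispatch to the append handler");
        break;
      case CONCAT:
        System.out.println("dispatch to the concat handler");
        break;
      default:
        throw new UnsupportedOperationException(op + " is not supported");
    }
  }
}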

Example 1: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(final UserGroupInformation ugi,
    final DelegationParam delegation, final UserParam username,
    final DoAsParam doAsUser, final String fullpath, final PostOpParam op,
    final ConcatSourcesParam concatSrcs, final BufferSizeParam bufferSize)
    throws IOException, URISyntaxException {
  final NameNode namenode = (NameNode) context.getAttribute("name.node");

  switch (op.getValue()) {
    case APPEND: {
      // APPEND: the NameNode does not accept the file data itself; it
      // redirects the client (HTTP 307) to a DataNode that performs the append.
      final URI uri =
          redirectURI(namenode, ugi, delegation, username, doAsUser, fullpath,
              op.getValue(), -1L, -1L, bufferSize);
      return Response.temporaryRedirect(uri)
          .type(MediaType.APPLICATION_OCTET_STREAM).build();
    }
    case CONCAT: {
      // CONCAT is a metadata-only operation, completed directly through the
      // NameNode's RPC server.
      namenode.getRpcServer().concat(fullpath, concatSrcs.getAbsolutePaths());
      return Response.ok().build();
    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: hopshadoop, Project: hops, Lines: 24, Source: NamenodeWebHdfsMethods.java
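
Note how the two operations differ in this NameNode-side handler: for APPEND the NameNode never touches the file data, it only answers with a temporary redirect pointing the client at a DataNode, whose own post method (see Example 2) performs the actual write; CONCAT is a metadata-only operation and is completed directly through the NameNode's RPC server.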

Example 2: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(
    final InputStream in,
    final String nnId,
    final String fullpath,
    final PostOpParam op,
    final BufferSizeParam bufferSize
    ) throws IOException {
  final DataNode datanode = (DataNode)context.getAttribute("datanode");

  switch(op.getValue()) {
  case APPEND:
  {
    // The DataNode performs the actual append: open the file for append via
    // DFSClient and stream the request body into it.
    final Configuration conf = new Configuration(datanode.getConf());
    final int b = bufferSize.getValue(conf);
    DFSClient dfsclient = newDfsClient(nnId, conf);
    FSDataOutputStream out = null;
    try {
      out = dfsclient.append(fullpath, b, null, null);
      IOUtils.copyBytes(in, out, b);
      out.close();
      out = null;
      dfsclient.close();
      dfsclient = null;
    } finally {
      IOUtils.cleanup(LOG, out);
      IOUtils.cleanup(LOG, dfsclient);
    }
    return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
  }
  default:
    throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 34, Source: DatanodeWebHdfsMethods.java
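
A small idiom worth noting in this handler: out and dfsclient are set to null immediately after a successful close(), so the IOUtils.cleanup calls in the finally block are no-ops on the success path and only force-close the resources when an exception interrupted the normal close sequence.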

Example 3: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(
    final UserGroupInformation ugi,
    final DelegationParam delegation,
    final UserParam username,
    final DoAsParam doAsUser,
    final String fullpath,
    final PostOpParam op,
    final ConcatSourcesParam concatSrcs,
    final BufferSizeParam bufferSize
    ) throws IOException, URISyntaxException {
  final NameNode namenode = (NameNode)context.getAttribute("name.node");

  switch(op.getValue()) {
  case APPEND:
  {
    final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
        fullpath, op.getValue(), -1L, -1L, bufferSize);
    return Response.temporaryRedirect(uri).type(MediaType.APPLICATION_OCTET_STREAM).build();
  }
  case CONCAT:
  {
    namenode.getRpcServer().concat(fullpath, concatSrcs.getAbsolutePaths());
    return Response.ok().build();
  }
  default:
    throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 29, Source: NamenodeWebHdfsMethods.java

Example 4: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(
    final InputStream in,
    final UserGroupInformation ugi,
    final DelegationParam delegation,
    final InetSocketAddress nnRpcAddr,
    final String fullpath,
    final PostOpParam op,
    final BufferSizeParam bufferSize
    ) throws IOException {
  final DataNode datanode = (DataNode)context.getAttribute("datanode");

  switch(op.getValue()) {
  case APPEND:
  {
    final Configuration conf = new Configuration(datanode.getConf());
    final int b = bufferSize.getValue(conf);
    DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
    FSDataOutputStream out = null;
    try {
      out = dfsclient.append(fullpath, b, null, null);
      IOUtils.copyBytes(in, out, b);
      out.close();
      out = null;
      dfsclient.close();
      dfsclient = null;
    } finally {
      IOUtils.cleanup(LOG, out);
      IOUtils.cleanup(LOG, dfsclient);
    }
    return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
  }
  default:
    throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 36, Source: DatanodeWebHdfsMethods.java

Example 5: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(final InputStream in, final UserGroupInformation ugi,
    final DelegationParam delegation, final InetSocketAddress nnRpcAddr,
    final String fullpath, final PostOpParam op,
    final BufferSizeParam bufferSize) throws IOException {
  final DataNode datanode = (DataNode) context.getAttribute("datanode");

  switch (op.getValue()) {
    case APPEND: {
      final Configuration conf = new Configuration(datanode.getConf());
      final int b = bufferSize.getValue(conf);
      DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
      FSDataOutputStream out = null;
      try {
        out = dfsclient.append(fullpath, b, null, null);
        IOUtils.copyBytes(in, out, b);
        out.close();
        out = null;
        dfsclient.close();
        dfsclient = null;
      } finally {
        IOUtils.cleanup(LOG, out);
        IOUtils.cleanup(LOG, dfsclient);
      }
      return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: hopshadoop, Project: hops, Lines: 30, Source: DatanodeWebHdfsMethods.java
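
Examples 4 and 5 appear to be variants of the same DataNode handler as Example 2: instead of resolving the DFSClient from a NameNode ID via newDfsClient(nnId, conf), they construct it directly from the NameNode's RPC address (new DFSClient(nnRpcAddr, conf)) and still receive the UserGroupInformation and DelegationParam as explicit arguments.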

Example 6: post

import org.apache.hadoop.hdfs.web.resources.PostOpParam; // import the package/class the method depends on
private Response post(
    final UserGroupInformation ugi,
    final DelegationParam delegation,
    final UserParam username,
    final DoAsParam doAsUser,
    final String fullpath,
    final PostOpParam op,
    final ConcatSourcesParam concatSrcs,
    final BufferSizeParam bufferSize
    ) throws IOException, URISyntaxException {
  final NameNode namenode = (NameNode)context.getAttribute("name.node");

  switch(op.getValue()) {
  case APPEND:
  {
    final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
        fullpath, op.getValue(), -1L, -1L, bufferSize);
    return Response.temporaryRedirect(uri).type(MediaType.APPLICATION_OCTET_STREAM).build();
  }
  case CONCAT:
  {
    getRPCServer(namenode).concat(fullpath, concatSrcs.getAbsolutePaths());
    return Response.ok().build();
  }
  default:
    throw new UnsupportedOperationException(op + " is not supported");
  }
}
 
Developer ID: Seagate, Project: hadoop-on-lustre2, Lines: 29, Source: NamenodeWebHdfsMethods.java


Note: the org.apache.hadoop.hdfs.web.resources.PostOpParam.getValue examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and distribution or use of the code is subject to each project's license. Please do not reproduce without permission.