当前位置: 首页>>代码示例>>Java>>正文


Java WebHdfsTestUtil.getWebHdfsFileSystem方法代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.web.WebHdfsTestUtil.getWebHdfsFileSystem方法的典型用法代码示例。如果您正苦于以下问题:Java WebHdfsTestUtil.getWebHdfsFileSystem方法的具体用法?Java WebHdfsTestUtil.getWebHdfsFileSystem怎么用?Java WebHdfsTestUtil.getWebHdfsFileSystem使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hdfs.web.WebHdfsTestUtil的用法示例。


在下文中一共展示了WebHdfsTestUtil.getWebHdfsFileSystem方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: testWriteReadUsingWebHdfs

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@Test
public void testWriteReadUsingWebHdfs() throws Exception {
  // File length spans all data blocks of a stripe, plus one extra cell, plus
  // a few stray bytes, so the reads below hit both aligned and unaligned
  // boundaries of the striped layout.
  int fileLength = blockSize * dataBlocks + cellSize + 123;

  final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
  FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
      WebHdfsConstants.WEBHDFS_SCHEME);
  Path srcPath = new Path("/testWriteReadUsingWebHdfs");
  // Fix: pass an explicit charset to the byte[] -> String conversion. The
  // no-arg String(byte[]) constructor uses the platform default charset, so
  // on a JVM whose default encoding is not ASCII-compatible the written
  // content would not round-trip and the verifyStatefulRead checks below
  // would fail spuriously. UTF-8 is byte-identical to the original behavior
  // for ASCII payloads (generateBytes appears to emit ASCII-range bytes —
  // NOTE(review): confirm against StripedFileTestUtil.generateBytes).
  DFSTestUtil.writeFile(fs, srcPath,
      new String(expected, java.nio.charset.StandardCharsets.UTF_8));

  StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);

  byte[] smallBuf = new byte[1024];
  byte[] largeBuf = new byte[fileLength + 100];
  // TODO: HDFS-8797
  //StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);

  // Stateful (sequential) reads with both a small and an oversized buffer,
  // plus seek verification; positional read is disabled pending HDFS-8797.
  StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
  StripedFileTestUtil.verifySeek(fs, srcPath, fileLength);
  StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
  // webhdfs doesn't support bytebuffer read
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:23,代码来源:TestWriteReadStripedFile.java

示例2: beforeClassSetup

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@BeforeClass
public static void beforeClassSetup() throws Exception {
  // Cluster configuration: turn on WebHDFS, zero the umask so created files
  // keep exactly the permissions requested, and set the max path-component
  // length to 0 (presumably disabling the limit — confirm against
  // DFSConfigKeys docs) so long symlink names are accepted.
  final Configuration clusterConf = new HdfsConfiguration();
  clusterConf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  clusterConf.set(FsPermission.UMASK_LABEL, "000");
  clusterConf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 0);
  // Start the mini cluster before building clients; the WebHDFS client is
  // constructed from the same configuration object.
  cluster = new MiniDFSCluster.Builder(clusterConf).build();
  webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(clusterConf, WebHdfsFileSystem.SCHEME);
  dfs = cluster.getFileSystem();
}
 
开发者ID:naver,项目名称:hadoop,代码行数:11,代码来源:TestSymlinkHdfs.java

示例3: beforeClassSetup

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@BeforeClass
public static void beforeClassSetup() throws Exception {
  // Configure the test cluster: no umask (files keep requested permissions)
  // and path-component length cap set to 0 (presumably unlimited — confirm
  // against DFSConfigKeys docs).
  final Configuration testConf = new HdfsConfiguration();
  testConf.set(FsPermission.UMASK_LABEL, "000");
  testConf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 0);
  // Bring up the mini cluster first, then derive both client views:
  // a WebHDFS FileSystem and the cluster's native DFS handle.
  cluster = new MiniDFSCluster.Builder(testConf).build();
  webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(testConf, WebHdfsConstants.WEBHDFS_SCHEME);
  dfs = cluster.getFileSystem();
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:10,代码来源:TestSymlinkHdfs.java

示例4: beforeClassSetup

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@BeforeClass
public static void beforeClassSetup() throws Exception {
  // Enable WebHDFS and zero the umask so test files get exactly the
  // permissions they request.
  final Configuration cfg = new HdfsConfiguration();
  cfg.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  cfg.set(FsPermission.UMASK_LABEL, "000");
  // Start the mini cluster, then create the two filesystem clients the
  // tests use: a WebHDFS client and the native DFS client.
  cluster = new MiniDFSCluster.Builder(cfg).build();
  webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(cfg);
  dfs = cluster.getFileSystem();
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:10,代码来源:TestSymlinkHdfs.java

示例5: testSetUp

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@BeforeClass
public static void testSetUp() throws Exception {
  // Cluster config: WebHDFS enabled, umask zeroed so permissions are
  // exactly as requested by the tests.
  final Configuration setupConf = new HdfsConfiguration();
  setupConf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
  setupConf.set(FsPermission.UMASK_LABEL, "000");
  // Boot the mini cluster, then build the three client views used by the
  // symlink tests: a FileContext, a WebHDFS FileSystem, and native DFS.
  cluster = new MiniDFSCluster.Builder(setupConf).build();
  fc = FileContext.getFileContext(cluster.getURI(0));
  webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(setupConf);
  dfs = cluster.getFileSystem();
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:11,代码来源:TestFcHdfsSymlink.java

示例6: testReadWriteUsingWebHdfs

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@Test(timeout = 120000)
public void testReadWriteUsingWebHdfs() throws Exception {
  final HdfsAdmin admin =
      new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
  final FileSystem webFs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
      WebHdfsFileSystem.SCHEME);

  // Root an encryption zone at /zone using the shared test key.
  final Path zoneDir = new Path("/zone");
  fs.mkdirs(zoneDir);
  admin.createEncryptionZone(zoneDir, TEST_KEY);

  // Baseline: an unencrypted file written through webhdfs.
  final Path plainFile = new Path("/unenc");
  final int fileLen = 8192;
  DFSTestUtil.createFile(webFs, plainFile, fileLen, (short) 1, 0xFEED);

  // Write the same content inside the zone via webhdfs, then compare it to
  // the baseline through both the webhdfs and DFS read paths.
  final Path encViaWeb = new Path(zoneDir, "myfile");
  DFSTestUtil.createFile(webFs, encViaWeb, fileLen, (short) 1, 0xFEED);
  verifyFilesEqual(webFs, plainFile, encViaWeb, fileLen);
  verifyFilesEqual(fs, plainFile, encViaWeb, fileLen);

  // Repeat, but create the encrypted file through DFS instead.
  final Path encViaDfs = new Path(zoneDir, "myfile2");
  DFSTestUtil.createFile(fs, encViaDfs, fileLen, (short) 1, 0xFEED);
  verifyFilesEqual(webFs, plainFile, encViaDfs, fileLen);
  verifyFilesEqual(fs, plainFile, encViaDfs, fileLen);

  // Append one byte to each file; all three must remain pairwise equal
  // over the first fileLen bytes when read via either client.
  appendOneByte(fs, plainFile);
  appendOneByte(webFs, encViaWeb);
  appendOneByte(fs, encViaDfs);
  verifyFilesEqual(webFs, plainFile, encViaWeb, fileLen);
  verifyFilesEqual(fs, plainFile, encViaWeb, fileLen);
  verifyFilesEqual(webFs, plainFile, encViaDfs, fileLen);
  verifyFilesEqual(fs, plainFile, encViaDfs, fileLen);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:43,代码来源:TestEncryptionZones.java

示例7: testReadWriteUsingWebHdfs

import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; //导入方法依赖的package包/类
@Test(timeout = 120000)
public void testReadWriteUsingWebHdfs() throws Exception {
  final HdfsAdmin hdfsAdmin =
      new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
  final FileSystem webClient = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
      WebHdfsConstants.WEBHDFS_SCHEME);

  // Create an encryption zone at /zone keyed with the shared test key.
  final Path encryptionZone = new Path("/zone");
  fs.mkdirs(encryptionZone);
  hdfsAdmin.createEncryptionZone(encryptionZone, TEST_KEY);

  // Unencrypted reference file, written via webhdfs.
  final Path baseline = new Path("/unenc");
  final int length = 8192;
  DFSTestUtil.createFile(webClient, baseline, length, (short) 1, 0xFEED);

  // Encrypted copy written through webhdfs; must match the baseline when
  // read back through both webhdfs and the native DFS client.
  final Path encryptedOne = new Path(encryptionZone, "myfile");
  DFSTestUtil.createFile(webClient, encryptedOne, length, (short) 1, 0xFEED);
  verifyFilesEqual(webClient, baseline, encryptedOne, length);
  verifyFilesEqual(fs, baseline, encryptedOne, length);

  // Same check with the encrypted file created through DFS instead.
  final Path encryptedTwo = new Path(encryptionZone, "myfile2");
  DFSTestUtil.createFile(fs, encryptedTwo, length, (short) 1, 0xFEED);
  verifyFilesEqual(webClient, baseline, encryptedTwo, length);
  verifyFilesEqual(fs, baseline, encryptedTwo, length);

  // Appending a byte to each file must not break equality of the first
  // `length` bytes under either read path.
  appendOneByte(fs, baseline);
  appendOneByte(webClient, encryptedOne);
  appendOneByte(fs, encryptedTwo);
  verifyFilesEqual(webClient, baseline, encryptedOne, length);
  verifyFilesEqual(fs, baseline, encryptedOne, length);
  verifyFilesEqual(webClient, baseline, encryptedTwo, length);
  verifyFilesEqual(fs, baseline, encryptedTwo, length);
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:43,代码来源:TestEncryptionZones.java


注:本文中的org.apache.hadoop.hdfs.web.WebHdfsTestUtil.getWebHdfsFileSystem方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。