当前位置: 首页>>代码示例>>Java>>正文


Java PathIsNotEmptyDirectoryException类代码示例

本文整理汇总了Java中org.apache.hadoop.fs.PathIsNotEmptyDirectoryException的典型用法代码示例。如果您正苦于以下问题:Java PathIsNotEmptyDirectoryException类的具体用法?Java PathIsNotEmptyDirectoryException怎么用?Java PathIsNotEmptyDirectoryException使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


PathIsNotEmptyDirectoryException类属于org.apache.hadoop.fs包,在下文中一共展示了PathIsNotEmptyDirectoryException类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: delete

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Remove a file or directory from the namespace.
 * <p>
 * Large directories are deleted incrementally: the blocks underneath are
 * collected and released a few at a time while holding the
 * {@link FSNamesystem} lock. Small directories and plain files are removed
 * in a single step.
 *
 * @param fsn namespace
 * @param src path name to be deleted
 * @param recursive true to apply to all sub-directories recursively
 * @param logRetryCache whether to record RPC ids in the edit log for
 *          retry-cache rebuilding
 * @return blocks collected from the deleted path
 * @throws IOException including {@link PathIsNotEmptyDirectoryException}
 *         for a non-recursive delete of a non-empty directory
 */
static BlocksMapUpdateInfo delete(
    FSNamesystem fsn, String src, boolean recursive, boolean logRetryCache)
    throws IOException {
  final FSDirectory dir = fsn.getFSDirectory();
  final FSPermissionChecker perm = dir.getPermissionChecker();
  final byte[][] components = FSDirectory.getPathComponentsForReservedPath(src);
  src = dir.resolvePath(perm, src, components);

  final INodesInPath iip = dir.getINodesInPath4Write(src, false);
  // A non-recursive delete must not remove a directory that has children.
  if (!recursive && dir.isNonEmptyDirectory(iip)) {
    throw new PathIsNotEmptyDirectoryException(src + " is non empty");
  }
  if (dir.isPermissionEnabled()) {
    dir.checkPermission(perm, iip, false, null, FsAction.WRITE, null,
        FsAction.ALL, true);
  }
  // For recursive deletes of non-empty directories, refuse to remove any
  // protected descendants before touching the tree.
  if (recursive && dir.isNonEmptyDirectory(iip)) {
    checkProtectedDescendants(dir, dir.normalizePath(src));
  }

  return deleteInternal(fsn, src, iip, logRetryCache);
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:40,代码来源:FSDirDeleteOp.java

示例2: testChildDeletion

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
@Test
public void testChildDeletion() throws Throwable {
  // A parent application record with one container record underneath it.
  ServiceRecord parent = createRecord("app1",
      PersistencePolicies.APPLICATION, "app", null);
  ServiceRecord child = createRecord("container1",
      PersistencePolicies.CONTAINER, "container", null);

  operations.bind("/app", parent, BindFlags.OVERWRITE);
  operations.bind("/app/container", child, BindFlags.OVERWRITE);

  try {
    int purged = purge("/",
        "app1",
        PersistencePolicies.APPLICATION,
        RegistryAdminService.PurgePolicy.FailOnChildren);
    fail("expected a failure, got a purge count of " + purged);
  } catch (PathIsNotEmptyDirectoryException expected) {
    // the child entry blocks the purge under FailOnChildren, as intended
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:24,代码来源:TestRegistryRMOperations.java

示例3: delete

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Remove a file or directory from the namespace.
 * <p>
 * Large directories are deleted incrementally: the blocks underneath are
 * collected and released a few at a time while holding the
 * {@link FSNamesystem} lock. Small directories and plain files are removed
 * in a single step.
 *
 * @param fsn namespace
 * @param src path name to be deleted
 * @param recursive true to apply to all sub-directories recursively
 * @param logRetryCache whether to record RPC ids in the edit log for
 *          retry-cache rebuilding
 * @return blocks collected from the deleted path
 * @throws IOException including {@link PathIsNotEmptyDirectoryException}
 *         for a non-recursive delete of a non-empty directory
 */
static BlocksMapUpdateInfo delete(
    FSNamesystem fsn, String src, boolean recursive, boolean logRetryCache)
    throws IOException {
  final FSDirectory dir = fsn.getFSDirectory();
  final FSPermissionChecker perm = dir.getPermissionChecker();
  final byte[][] components = FSDirectory.getPathComponentsForReservedPath(src);
  src = dir.resolvePath(perm, src, components);

  final INodesInPath iip = dir.getINodesInPath4Write(src, false);
  // A non-recursive delete must not remove a directory that has children.
  if (!recursive && dir.isNonEmptyDirectory(iip)) {
    throw new PathIsNotEmptyDirectoryException(src + " is non empty");
  }
  if (dir.isPermissionEnabled()) {
    dir.checkPermission(perm, iip, false, null, FsAction.WRITE, null,
        FsAction.ALL, true);
  }

  return deleteInternal(fsn, src, iip, logRetryCache);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:30,代码来源:FSDirDeleteOp.java

示例4: testDeleteFailsIfNonRecursive

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Non-recursive delete of a non-empty directory must fail and leave the
 * tree untouched.
 *
 * @throws Exception If failed.
 */
public void testDeleteFailsIfNonRecursive() throws Exception {
    Path root = new Path(primaryFsUri);
    final Path innerMost = new Path(root, "/someDir1/someDir2/someDir3");

    // Create (and immediately close) a file deep in the hierarchy so that
    // the intermediate directory is non-empty.
    fs.create(innerMost, EnumSet.noneOf(CreateFlag.class),
        Options.CreateOpts.perms(FsPermission.getDefault())).close();

    final Path nonEmptyDir = new Path(root, "/someDir1/someDir2");

    GridTestUtils.assertThrows(log, new Callable<Object>() {
        @Override public Object call() throws Exception {
            fs.delete(nonEmptyDir, false);

            return null;
        }
    }, PathIsNotEmptyDirectoryException.class, null);

    // The failed delete must not have removed anything.
    assertPathExists(fs, nonEmptyDir);
    assertPathExists(fs, innerMost);
}
 
开发者ID:apache,项目名称:ignite,代码行数:24,代码来源:HadoopIgfs20FileSystemAbstractSelfTest.java

示例5: processPath

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
@Override
protected void processPath(PathData item) throws IOException {
  // rmdir only applies to directories.
  if (!item.stat.isDirectory()) {
    throw new PathIsNotDirectoryException(item.toString());
  }
  boolean isEmpty = item.fs.listStatus(item.path).length == 0;
  if (isEmpty) {
    // Non-recursive delete of an empty directory; a false return means
    // the filesystem refused the operation.
    if (!item.fs.delete(item.path, false)) {
      throw new PathIOException(item.toString());
    }
  } else if (!ignoreNonEmpty) {
    throw new PathIsNotEmptyDirectoryException(item.toString());
  }
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:14,代码来源:Delete.java

示例6: testDeleteNonEmpty

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
@Test
public void testDeleteNonEmpty() throws Throwable {
  putExampleServiceEntry(ENTRY_PATH, 0);
  // A non-recursive delete of a parent that has children must be rejected.
  try {
    operations.delete(PARENT_PATH, false);
    fail("Expected a failure");
  } catch (PathIsNotEmptyDirectoryException expected) {
    // rejected as required
  }
  // The recursive form removes the parent and everything beneath it.
  operations.delete(PARENT_PATH, true);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:12,代码来源:TestRegistryOperations.java

示例7: testRMNonRf

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
@Test
public void testRMNonRf() throws Throwable {
  mkPath("/rm", CreateMode.PERSISTENT);
  mkPath("/rm/child", CreateMode.PERSISTENT);
  // Deleting a node with a child, recursive == false, must be rejected.
  try {
    curatorService.zkDelete("/rm", false, null);
    fail("expected a failure");
  } catch (PathIsNotEmptyDirectoryException expected) {
    // rejected as required
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:12,代码来源:TestCuratorService.java

示例8: cast

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Translate a Hadoop IO exception into the corresponding IGFS exception.
 *
 * @param msg Error message.
 * @param e IO exception.
 * @return IGFS exception.
 */
public static IgfsException cast(String msg, IOException e) {
    // NOTE: the order of checks matters — more specific Hadoop exception
    // types must be tested before their supertypes.
    if (e instanceof FileNotFoundException) {
        return new IgfsPathNotFoundException(e);
    }

    if (e instanceof ParentNotDirectoryException) {
        return new IgfsParentNotDirectoryException(msg, e);
    }

    if (e instanceof PathIsNotEmptyDirectoryException) {
        return new IgfsDirectoryNotEmptyException(e);
    }

    if (e instanceof PathExistsException) {
        return new IgfsPathAlreadyExistsException(msg, e);
    }

    // Anything else maps to the generic IGFS exception.
    return new IgfsException(msg, e);
}
 
开发者ID:apache,项目名称:ignite,代码行数:20,代码来源:HadoopIgfsSecondaryFileSystemDelegateImpl.java

示例9: deleteInternal

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Remove a file/directory from the namespace.
 * <p>
 * For large directories, deletion is incremental. The blocks under
 * the directory are collected and deleted a small number at a time holding
 * the {@link FSNamesystem} lock.
 * <p>
 * For small directory or file the deletion is done in one shot.
 *
 * @param src path to delete
 * @param recursive true to allow deletion of a non-empty directory
 * @param enforcePermission when false, the permission check is skipped
 *          (presumably for callers that have already checked — verify at
 *          call sites)
 * @param logRetryCache whether to record the RPC id in the edit log for
 *          retry-cache rebuilding
 * @return true if the path was removed; false if the directory delete
 *         reported no files removed
 * @see ClientProtocol#delete(String, boolean) for description of exceptions
 */
private boolean deleteInternal(String src, boolean recursive,
    boolean enforcePermission, boolean logRetryCache)
    throws AccessControlException, SafeModeException, UnresolvedLinkException,
           IOException {
  BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo();
  List<INode> removedINodes = new ChunkedArrayList<INode>();
  FSPermissionChecker pc = getPermissionChecker();
  checkOperation(OperationCategory.WRITE);
  byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
  boolean ret = false;

  waitForLoadingFSImage();
  writeLock();
  try {
    // Re-check under the lock: state may have changed since the first check.
    checkOperation(OperationCategory.WRITE);
    checkNameNodeSafeMode("Cannot delete " + src);
    src = resolvePath(src, pathComponents);
    // A non-recursive delete must not remove a directory with children.
    if (!recursive && dir.isNonEmptyDirectory(src)) {
      throw new PathIsNotEmptyDirectoryException(src + " is non empty");
    }
    if (enforcePermission && isPermissionEnabled) {
      checkPermission(pc, src, false, null, FsAction.WRITE, null,
          FsAction.ALL, true, false);
    }

    long mtime = now();
    // Unlink the target directory from directory tree
    long filesRemoved = dir.delete(src, collectedBlocks, removedINodes,
            mtime);
    // A negative count means nothing was deleted; report failure.
    if (filesRemoved < 0) {
      return false;
    }
    getEditLog().logDelete(src, mtime, logRetryCache);
    incrDeletedFileCount(filesRemoved);
    // Blocks/INodes will be handled later
    removePathAndBlocks(src, null, removedINodes, true);
    ret = true;
  } finally {
    writeUnlock();
  }
  // Sync the edit log and release blocks OUTSIDE the namesystem write lock,
  // so the lock is not held during the potentially long incremental cleanup.
  getEditLog().logSync(); 
  removeBlocks(collectedBlocks); // Incremental deletion of blocks
  collectedBlocks.clear();

  if (NameNode.stateChangeLog.isDebugEnabled()) {
    NameNode.stateChangeLog.debug("DIR* Namesystem.delete: "
      + src +" is removed");
  }
  return ret;
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:62,代码来源:FSNamesystem.java

示例10: delete

import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; //导入依赖的package包/类
/**
 * Delete a path.
 *
 * If the operation returns without an error then the entry has been
 * deleted.
 * @param path path to delete
 * @param recursive if true, delete any child entries as well; if false,
 * the path must have no children
 * @throws PathNotFoundException path is not in the registry.
 * @throws InvalidPathnameException the path is invalid.
 * @throws PathIsNotEmptyDirectoryException path has child entries, but
 * recursive is false.
 * @throws IOException Any other IO Exception
 *
 */
void delete(String path, boolean recursive)
    throws PathNotFoundException,
    PathIsNotEmptyDirectoryException,
    InvalidPathnameException,
    IOException;
 
开发者ID:naver,项目名称:hadoop,代码行数:20,代码来源:RegistryOperations.java


注:本文中的org.apache.hadoop.fs.PathIsNotEmptyDirectoryException类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。