Java ParentNotDirectoryException Class Code Examples

This article collects typical usage examples of org.apache.hadoop.fs.ParentNotDirectoryException in Java. If you are wondering what the ParentNotDirectoryException class is for and how to use it, the curated code examples below may help.


The ParentNotDirectoryException class belongs to the org.apache.hadoop.fs package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
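
Before the individual examples, here is a minimal, hedged sketch (not taken from any of the projects below) of the situation that typically produces a ParentNotDirectoryException: trying to create a directory whose parent path already exists as a regular file. The class name and paths are hypothetical, and whether a given FileSystem implementation throws exactly this exception depends on how strictly it enforces the FileSystem contract.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;

public class ParentNotDirectoryDemo {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS in the Configuration points at a reachable file system;
    // the default local file system also works for a quick test.
    FileSystem fs = FileSystem.get(new Configuration());

    // Hypothetical paths used only for this illustration.
    Path file = new Path("/tmp/pnd-demo/afile");
    Path childDir = new Path(file, "subdir");

    fs.mkdirs(file.getParent());   // create /tmp/pnd-demo
    fs.create(file).close();       // create a regular file at /tmp/pnd-demo/afile

    try {
      // The parent of "subdir" is a regular file, so implementations that enforce
      // the contract are expected to fail here with ParentNotDirectoryException.
      fs.mkdirs(childDir);
    } catch (ParentNotDirectoryException expected) {
      System.out.println("Parent is not a directory: " + expected.getMessage());
    } finally {
      fs.delete(file.getParent(), true);
    }
  }
}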

Example 1: mkdirs

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Convenience method, so that we don't open a new connection when using this
 * method from within another method. Otherwise every API invocation incurs
 * the overhead of opening/closing a TCP connection.
 */
private boolean mkdirs(FTPClient client, Path file, FsPermission permission)
    throws IOException {
  boolean created = true;
  Path workDir = new Path(client.printWorkingDirectory());
  Path absolute = makeAbsolute(workDir, file);
  String pathName = absolute.getName();
  if (!exists(client, absolute)) {
    Path parent = absolute.getParent();
    created = (parent == null || mkdirs(client, parent, FsPermission
        .getDirDefault()));
    if (created) {
      String parentDir = parent.toUri().getPath();
      client.changeWorkingDirectory(parentDir);
      created = created && client.makeDirectory(pathName);
    }
  } else if (isFile(client, absolute)) {
    throw new ParentNotDirectoryException(String.format(
        "Can't make directory for path %s since it is a file.", absolute));
  }
  return created;
}
 
Developer: naver; Project: hadoop; Lines of code: 27; Source file: FTPFileSystem.java

Example 2: createInternal

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
@Override
public FSDataOutputStream createInternal(final Path f,
    final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
    final int bufferSize, final short replication, final long blockSize,
    final Progressable progress, final ChecksumOpt checksumOpt,
    final boolean createParent) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, UnsupportedFileSystemException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res;
  try {
    res = fsState.resolve(getUriPath(f), false);
  } catch (FileNotFoundException e) {
    if (createParent) {
      throw readOnlyMountTable("create", f);
    } else {
      throw e;
    }
  }
  assert(res.remainingPath != null);
  return res.targetFileSystem.createInternal(res.remainingPath, flag,
      absolutePermission, bufferSize, replication,
      blockSize, progress, checksumOpt,
      createParent);
}
 
Developer: nucypher; Project: hadoop-oss; Lines of code: 26; Source file: ViewFs.java

Example 3: createSymlink

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Creates a symbolic link.
 * 
 * @see ClientProtocol#createSymlink(String, String,FsPermission, boolean) 
 */
public void createSymlink(String target, String link, boolean createParent)
    throws IOException {
  TraceScope scope = getPathTraceScope("createSymlink", target);
  try {
    FsPermission dirPerm = 
        FsPermission.getDefault().applyUMask(dfsClientConf.uMask); 
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileAlreadyExistsException.class, 
                                   FileNotFoundException.class,
                                   ParentNotDirectoryException.class,
                                   NSQuotaExceededException.class, 
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class,
                                   SnapshotAccessControlException.class);
  } finally {
    scope.close();
  }
}
 
Developer: naver; Project: hadoop; Lines of code: 26; Source file: DFSClient.java

Example 4: rename

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Rename file or directory.
 * @see ClientProtocol#rename2(String, String, Options.Rename...)
 */
public void rename(String src, String dst, Options.Rename... options)
    throws IOException {
  checkOpen();
  TraceScope scope = getSrcDstTraceScope("rename2", src, dst);
  try {
    namenode.rename2(src, dst, options);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   DSQuotaExceededException.class,
                                   FileAlreadyExistsException.class,
                                   FileNotFoundException.class,
                                   ParentNotDirectoryException.class,
                                   SafeModeException.class,
                                   NSQuotaExceededException.class,
                                   UnresolvedPathException.class,
                                   SnapshotAccessControlException.class);
  } finally {
    scope.close();
  }
}
 
Developer: naver; Project: hadoop; Lines of code: 25; Source file: DFSClient.java

Example 5: startFile

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Create a new file entry in the namespace.
 * 
 * For description of parameters and exceptions thrown see
 * {@link ClientProtocol#create}, except it returns valid file status upon
 * success
 */
HdfsFileStatus startFile(String src, PermissionStatus permissions,
    String holder, String clientMachine, EnumSet<CreateFlag> flag,
    boolean createParent, short replication, long blockSize, 
    CryptoProtocolVersion[] supportedVersions, boolean logRetryCache)
    throws AccessControlException, SafeModeException,
    FileAlreadyExistsException, UnresolvedLinkException,
    FileNotFoundException, ParentNotDirectoryException, IOException {

  HdfsFileStatus status = null;
  try {
    status = startFileInt(src, permissions, holder, clientMachine, flag,
        createParent, replication, blockSize, supportedVersions,
        logRetryCache);
  } catch (AccessControlException e) {
    logAuditEvent(false, "create", src);
    throw e;
  }
  return status;
}
 
Developer: naver; Project: hadoop; Lines of code: 27; Source file: FSNamesystem.java

Example 6: rename2

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
@Override
public void rename2(String src, String dst, Rename... options)
    throws AccessControlException, DSQuotaExceededException,
    FileAlreadyExistsException, FileNotFoundException,
    NSQuotaExceededException, ParentNotDirectoryException, SafeModeException,
    UnresolvedLinkException, IOException {
  boolean overwrite = false;
  if (options != null) {
    for (Rename option : options) {
      if (option == Rename.OVERWRITE) {
        overwrite = true;
      }
    }
  }
  Rename2RequestProto req = Rename2RequestProto.newBuilder().
      setSrc(src).
      setDst(dst).setOverwriteDest(overwrite).
      build();
  try {
    rpcProxy.rename2(null, req);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }

}
 
Developer: naver; Project: hadoop; Lines of code: 26; Source file: ClientNamenodeProtocolTranslatorPB.java
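
For context (not part of the original example): the overwrite flag that this translator encodes into the protobuf request corresponds to the Rename.OVERWRITE option a client passes through the public API. A hedged caller-side sketch, assuming a configured default file system and hypothetical paths, might look like this.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;

public class RenameOverwriteDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(new Configuration());
    try {
      // Rename /user/demo/src onto /user/demo/dst, replacing dst if it already exists.
      // ParentNotDirectoryException is among the exceptions a caller may see, e.g.
      // when the destination's parent exists but is a regular file.
      fc.rename(new Path("/user/demo/src"), new Path("/user/demo/dst"),
          Options.Rename.OVERWRITE);
    } catch (ParentNotDirectoryException e) {
      System.err.println("Destination parent is not a directory: " + e.getMessage());
    }
  }
}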

Example 7: mkdirs

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
@Override
public boolean mkdirs(String src, FsPermission masked, boolean createParent)
    throws AccessControlException, FileAlreadyExistsException,
    FileNotFoundException, NSQuotaExceededException,
    ParentNotDirectoryException, SafeModeException, UnresolvedLinkException,
    IOException {
  MkdirsRequestProto req = MkdirsRequestProto.newBuilder()
      .setSrc(src)
      .setMasked(PBHelper.convert(masked))
      .setCreateParent(createParent).build();

  try {
    return rpcProxy.mkdirs(null, req).getResult();
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Developer: naver; Project: hadoop; Lines of code: 18; Source file: ClientNamenodeProtocolTranslatorPB.java

Example 8: createSymlink

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
@Override
public void createSymlink(String target, String link, FsPermission dirPerm,
    boolean createParent) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, SafeModeException, UnresolvedLinkException,
    IOException {
  CreateSymlinkRequestProto req = CreateSymlinkRequestProto.newBuilder()
      .setTarget(target)
      .setLink(link)
      .setDirPerm(PBHelper.convert(dirPerm))
      .setCreateParent(createParent)
      .build();
  try {
    rpcProxy.createSymlink(null, req);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Developer: naver; Project: hadoop; Lines of code: 19; Source file: ClientNamenodeProtocolTranslatorPB.java

Example 9: testCreateDirWithFileParent

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
@Test(timeout = SWIFT_TEST_TIMEOUT)
public void testCreateDirWithFileParent() throws Throwable {
  Path path = new Path("/test/CreateDirWithFileParent");
  Path child = new Path(path, "subdir/child");
  fs.mkdirs(path.getParent());
  try {
    // create a regular file at the parent path
    writeTextFile(fs, path, "parent", true);
    try {
      fs.mkdirs(child);
    } catch (ParentNotDirectoryException expected) {
      LOG.debug("Expected Exception", expected);
    }
  } finally {
    fs.delete(path, true);
  }
}
 
Developer: naver; Project: hadoop; Lines of code: 18; Source file: TestSwiftFileSystemBasicOps.java

Example 10: createSymlink

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Creates a symbolic link.
 *
 * @see ClientProtocol#createSymlink(String, String,FsPermission, boolean)
 */
public void createSymlink(String target, String link, boolean createParent)
    throws IOException {
  checkOpen();
  try (TraceScope ignored = newPathTraceScope("createSymlink", target)) {
    final FsPermission dirPerm = applyUMask(null);
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
 
Developer: aliyun-beta; Project: aliyun-oss-hadoop-fs; Lines of code: 24; Source file: DFSClient.java

Example 11: rename

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Rename file or directory.
 * @see ClientProtocol#rename2(String, String, Options.Rename...)
 */
public void rename(String src, String dst, Options.Rename... options)
    throws IOException {
  checkOpen();
  try (TraceScope ignored = newSrcDstTraceScope("rename2", src, dst)) {
    namenode.rename2(src, dst, options);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        SafeModeException.class,
        NSQuotaExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
 
Developer: aliyun-beta; Project: aliyun-oss-hadoop-fs; Lines of code: 23; Source file: DFSClient.java

Example 12: primitiveMkdir

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Same as {@link #mkdirs(String, FsPermission, boolean)}, except
 * that the permissions have already been masked against the umask.
 */
public boolean primitiveMkdir(String src, FsPermission absPermission,
    boolean createParent) throws IOException {
  checkOpen();
  if (absPermission == null) {
    absPermission = applyUMask(null);
  }

  LOG.debug("{}: masked={}", src, absPermission);
  try (TraceScope ignored = tracer.newScope("mkdir")) {
    return namenode.mkdirs(src, absPermission, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        InvalidPathException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        SafeModeException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
 
Developer: aliyun-beta; Project: aliyun-oss-hadoop-fs; Lines of code: 29; Source file: DFSClient.java

Example 13: createSymlink

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Creates a symbolic link.
 * 
 * @see ClientProtocol#createSymlink(String, String,FsPermission, boolean) 
 */
public void createSymlink(String target, String link, boolean createParent)
    throws IOException {
  try {
    FsPermission dirPerm = 
        FsPermission.getDefault().applyUMask(dfsClientConf.uMask); 
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileAlreadyExistsException.class, 
                                   FileNotFoundException.class,
                                   ParentNotDirectoryException.class,
                                   NSQuotaExceededException.class, 
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class,
                                   SnapshotAccessControlException.class);
  }
}
 
Developer: Nextzero; Project: hadoop-2.6.0-cdh5.4.3; Lines of code: 23; Source file: DFSClient.java

Example 14: rename

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * Rename file or directory.
 * @see ClientProtocol#rename2(String, String, Options.Rename...)
 */
public void rename(String src, String dst, Options.Rename... options)
    throws IOException {
  checkOpen();
  try {
    namenode.rename2(src, dst, options);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   DSQuotaExceededException.class,
                                   FileAlreadyExistsException.class,
                                   FileNotFoundException.class,
                                   ParentNotDirectoryException.class,
                                   SafeModeException.class,
                                   NSQuotaExceededException.class,
                                   UnresolvedPathException.class,
                                   SnapshotAccessControlException.class);
  }
}
 
Developer: Nextzero; Project: hadoop-2.6.0-cdh5.4.3; Lines of code: 22; Source file: DFSClient.java

Example 15: renameTo

import org.apache.hadoop.fs.ParentNotDirectoryException; // import the required package/class
/**
 * @see #unprotectedRenameTo(String, String, long, Options.Rename...)
 */
void renameTo(String src, String dst, long mtime,
    BlocksMapUpdateInfo collectedBlocks, Options.Rename... options)
    throws FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, QuotaExceededException,
    UnresolvedLinkException, IOException {
  if (NameNode.stateChangeLog.isDebugEnabled()) {
    NameNode.stateChangeLog.debug("DIR* FSDirectory.renameTo: " + src
        + " to " + dst);
  }
  writeLock();
  try {
    if (unprotectedRenameTo(src, dst, mtime, collectedBlocks, options)) {
      namesystem.incrDeletedFileCount(1);
    }
  } finally {
    writeUnlock();
  }
}
 
Developer: Nextzero; Project: hadoop-2.6.0-cdh5.4.3; Lines of code: 22; Source file: FSDirectory.java


Note: The org.apache.hadoop.fs.ParentNotDirectoryException class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Please follow the corresponding project's license when distributing or using this code; do not republish without permission.