

Java Path.toUri Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.fs.Path.toUri. If you are unsure how Path.toUri is used in practice, the curated examples below may help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.fs.Path.


The following presents 15 code examples of the Path.toUri method, ordered by popularity by default.
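
Before the examples, here is a minimal standalone sketch (the HDFS path is hypothetical and not taken from any example below) showing what Path.toUri returns and which URI components it carries:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class PathToUriSketch {
  public static void main(String[] args) {
    Path p = new Path("hdfs://namenode:8020/user/alice/data.txt"); // hypothetical path
    URI uri = p.toUri();
    System.out.println(uri.getScheme());    // hdfs
    System.out.println(uri.getAuthority()); // namenode:8020
    System.out.println(uri.getPath());      // /user/alice/data.txt
  }
}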

Example 1: testFactory

import org.apache.hadoop.fs.Path; // import required by the method
@Test
public void testFactory() throws Exception {
  Configuration conf = new Configuration();
  final String userUri = UserProvider.SCHEME_NAME + ":///";
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String jksUri = JavaKeyStoreProvider.SCHEME_NAME +
      "://file" + jksPath.toUri();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
      userUri + "," + jksUri);
  List<CredentialProvider> providers = 
      CredentialProviderFactory.getProviders(conf);
  assertEquals(2, providers.size());
  assertEquals(UserProvider.class, providers.get(0).getClass());
  assertEquals(JavaKeyStoreProvider.class, providers.get(1).getClass());
  assertEquals(userUri, providers.get(0).toString());
  assertEquals(jksUri, providers.get(1).toString());
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 18, Source: TestCredentialProviderFactory.java
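
As a side note to Example 1, the sketch below (with a hypothetical local directory) shows why the "://file" concatenation works: toUri() on a scheme-less local Path yields only an absolute path, so prepending the provider scheme and the word "file" produces the nested URI that the factory expects. ProviderUtils.unnestUri, used in Example 2, reverses this nesting to recover the underlying file path.

import org.apache.hadoop.fs.Path;

public class ProviderUriSketch {
  public static void main(String[] args) {
    Path jksPath = new Path("/tmp/creds", "test.jks"); // hypothetical location
    String jksUri = "jceks://file" + jksPath.toUri();  // "jceks" corresponds to JavaKeyStoreProvider.SCHEME_NAME
    System.out.println(jksUri); // jceks://file/tmp/creds/test.jks
  }
}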

Example 2: testJksProvider

import org.apache.hadoop.fs.Path; // import required by the method
@Test
public void testJksProvider() throws Exception {
  Configuration conf = new Configuration();
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(tmpDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
  checkSpecificProvider(conf, ourUrl);
  Path path = ProviderUtils.unnestUri(new URI(ourUrl));
  FileSystem fs = path.getFileSystem(conf);
  FileStatus s = fs.getFileStatus(path);
  assertTrue(s.getPermission().toString().equals("rwx------"));
  assertTrue(file + " should exist", file.isFile());

  // check permission retention after explicit change
  fs.setPermission(path, new FsPermission("777"));
  checkPermissionRetention(conf, ourUrl, path);
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 22, Source: TestCredentialProviderFactory.java

Example 3: testLocalJksProvider

import org.apache.hadoop.fs.Path; // import required by the method
@Test
public void testLocalJksProvider() throws Exception {
  Configuration conf = new Configuration();
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String ourUrl =
      LocalJavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(tmpDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
  checkSpecificProvider(conf, ourUrl);
  Path path = ProviderUtils.unnestUri(new URI(ourUrl));
  FileSystem fs = path.getFileSystem(conf);
  FileStatus s = fs.getFileStatus(path);
  assertTrue("Unexpected permissions: " + s.getPermission().toString(), s.getPermission().toString().equals("rwx------"));
  assertTrue(file + " should exist", file.isFile());

  // check permission retention after explicit change
  fs.setPermission(path, new FsPermission("777"));
  checkPermissionRetention(conf, ourUrl, path);
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 22, Source: TestCredentialProviderFactory.java

Example 4: setup

import org.apache.hadoop.fs.Path; // import required by the method
@Before
public void setup() throws Exception {
  outContent.reset();
  errContent.reset();
  final File tmpDir = new File(System.getProperty("test.build.data", "target"),
      UUID.randomUUID().toString());
  if (!tmpDir.mkdirs()) {
    throw new IOException("Unable to create " + tmpDir);
  }
  final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
  jceksProvider = "jceks://file" + jksPath.toUri();
  initialStdOut = System.out;
  initialStdErr = System.err;
  System.setOut(new PrintStream(outContent));
  System.setErr(new PrintStream(errContent));
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 17, Source: TestKeyShell.java

Example 5: getFinalPath

import org.apache.hadoop.fs.Path; // import required by the method
/**
 * Find the final name of a given output file, given the job output directory
 * and the work directory.
 * @param srcFile the specific task output file
 * @return the final path for the specific output file
 * @throws IOException failure
 */
private Path getFinalPath(Path srcFile) throws IOException {
  URI taskOutputUri = srcFile.toUri();
  URI relativePath = sourcePath.toUri().relativize(taskOutputUri);
  if (taskOutputUri == relativePath) {
    throw new IOException("Can not get the relative path:"
        + " base = " + sourcePath + " child = " + srcFile);
  }
  if (!relativePath.getPath().isEmpty()) {
    return new Path(destPath, relativePath.getPath());
  } else {
    // relative path is none.
    if (destPathStatus != null && destPathStatus.isFile()) {
      return destPath;
    } else {
      // source is a file, dest is a dir
      return new Path(destPath, srcFile.getName());
    }
  }
}
 
Developer: steveloughran, Project: cloudup, Lines: 27, Source: Cloudup.java
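
The key step in Example 5 is URI.relativize: when srcFile lies under sourcePath, it returns the relative remainder; when it does not, it returns the given child URI itself, which is exactly what the identity check above detects. A minimal sketch with hypothetical paths:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class RelativizeSketch {
  public static void main(String[] args) {
    Path sourcePath = new Path("/data/job/output");          // hypothetical base
    Path srcFile = new Path("/data/job/output/part-00000");  // hypothetical child
    URI relative = sourcePath.toUri().relativize(srcFile.toUri());
    System.out.println(relative.getPath()); // part-00000
    // For a path outside sourcePath, relativize() returns the child URI unchanged.
  }
}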

Example 6: setup

import org.apache.hadoop.fs.Path; // import required by the method
@Before
public void setup() throws Exception {
  System.setOut(new PrintStream(outContent));
  System.setErr(new PrintStream(errContent));
  final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
  new File(jksPath.toString()).delete();
  jceksProvider = "jceks://file" + jksPath.toUri();
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 9, Source: TestCredShell.java

Example 7: provisionPasswordsToCredentialProvider

import org.apache.hadoop.fs.Path; // import required by the method
public static void provisionPasswordsToCredentialProvider() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};

  // create new aliases
  try {
    provider.createCredentialEntry(
        FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
            FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY),
            storepass);

    provider.createCredentialEntry(
        FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
            FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
            keypass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 38, Source: KeyStoreTestUtil.java

Example 8: changePermissions

import org.apache.hadoop.fs.Path; // import required by the method
private void changePermissions(FileSystem fs, final Path path)
    throws IOException, InterruptedException {
  File f = new File(path.toUri());
  if (FileUtils.isSymlink(f)) {
    // avoid following symlinks when changing permissions
    return;
  }
  boolean isDir = f.isDirectory();
  FsPermission perm = cachePerms;
  // set public perms as 755 or 555 based on dir or file
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    perm = isDir ? PUBLIC_DIR_PERMS : PUBLIC_FILE_PERMS;
  }
  // set private perms as 700 or 500
  else {
    // PRIVATE:
    // APPLICATION:
    perm = isDir ? PRIVATE_DIR_PERMS : PRIVATE_FILE_PERMS;
  }
  LOG.debug("Changing permissions for path " + path + " to perm " + perm);
  final FsPermission fPerm = perm;
  if (null == userUgi) {
    files.setPermission(path, perm);
  } else {
    userUgi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        files.setPermission(path, fPerm);
        return null;
      }
    });
  }
  if (isDir) {
    FileStatus[] statuses = fs.listStatus(path);
    for (FileStatus status : statuses) {
      changePermissions(fs, status.getPath());
    }
  }
}
 
Developer: intel-hpdd, Project: scheduling-connector-for-hadoop, Lines: 39, Source: FSDownload.java
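
Example 8 relies on new File(path.toUri()) to hand the path to java.io.File before checking for symlinks. This only succeeds when the URI is absolute with a file: scheme; for a scheme-less local Path, path.toUri().getPath() would be needed instead. A small sketch with a hypothetical path:

import java.io.File;
import org.apache.hadoop.fs.Path;

public class LocalFileSketch {
  public static void main(String[] args) {
    Path local = new Path("file:///tmp/cache/resource.jar"); // hypothetical local path
    File f = new File(local.toUri());
    System.out.println(f.getPath()); // /tmp/cache/resource.jar on a Unix-like system
  }
}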

Example 9: isParentOf

import org.apache.hadoop.fs.Path; // import required by the method
/**
 * Probe for a path being a parent of another
 * @param parent parent path
 * @param child possible child path
 * @return true if the parent's path matches the start of the child's
 */
private boolean isParentOf(Path parent, Path child) {
  URI parentURI = parent.toUri();
  String parentPath = parentURI.getPath();
  if (!parentPath.endsWith("/")) {
    parentPath += "/";
  }
  URI childURI = child.toUri();
  String childPath = childURI.getPath();
  return childPath.startsWith(parentPath);
}
 
Developer: naver, Project: hadoop, Lines: 17, Source: FTPFileSystem.java
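
A brief illustration of Example 9 with hypothetical paths: the trailing "/" appended to the parent path is what prevents /user/alice from being reported as a parent of /user/alice2.

import org.apache.hadoop.fs.Path;

public class IsParentOfSketch {
  public static void main(String[] args) {
    String parentPath = new Path("/user/alice").toUri().getPath() + "/";
    String childPath = new Path("/user/alice2/file.txt").toUri().getPath();
    System.out.println(childPath.startsWith(parentPath)); // false, not a parent
    System.out.println(new Path("/user/alice/file.txt").toUri().getPath()
        .startsWith(parentPath)); // true, a real child
  }
}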

Example 10: testConfGetPassword

import org.apache.hadoop.fs.Path; // import required by the method
@Test
public void testConfGetPassword() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
                                             "target/test-dir"));
  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] bindpass = {'b', 'i', 'n', 'd', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      LdapGroupsMapping.BIND_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        LdapGroupsMapping.BIND_PASSWORD_KEY, bindpass);

    provider.createCredentialEntry(
        LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY, storepass);
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key
  assertArrayEquals(bindpass, provider.getCredentialEntry(
      LdapGroupsMapping.BIND_PASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY).getCredential());

  LdapGroupsMapping mapping = new LdapGroupsMapping();
  Assert.assertEquals("bindpass",
      mapping.getPassword(conf, LdapGroupsMapping.BIND_PASSWORD_KEY, ""));
  Assert.assertEquals("storepass",
      mapping.getPassword(conf, LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY,
         ""));
  // let's make sure that a password that doesn't exist returns an
  // empty string as currently expected and used to trigger a call to
  // extract password
  Assert.assertEquals("", mapping.getPassword(conf,"invalid-alias", ""));
}
 
Developer: naver, Project: hadoop, Lines: 54, Source: TestLdapGroupsMapping.java

Example 11: expandAsGlob

import org.apache.hadoop.fs.Path; // import required by the method
/**
 * Expand the given path as a glob pattern.  Non-existent paths do not
 * throw an exception because creation commands like touch and mkdir need
 * to create them.  The "stat" field will be null if the path does not
 * exist.
 * @param pattern the pattern to expand as a glob
 * @param conf the hadoop configuration
 * @return list of {@link PathData} objects.  if the pattern is not a glob,
 * and does not exist, the list will contain a single PathData with a null
 * stat 
 * @throws IOException anything else goes wrong...
 */
public static PathData[] expandAsGlob(String pattern, Configuration conf)
throws IOException {
  Path globPath = new Path(pattern);
  FileSystem fs = globPath.getFileSystem(conf);    
  FileStatus[] stats = fs.globStatus(globPath);
  PathData[] items = null;
  
  if (stats == null) {
    // remove any quoting in the glob pattern
    pattern = pattern.replaceAll("\\\\(.)", "$1");
    // not a glob & file not found, so add the path with a null stat
    items = new PathData[]{ new PathData(fs, pattern, null) };
  } else {
    // figure out what type of glob path was given, will convert globbed
    // paths to match the type to preserve relativity
    PathType globType;
    URI globUri = globPath.toUri();
    if (globUri.getScheme() != null) {
      globType = PathType.HAS_SCHEME;
    } else if (!globUri.getPath().isEmpty() &&
               new Path(globUri.getPath()).isAbsolute()) {
      globType = PathType.SCHEMELESS_ABSOLUTE;
    } else {
      globType = PathType.RELATIVE;
    }

    // convert stats to PathData
    items = new PathData[stats.length];
    int i=0;
    for (FileStatus stat : stats) {
      URI matchUri = stat.getPath().toUri();
      String globMatch = null;
      switch (globType) {
        case HAS_SCHEME: // use as-is, but remove authority if necessary
          if (globUri.getAuthority() == null) {
            matchUri = removeAuthority(matchUri);
          }
          globMatch = uriToString(matchUri, false);
          break;
        case SCHEMELESS_ABSOLUTE: // take just the uri's path
          globMatch = matchUri.getPath();
          break;
        case RELATIVE: // make it relative to the current working dir
          URI cwdUri = fs.getWorkingDirectory().toUri();
          globMatch = relativize(cwdUri, matchUri, stat.isDirectory());
          break;
      }
      items[i++] = new PathData(fs, globMatch, stat);
    }
  }
  Arrays.sort(items);
  return items;
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 66, Source: PathData.java
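
The classification step in Example 11 hinges on what globPath.toUri() exposes: a non-null scheme means HAS_SCHEME, a scheme-less but absolute path means SCHEMELESS_ABSOLUTE, and everything else is RELATIVE. A small sketch with hypothetical patterns:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class GlobTypeSketch {
  public static void main(String[] args) {
    for (String pattern : new String[] {"hdfs:///logs/*", "/logs/*", "logs/*"}) {
      URI u = new Path(pattern).toUri();
      String type;
      if (u.getScheme() != null) {
        type = "HAS_SCHEME";
      } else if (!u.getPath().isEmpty() && new Path(u.getPath()).isAbsolute()) {
        type = "SCHEMELESS_ABSOLUTE";
      } else {
        type = "RELATIVE";
      }
      System.out.println(pattern + " -> " + type);
    }
  }
}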

Example 12: changePermissions

import org.apache.hadoop.fs.Path; // import required by the method
/**
 * Recursively change permissions of all files/dirs on path based 
 * on resource visibility.
 * Change to 755 or 700 for dirs, 555 or 500 for files.
 * @param fs FileSystem
 * @param path Path to modify perms for
 * @throws IOException
 * @throws InterruptedException 
 */
private void changePermissions(FileSystem fs, final Path path)
    throws IOException, InterruptedException {
  File f = new File(path.toUri());
  if (FileUtils.isSymlink(f)) {
    // avoid following symlinks when changing permissions
    return;
  }
  boolean isDir = f.isDirectory();
  FsPermission perm = cachePerms;
  // set public perms as 755 or 555 based on dir or file
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    perm = isDir ? PUBLIC_DIR_PERMS : PUBLIC_FILE_PERMS;
  }
  // set private perms as 700 or 500
  else {
    // PRIVATE:
    // APPLICATION:
    perm = isDir ? PRIVATE_DIR_PERMS : PRIVATE_FILE_PERMS;
  }
  LOG.debug("Changing permissions for path " + path
      + " to perm " + perm);
  final FsPermission fPerm = perm;
  if (null == userUgi) {
    files.setPermission(path, perm);
  }
  else {
    userUgi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        files.setPermission(path, fPerm);
        return null;
      }
    });
  }
  if (isDir) {
    FileStatus[] statuses = fs.listStatus(path);
    for (FileStatus status : statuses) {
      changePermissions(fs, status.getPath());
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 50, Source: FSDownload.java

Example 13: provisionCredentialsForSSL

import org.apache.hadoop.fs.Path; // import required by the method
protected Configuration provisionCredentialsForSSL() throws IOException,
    Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
  char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEY_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_KEY_PASSWORD_KEY, keypass);

    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY, storepass);

    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY, trustpass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key directly from api
  assertArrayEquals(keypass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEY_PASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY).getCredential());
  assertArrayEquals(trustpass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY).getCredential());
  return conf;
}
 
Developer: naver, Project: hadoop, Lines: 55, Source: TestWebAppUtils.java

Example 14: CompressAwarePath

import org.apache.hadoop.fs.Path; // import required by the method
public CompressAwarePath(Path path, long rawDataLength, long compressSize) {
  super(path.toUri());
  this.rawDataLength = rawDataLength;
  this.compressedSize = compressSize;
}
 
Developer: naver, Project: hadoop, Lines: 6, Source: MergeManagerImpl.java

Example 15: testGetPassword

import org.apache.hadoop.fs.Path; // import required by the method
@Test
public void testGetPassword() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
  char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_KEYPASSWORD_KEY, keypass);

    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY, storepass);

    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY, trustpass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key directly from api
  assertArrayEquals(keypass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYPASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY).getCredential());
  assertArrayEquals(trustpass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY).getCredential());

  // use WebAppUtils as would be used by loadSslConfiguration
  Assert.assertEquals("keypass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
  Assert.assertEquals("storepass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
  Assert.assertEquals("trustpass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));

  // let's make sure that a password that doesn't exist returns null
  Assert.assertEquals(null, DFSUtil.getPassword(conf,"invalid-alias"));
}
 
Developer: naver, Project: hadoop, Lines: 65, Source: TestDFSUtil.java


Note: The org.apache.hadoop.fs.Path.toUri examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use should follow the corresponding project licenses. Please do not reproduce without permission.