

Java Path.toUri Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.fs.Path.toUri. If you are wondering what Path.toUri does, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.fs.Path.


A total of 15 code examples of the Path.toUri method are shown below, sorted by popularity by default.
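
Before the project examples, here is a minimal standalone sketch (not taken from any of the projects cited below; class name and sample paths are illustrative only) showing what Path.toUri returns for scheme-qualified and bare paths:

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class PathToUriDemo {
  public static void main(String[] args) {
    // A Path built from a fully qualified string keeps its scheme and authority.
    Path qualified = new Path("hdfs://namenode:8020/user/alice/data.txt");
    URI uri = qualified.toUri();
    System.out.println(uri.getScheme());    // hdfs
    System.out.println(uri.getAuthority()); // namenode:8020
    System.out.println(uri.getPath());      // /user/alice/data.txt

    // A Path built from a bare local path yields a scheme-less URI, which is
    // why many examples below prepend "jceks://file" or SCHEME_NAME + "://file"
    // when composing a credential provider URL from a local keystore path.
    Path local = new Path("/tmp/test.jks");
    System.out.println(local.toUri()); // /tmp/test.jks
  }
}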

Example 1: testFactory

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Test
public void testFactory() throws Exception {
  Configuration conf = new Configuration();
  final String userUri = UserProvider.SCHEME_NAME + ":///";
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String jksUri = JavaKeyStoreProvider.SCHEME_NAME +
      "://file" + jksPath.toUri();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
      userUri + "," + jksUri);
  List<CredentialProvider> providers = 
      CredentialProviderFactory.getProviders(conf);
  assertEquals(2, providers.size());
  assertEquals(UserProvider.class, providers.get(0).getClass());
  assertEquals(JavaKeyStoreProvider.class, providers.get(1).getClass());
  assertEquals(userUri, providers.get(0).toString());
  assertEquals(jksUri, providers.get(1).toString());
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 18, Source: TestCredentialProviderFactory.java

Example 2: testJksProvider

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Test
public void testJksProvider() throws Exception {
  Configuration conf = new Configuration();
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(tmpDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
  checkSpecificProvider(conf, ourUrl);
  Path path = ProviderUtils.unnestUri(new URI(ourUrl));
  FileSystem fs = path.getFileSystem(conf);
  FileStatus s = fs.getFileStatus(path);
  assertTrue(s.getPermission().toString().equals("rwx------"));
  assertTrue(file + " should exist", file.isFile());

  // check permission retention after explicit change
  fs.setPermission(path, new FsPermission("777"));
  checkPermissionRetention(conf, ourUrl, path);
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 22, Source: TestCredentialProviderFactory.java

Example 3: testLocalJksProvider

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Test
public void testLocalJksProvider() throws Exception {
  Configuration conf = new Configuration();
  final Path jksPath = new Path(tmpDir.toString(), "test.jks");
  final String ourUrl =
      LocalJavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(tmpDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
  checkSpecificProvider(conf, ourUrl);
  Path path = ProviderUtils.unnestUri(new URI(ourUrl));
  FileSystem fs = path.getFileSystem(conf);
  FileStatus s = fs.getFileStatus(path);
  assertTrue("Unexpected permissions: " + s.getPermission().toString(), s.getPermission().toString().equals("rwx------"));
  assertTrue(file + " should exist", file.isFile());

  // check permission retention after explicit change
  fs.setPermission(path, new FsPermission("777"));
  checkPermissionRetention(conf, ourUrl, path);
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 22, Source: TestCredentialProviderFactory.java

Example 4: setup

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Before
public void setup() throws Exception {
  outContent.reset();
  errContent.reset();
  final File tmpDir = new File(System.getProperty("test.build.data", "target"),
      UUID.randomUUID().toString());
  if (!tmpDir.mkdirs()) {
    throw new IOException("Unable to create " + tmpDir);
  }
  final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
  jceksProvider = "jceks://file" + jksPath.toUri();
  initialStdOut = System.out;
  initialStdErr = System.err;
  System.setOut(new PrintStream(outContent));
  System.setErr(new PrintStream(errContent));
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 17, Source: TestKeyShell.java

Example 5: getFinalPath

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
/**
 * Find the final name of a given output file, given the job output directory
 * and the work directory.
 * @param srcFile the specific task output file
 * @return the final path for the specific output file
 * @throws IOException failure
 */
private Path getFinalPath(Path srcFile) throws IOException {
  URI taskOutputUri = srcFile.toUri();
  URI relativePath = sourcePath.toUri().relativize(taskOutputUri);
  if (taskOutputUri == relativePath) {
    throw new IOException("Can not get the relative path:"
        + " base = " + sourcePath + " child = " + srcFile);
  }
  if (!relativePath.getPath().isEmpty()) {
    return new Path(destPath, relativePath.getPath());
  } else {
    // relative path is none.
    if (destPathStatus != null && destPathStatus.isFile()) {
      return destPath;
    } else {
      // source is a file, dest is a dir
      return new Path(destPath, srcFile.getName());
    }
  }
}
 
Developer: steveloughran, Project: cloudup, Lines: 27, Source: Cloudup.java
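
The identity check in getFinalPath relies on java.net.URI.relativize returning its argument unchanged when the base path is not a prefix of the child path. A minimal sketch of that behaviour (paths are illustrative, not taken from Cloudup):

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class RelativizeSketch {
  public static void main(String[] args) {
    URI base  = new Path("/jobs/output").toUri();
    URI child = new Path("/jobs/output/part-00000").toUri();
    URI other = new Path("/elsewhere/file").toUri();

    // Child is under the base, so a relative URI is produced.
    System.out.println(base.relativize(child)); // part-00000
    // Not under the base: relativize() returns the child URI itself,
    // which is the failure condition getFinalPath detects with '=='.
    System.out.println(base.relativize(other)); // /elsewhere/file
  }
}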

Example 6: setup

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Before
public void setup() throws Exception {
  System.setOut(new PrintStream(outContent));
  System.setErr(new PrintStream(errContent));
  final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
  new File(jksPath.toString()).delete();
  jceksProvider = "jceks://file" + jksPath.toUri();
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 9, Source: TestCredShell.java

Example 7: provisionPasswordsToCredentialProvider

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
public static void provisionPasswordsToCredentialProvider() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
  JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};

  // create new aliases
  try {
    provider.createCredentialEntry(
        FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
            FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY),
            storepass);

    provider.createCredentialEntry(
        FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
            FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
            keypass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 38, Source: KeyStoreTestUtil.java

Example 8: changePermissions

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
private void changePermissions(FileSystem fs, final Path path)
    throws IOException, InterruptedException {
  File f = new File(path.toUri());
  if (FileUtils.isSymlink(f)) {
    // avoid following symlinks when changing permissions
    return;
  }
  boolean isDir = f.isDirectory();
  FsPermission perm = cachePerms;
  // set public perms as 755 or 555 based on dir or file
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    perm = isDir ? PUBLIC_DIR_PERMS : PUBLIC_FILE_PERMS;
  }
  // set private perms as 700 or 500
  else {
    // PRIVATE:
    // APPLICATION:
    perm = isDir ? PRIVATE_DIR_PERMS : PRIVATE_FILE_PERMS;
  }
  LOG.debug("Changing permissions for path " + path + " to perm " + perm);
  final FsPermission fPerm = perm;
  if (null == userUgi) {
    files.setPermission(path, perm);
  } else {
    userUgi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        files.setPermission(path, fPerm);
        return null;
      }
    });
  }
  if (isDir) {
    FileStatus[] statuses = fs.listStatus(path);
    for (FileStatus status : statuses) {
      changePermissions(fs, status.getPath());
    }
  }
}
 
Developer: intel-hpdd, Project: scheduling-connector-for-hadoop, Lines: 39, Source: FSDownload.java

Example 9: isParentOf

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
/**
 * Probe for a path being a parent of another
 * @param parent parent path
 * @param child possible child path
 * @return true if the parent's path matches the start of the child's
 */
private boolean isParentOf(Path parent, Path child) {
  URI parentURI = parent.toUri();
  String parentPath = parentURI.getPath();
  if (!parentPath.endsWith("/")) {
    parentPath += "/";
  }
  URI childURI = child.toUri();
  String childPath = childURI.getPath();
  return childPath.startsWith(parentPath);
}
 
Developer: naver, Project: hadoop, Lines: 17, Source: FTPFileSystem.java

Example 10: testConfGetPassword

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Test
public void testConfGetPassword() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
                                             "target/test-dir"));
  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
      JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] bindpass = {'b', 'i', 'n', 'd', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      LdapGroupsMapping.BIND_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry
      (LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        LdapGroupsMapping.BIND_PASSWORD_KEY, bindpass);

    provider.createCredentialEntry(
        LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY, storepass);
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key
  assertArrayEquals(bindpass, provider.getCredentialEntry(
      LdapGroupsMapping.BIND_PASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY).getCredential());

  LdapGroupsMapping mapping = new LdapGroupsMapping();
  Assert.assertEquals("bindpass",
      mapping.getPassword(conf, LdapGroupsMapping.BIND_PASSWORD_KEY, ""));
  Assert.assertEquals("storepass",
      mapping.getPassword(conf, LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY,
         ""));
  // let's make sure that a password that doesn't exist returns an
  // empty string as currently expected and used to trigger a call to
  // extract password
  Assert.assertEquals("", mapping.getPassword(conf,"invalid-alias", ""));
}
 
Developer: naver, Project: hadoop, Lines: 54, Source: TestLdapGroupsMapping.java

Example 11: expandAsGlob

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
/**
 * Expand the given path as a glob pattern.  Non-existent paths do not
 * throw an exception because creation commands like touch and mkdir need
 * to create them.  The "stat" field will be null if the path does not
 * exist.
 * @param pattern the pattern to expand as a glob
 * @param conf the hadoop configuration
 * @return list of {@link PathData} objects.  if the pattern is not a glob,
 * and does not exist, the list will contain a single PathData with a null
 * stat 
 * @throws IOException anything else goes wrong...
 */
public static PathData[] expandAsGlob(String pattern, Configuration conf)
throws IOException {
  Path globPath = new Path(pattern);
  FileSystem fs = globPath.getFileSystem(conf);    
  FileStatus[] stats = fs.globStatus(globPath);
  PathData[] items = null;
  
  if (stats == null) {
    // remove any quoting in the glob pattern
    pattern = pattern.replaceAll("\\\\(.)", "$1");
    // not a glob & file not found, so add the path with a null stat
    items = new PathData[]{ new PathData(fs, pattern, null) };
  } else {
    // figure out what type of glob path was given, will convert globbed
    // paths to match the type to preserve relativity
    PathType globType;
    URI globUri = globPath.toUri();
    if (globUri.getScheme() != null) {
      globType = PathType.HAS_SCHEME;
    } else if (!globUri.getPath().isEmpty() &&
               new Path(globUri.getPath()).isAbsolute()) {
      globType = PathType.SCHEMELESS_ABSOLUTE;
    } else {
      globType = PathType.RELATIVE;
    }

    // convert stats to PathData
    items = new PathData[stats.length];
    int i=0;
    for (FileStatus stat : stats) {
      URI matchUri = stat.getPath().toUri();
      String globMatch = null;
      switch (globType) {
        case HAS_SCHEME: // use as-is, but remove authority if necessary
          if (globUri.getAuthority() == null) {
            matchUri = removeAuthority(matchUri);
          }
          globMatch = uriToString(matchUri, false);
          break;
        case SCHEMELESS_ABSOLUTE: // take just the uri's path
          globMatch = matchUri.getPath();
          break;
        case RELATIVE: // make it relative to the current working dir
          URI cwdUri = fs.getWorkingDirectory().toUri();
          globMatch = relativize(cwdUri, matchUri, stat.isDirectory());
          break;
      }
      items[i++] = new PathData(fs, globMatch, stat);
    }
  }
  Arrays.sort(items);
  return items;
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 66, Source: PathData.java
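
The PathType classification in expandAsGlob is driven entirely by what Path.toUri exposes. A small illustrative sketch (paths chosen for demonstration only):

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class GlobTypeSketch {
  public static void main(String[] args) {
    URI withScheme = new Path("hdfs://nn:8020/data/*.txt").toUri();
    URI absolute   = new Path("/data/*.txt").toUri();
    URI relative   = new Path("data/*.txt").toUri();

    System.out.println(withScheme.getScheme()); // hdfs -> HAS_SCHEME
    System.out.println(absolute.getScheme());   // null, path starts with '/' -> SCHEMELESS_ABSOLUTE
    System.out.println(relative.getScheme());   // null, relative path -> RELATIVE
  }
}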

Example 12: changePermissions

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
/**
 * Recursively change permissions of all files/dirs on path based 
 * on resource visibility.
 * Change to 755 or 700 for dirs, 555 or 500 for files.
 * @param fs FileSystem
 * @param path Path to modify perms for
 * @throws IOException
 * @throws InterruptedException 
 */
private void changePermissions(FileSystem fs, final Path path)
    throws IOException, InterruptedException {
  File f = new File(path.toUri());
  if (FileUtils.isSymlink(f)) {
    // avoid following symlinks when changing permissions
    return;
  }
  boolean isDir = f.isDirectory();
  FsPermission perm = cachePerms;
  // set public perms as 755 or 555 based on dir or file
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    perm = isDir ? PUBLIC_DIR_PERMS : PUBLIC_FILE_PERMS;
  }
  // set private perms as 700 or 500
  else {
    // PRIVATE:
    // APPLICATION:
    perm = isDir ? PRIVATE_DIR_PERMS : PRIVATE_FILE_PERMS;
  }
  LOG.debug("Changing permissions for path " + path
      + " to perm " + perm);
  final FsPermission fPerm = perm;
  if (null == userUgi) {
    files.setPermission(path, perm);
  }
  else {
    userUgi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        files.setPermission(path, fPerm);
        return null;
      }
    });
  }
  if (isDir) {
    FileStatus[] statuses = fs.listStatus(path);
    for (FileStatus status : statuses) {
      changePermissions(fs, status.getPath());
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 50, Source: FSDownload.java

Example 13: provisionCredentialsForSSL

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
protected Configuration provisionCredentialsForSSL() throws IOException,
    Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
  JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
  char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEY_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_KEY_PASSWORD_KEY, keypass);

    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY, storepass);

    provider.createCredentialEntry(
        WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY, trustpass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key directly from api
  assertArrayEquals(keypass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEY_PASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY).getCredential());
  assertArrayEquals(trustpass, provider.getCredentialEntry(
      WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY).getCredential());
  return conf;
}
 
Developer: naver, Project: hadoop, Lines: 55, Source: TestWebAppUtils.java

Example 14: CompressAwarePath

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
public CompressAwarePath(Path path, long rawDataLength, long compressSize) {
  super(path.toUri());
  this.rawDataLength = rawDataLength;
  this.compressedSize = compressSize;
}
 
Developer: naver, Project: hadoop, Lines: 6, Source: MergeManagerImpl.java

Example 15: testGetPassword

import org.apache.hadoop.fs.Path; // import the package/class this method depends on
@Test
public void testGetPassword() throws Exception {
  File testDir = new File(System.getProperty("test.build.data",
      "target/test-dir"));

  Configuration conf = new Configuration();
  final Path jksPath = new Path(testDir.toString(), "test.jks");
  final String ourUrl =
  JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

  File file = new File(testDir, "test.jks");
  file.delete();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);

  CredentialProvider provider =
      CredentialProviderFactory.getProviders(conf).get(0);
  char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
  char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
  char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};

  // ensure that we get nulls when the key isn't there
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
  assertEquals(null, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));

  // create new aliases
  try {
    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_KEYPASSWORD_KEY, keypass);

    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY, storepass);

    provider.createCredentialEntry(
        DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY, trustpass);

    // write out so that it can be found in checks
    provider.flush();
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  }
  // make sure we get back the right key directly from api
  assertArrayEquals(keypass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYPASSWORD_KEY).getCredential());
  assertArrayEquals(storepass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY).getCredential());
  assertArrayEquals(trustpass, provider.getCredentialEntry(
      DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY).getCredential());

  // use WebAppUtils as would be used by loadSslConfiguration
  Assert.assertEquals("keypass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
  Assert.assertEquals("storepass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
  Assert.assertEquals("trustpass",
      DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));

  // let's make sure that a password that doesn't exist returns null
  Assert.assertEquals(null, DFSUtil.getPassword(conf,"invalid-alias"));
}
 
Developer: naver, Project: hadoop, Lines: 65, Source: TestDFSUtil.java


Note: the org.apache.hadoop.fs.Path.toUri examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and distribution and use are governed by each project's license. Do not reproduce without permission.