This article collects typical usage examples of the Java method org.apache.hadoop.fs.FileSystem.getHomeDirectory. If you are wondering what FileSystem.getHomeDirectory does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore the enclosing class, org.apache.hadoop.fs.FileSystem, for further usage examples.
The sections below show 11 code examples of the FileSystem.getHomeDirectory method, ordered by popularity.
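Before the examples, here is a minimal, self-contained sketch of the call itself. This is only a sketch: the class name is illustrative, and it assumes fs.defaultFS is configured to point at your cluster (with the default local configuration it simply returns the local home directory).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HomeDirectoryExample {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS is set (e.g. via core-site.xml); otherwise the local filesystem is used.
    Configuration conf = new Configuration();
    try (FileSystem fs = FileSystem.get(conf)) {
      // On HDFS this is typically /user/<current-user>; on the local FS it is the OS home directory.
      Path home = fs.getHomeDirectory();
      System.out.println("Home directory: " + home);
    }
  }
}

All of the examples that follow build on this same call, usually to anchor application-specific paths under the user's home directory.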
Example 1: copyLocalFileToDfs
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Copies a local file to <home directory>/<DEFAULT_APP_NAME>/<appId>/<dstFileName> on HDFS.
public static Path copyLocalFileToDfs(FileSystem fs, String appId,
    Path srcPath, String dstFileName) throws IOException {
  Path dstPath = new Path(fs.getHomeDirectory(),
      Constants.DEFAULT_APP_NAME + Path.SEPARATOR + appId + Path.SEPARATOR + dstFileName);
  LOG.info("Copying " + srcPath + " to " + dstPath);
  fs.copyFromLocalFile(srcPath, dstPath);
  return dstPath;
}
Example 2: getFilesCount
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Counts the files under <home directory>/<storeBaseDir>/<tableName>.
public int getFilesCount(String storeBaseDir, String tableName) {
  int filesCount = 0;
  try {
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    if (fs.exists(tablePath)) {
      RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator =
          fs.listFiles(tablePath, false);
      while (locatedFileStatusRemoteIterator.hasNext()) {
        filesCount++;
        LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
        System.out.println("File name is " + next.getPath());
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
  return filesCount;
}
Example 3: getORCRecords
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Reads every ORC file under <home directory>/<storeBaseDir>/<tableName> and collects its rows.
public List<OrcStruct> getORCRecords(String storeBaseDir, String tableName) throws IOException {
  List<OrcStruct> orcrecords = new ArrayList<>();
  try {
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    if (fs.exists(tablePath)) {
      RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator =
          fs.listFiles(tablePath, false);
      while (locatedFileStatusRemoteIterator.hasNext()) {
        LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
        final org.apache.hadoop.hive.ql.io.orc.Reader fis =
            OrcFile.createReader(next.getPath(), OrcFile.readerOptions(conf));
        RecordReader rows = fis.rows();
        while (rows.hasNext()) {
          orcrecords.add((OrcStruct) rows.next(null));
        }
        System.out.println("File name is " + next.getPath());
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
  return orcrecords;
}
Example 4: testHomeDirectory
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
/**
 * Tests the home directory in DFS for different home-directory prefixes.
 */
@Test(timeout=30000)
public void testHomeDirectory() throws IOException {
  final String[] homeBases = new String[] {"/home", "/home/user"};
  Configuration conf = new HdfsConfiguration();
  for (final String homeBase : homeBases) {
    conf.set(DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY, homeBase);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    FileSystem fileSys = cluster.getFileSystem();
    try {
      // test home directory
      Path home =
          fileSys.makeQualified(
              new Path(homeBase + "/" + getUserName(fileSys)));
      Path fsHome = fileSys.getHomeDirectory();
      assertEquals(home, fsHome);
    } finally {
      fileSys.close();
      cluster.shutdown();
    }
  }
}
Example 5: addToLocalResources
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Uploads a local file (or, if fileSrcPath is null, writes the given resources string) to HDFS
// under the user's home directory and registers it as a YARN LocalResource.
private void addToLocalResources(FileSystem fs, String fileSrcPath,
    String fileDstPath, String appId, Map<String, LocalResource> localResources,
    String resources) throws IOException {
  String suffix =
      "prkeyrotation" + "/" + appId + "/" + fileDstPath;
  Path dst =
      new Path(fs.getHomeDirectory(), suffix);
  if (fileSrcPath == null) {
    FSDataOutputStream ostream = null;
    try {
      ostream = FileSystem
          .create(fs, dst, new FsPermission((short) 0710));
      ostream.writeUTF(resources);
    } finally {
      IOUtils.closeQuietly(ostream);
    }
  } else {
    fs.copyFromLocalFile(new Path(fileSrcPath), dst);
  }
  FileStatus scFileStatus = fs.getFileStatus(dst);
  LocalResource scRsrc =
      LocalResource.newInstance(
          ConverterUtils.getYarnUrlFromPath(dst),
          LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
          scFileStatus.getLen(), scFileStatus.getModificationTime());
  localResources.put(fileDstPath, scRsrc);
}
Example 6: getHomeDirectory
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
/**
 * Returns the home directory of the given file system.
 *
 * @param fileSystemInfo
 *          file system information
 * @return the home directory of the file system
 */
public static Path getHomeDirectory(FileSystemInfo fileSystemInfo) {
  FileSystem fs = getFileSystem(fileSystemInfo);
  try {
    return fs.getHomeDirectory();
  } finally {
    closeFileSystem(fs);
  }
}
Example 7: checkFileExistsSecured
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Logs in to a Kerberos-secured cluster from a keytab, then checks whether
// <home directory>/<storeBaseDir>/<tableName> exists.
public boolean checkFileExistsSecured(final String user, final String keytab, String storeBaseDir,
    String tableName) {
  try {
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    return fs.exists(tablePath);
  } catch (IOException e) {
    e.printStackTrace();
  }
  return false;
}
Example 8: checkFileExists
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Checks whether <home directory>/<storeBaseDir>/<tableName> exists.
public boolean checkFileExists(String storeBaseDir, String tableName) {
  try {
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    return fs.exists(tablePath);
  } catch (IOException e) {
    e.printStackTrace();
  }
  return false;
}
Example 9: addToLocalResources
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
// Same pattern as Example 5, but the staging path is keyed by the application name and the
// resource URL is built with the URI-based ConverterUtils helper.
private void addToLocalResources(FileSystem fs, String fileSrcPath,
    String fileDstPath, String appId, Map<String, LocalResource> localResources,
    String resources) throws IOException {
  String suffix =
      appName + "/" + appId + "/" + fileDstPath;
  Path dst =
      new Path(fs.getHomeDirectory(), suffix);
  if (fileSrcPath == null) {
    FSDataOutputStream ostream = null;
    try {
      ostream = FileSystem
          .create(fs, dst, new FsPermission((short) 0710));
      ostream.writeUTF(resources);
    } finally {
      IOUtils.closeQuietly(ostream);
    }
  } else {
    fs.copyFromLocalFile(new Path(fileSrcPath), dst);
  }
  FileStatus scFileStatus = fs.getFileStatus(dst);
  LocalResource scRsrc =
      LocalResource.newInstance(
          ConverterUtils.getYarnUrlFromURI(dst.toUri()),
          LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
          scFileStatus.getLen(), scFileStatus.getModificationTime());
  localResources.put(fileDstPath, scRsrc);
}
Example 10: testWorkingDirectory
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
/**
 * Tests get/set working directory in DFS.
 */
@Test
public void testWorkingDirectory() throws IOException {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  FileSystem fileSys = cluster.getFileSystem();
  try {
    Path orig_path = fileSys.getWorkingDirectory();
    assertTrue(orig_path.isAbsolute());
    Path file1 = new Path("somewhat/random.txt");
    writeFile(fileSys, file1);
    assertTrue(fileSys.exists(new Path(orig_path, file1.toString())));
    fileSys.delete(file1, true);
    Path subdir1 = new Path("/somewhere");
    fileSys.setWorkingDirectory(subdir1);
    writeFile(fileSys, file1);
    cleanupFile(fileSys, new Path(subdir1, file1.toString()));
    Path subdir2 = new Path("else");
    fileSys.setWorkingDirectory(subdir2);
    writeFile(fileSys, file1);
    readFile(fileSys, file1);
    cleanupFile(fileSys, new Path(new Path(subdir1, subdir2.toString()),
        file1.toString()));
    // test home directory
    Path home =
        fileSys.makeQualified(
            new Path(DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT
                + "/" + getUserName(fileSys)));
    Path fsHome = fileSys.getHomeDirectory();
    assertEquals(home, fsHome);
  } finally {
    fileSys.close();
    cluster.shutdown();
  }
}
Example 11: execute
import org.apache.hadoop.fs.FileSystem; // import the package/class this method depends on
/**
 * Executes the filesystem operation.
 *
 * @param fs filesystem instance to use.
 *
 * @return a JSON object with the user home directory.
 *
 * @throws IOException thrown if an IO error occurred.
 */
@Override
@SuppressWarnings("unchecked")
public JSONObject execute(FileSystem fs) throws IOException {
  Path homeDir = fs.getHomeDirectory();
  JSONObject json = new JSONObject();
  json.put(HttpFSFileSystem.HOME_DIR_JSON, homeDir.toUri().getPath());
  return json;
}