

Java FsUrlStreamHandlerFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.fs.FsUrlStreamHandlerFactory. If you have been wondering what FsUrlStreamHandlerFactory is for and how to use it, the curated code examples below should help.


The FsUrlStreamHandlerFactory class belongs to the org.apache.hadoop.fs package. Seven code examples of the class are presented below, ordered by popularity.
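Before diving into the individual examples, here is a minimal, self-contained sketch of the common pattern they all build on: register the factory once per JVM, after which java.net.URL can open hdfs:// URLs directly. This assumes hadoop-common (and an HDFS client) on the classpath; the namenode host, port, and file path are placeholders, not values taken from any project below.

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public class HdfsUrlDemo {
    public static void main(String[] args) throws Exception {
        // The JVM accepts a URLStreamHandlerFactory at most once per process.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

        // Placeholder namenode address and path -- adjust to your cluster.
        URL url = new URL("hdfs://namenode.example.com:8020/user/demo/thefile");
        try (InputStream in = url.openStream()) {
            System.out.println("first byte: " + in.read());
        }
    }
}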

Example 1: ensureInitialized

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
/**
 * Make sure that this instance is initialized. This is particularly required to use HDFS {@link URL}s.
 */
public void ensureInitialized() {
    if (this.isInitialized) return;

    // Add handler for HDFS URL for java.net.URL
    LoggerFactory.getLogger(HadoopFileSystem.class).info("Adding handler for HDFS URLs.");
    try {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    } catch (Throwable t) {
        LoggerFactory.getLogger(HadoopFileSystem.class).error(
                "Could not set URL stream handler factory.", t
        );
    } finally {
        this.isInitialized = true;
    }
}
 
Developer: daqcri, Project: rheem, Lines: 19, Source: HadoopFileSystem.java
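Note the breadth of the catch clause: URL.setURLStreamHandlerFactory throws an Error, not an Exception, when a factory has already been installed, and the JVM accepts a factory at most once per process. The finally block marks the instance as initialized either way, so the registration is never retried.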

Example 2: initHdfs

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
protected void initHdfs() {
    try {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    } catch (Throwable e) {
        // ignore as it's most likely already set
        LOG.debug("Cannot set URLStreamHandlerFactory due to " + e.getMessage() + ". This exception will be ignored.", e);
    }
}
 
Developer: HydAu, Project: Camel, Lines: 9, Source: HdfsComponent.java
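The two examples so far handle the single-registration restriction differently: Example 1 guards with its own isInitialized flag, while Example 2 catches and ignores the failure. Below is a minimal sketch combining both ideas; the HdfsUrlSupport class and registerOnce method are our own illustrative names, not part of either project.

import java.net.URL;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public final class HdfsUrlSupport {

    private static final AtomicBoolean ATTEMPTED = new AtomicBoolean(false);

    /** Registers the HDFS URL handler at most once; safe to call repeatedly. */
    public static void registerOnce() {
        if (!ATTEMPTED.compareAndSet(false, true)) {
            return; // registration already attempted in this JVM
        }
        try {
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        } catch (Error e) {
            // Another factory was installed first; the JVM allows only one,
            // and the public API offers no way to replace it.
        }
    }

    private HdfsUrlSupport() {
    }
}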

Example 3: setHdfsURLStreamHandlerFactory

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
public static void setHdfsURLStreamHandlerFactory()
		throws NoSuchFieldException,
		SecurityException,
		IllegalArgumentException,
		IllegalAccessException {

	Field factoryField = URL.class.getDeclaredField("factory");
	factoryField.setAccessible(true);

	URLStreamHandlerFactory urlStreamHandlerFactory = (URLStreamHandlerFactory) factoryField.get(null);

	if (urlStreamHandlerFactory == null) {
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

	}
	else {
		try {
			factoryField.set(
					null,
					new FsUrlStreamHandlerFactory());
		}
		catch (IllegalAccessException e1) {
			LOGGER.error("Could not access URLStreamHandler factory field on URL class", e1);
			throw new RuntimeException(
					"Could not access URLStreamHandler factory field on URL class",
					e1);
		}
	}
}
 
Developer: locationtech, Project: geowave, Lines: 31, Source: SparkIngestDriver.java
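Unlike the previous examples, this one does not merely tolerate an already-installed factory: it uses reflection on the private URL.factory field to replace it outright, something the public API forbids. Be aware that on Java 9 and later such deep reflection into java.base is restricted and will likely require an --add-opens option at launch.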

Example 4: loadFsUrlStreamHandler

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
/**
 * Loads the {@link FsUrlStreamHandlerFactory}.
 *
 * @param conf the configuration to use
 *
 * @throws IOException if something goes wrong
 */
public static void loadFsUrlStreamHandler(final Configuration conf) throws IOException {
    // Here to avoid https://issues.apache.org/jira/browse/HADOOP-9041
    FileSystem.get(conf);

    // Hook up the HDFS URL scheme handler
    // noinspection ErrorNotRethrown
    try {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    } catch (final Error e) {
        // This can happen if the handler has already been loaded so ignore
        System.err.println("The HDFS URL scheme handler has already been loaded");
    }
}
 
Developer: apigee, Project: lembos, Lines: 21, Source: RunnerUtils.java
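Two details are worth noting here. The FileSystem.get(conf) call is made before registering the factory specifically to dodge the initialization problem tracked in the linked HADOOP-9041 issue, and the catch clause targets Error because that is what URL.setURLStreamHandlerFactory throws when a factory is already installed.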

Example 5: testDfsUrls

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
/**
 * Test opening and reading from an InputStream through a hdfs:// URL.
 * <p>
 * First generate a file with some content through the FileSystem API, then
 * try to open and read the file through the URL stream API.
 * 
 * @throws IOException
 */
public void testDfsUrls() throws IOException {

  Configuration conf = new Configuration();
  MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
  FileSystem fs = cluster.getFileSystem();

  // Set up our own factory.
  // setURLStreamHandlerFactory can be called at most once per JVM;
  // the new URLStreamHandler then applies to all test cases
  // in TestUrlStreamHandler.
  FsUrlStreamHandlerFactory factory =
      new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
  java.net.URL.setURLStreamHandlerFactory(factory);

  Path filePath = new Path("/thefile");

  try {
    byte[] fileContent = new byte[1024];
    for (int i = 0; i < fileContent.length; ++i)
      fileContent[i] = (byte) i;

    // First create the file through the FileSystem API
    OutputStream os = fs.create(filePath);
    os.write(fileContent);
    os.close();

    // Second, open and read the file content through the URL API
    URI uri = fs.getUri();
    URL fileURL =
        new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath
            .toString());

    InputStream is = fileURL.openStream();
    assertNotNull(is);

    byte[] bytes = new byte[4096];
    assertEquals(1024, is.read(bytes));
    is.close();

    for (int i = 0; i < fileContent.length; ++i)
      assertEquals(fileContent[i], bytes[i]);

    // Cleanup: delete the file
    fs.delete(filePath, false);

  } finally {
    fs.close();
    cluster.shutdown();
  }

}
 
Developer: rhli, Project: hadoop-EAR, Lines: 60, Source: TestUrlStreamHandler.java
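The interesting part of this test is the second half: the hdfs:// URL is assembled from the scheme, host, and port of the MiniDFSCluster FileSystem's URI, so the subsequent URL.openStream() call exercises exactly the handler that the registered factory supplies.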

Example 6: afterPropertiesSet

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
@Override
public void afterPropertiesSet() throws Exception {
	internalConfig = createConfiguration(configuration);

	internalConfig.setClassLoader(beanClassLoader);
	if (resources != null) {
		for (Resource resource : resources) {
			internalConfig.addResource(resource.getURL());
		}
	}

	ConfigurationUtils.addProperties(internalConfig, properties);

	// For the property values below we can't use constants
	// from Hadoop packages because we need to be able to
	// compile against different versions.
	// Set the HDFS / FS URI last to override all other properties.
	if (StringUtils.hasText(fsUri)) {
		internalConfig.set("fs.default.name", fsUri.trim());
		internalConfig.set("fs.defaultFS", fsUri.trim());
	}

	if (StringUtils.hasText(jtUri)) {
		internalConfig.set("mapred.job.tracker", jtUri.trim());
	}

	if (StringUtils.hasText(rmUri)) {
		internalConfig.set("yarn.resourcemanager.address", rmUri.trim());
	}

	if (initialize) {
		internalConfig.size();
	}

	postProcessConfiguration(internalConfig);

	if (registerJvmUrl) {
		try {
			// force UGI init to prevent infinite loop - see SHDP-92
			UserGroupInformation.setConfiguration(internalConfig);
			URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory(getObject()));
			log.info("Registered HDFS URL stream handler");
		} catch (Error err) {
			log.warn("Cannot register Hadoop URL stream handler - one is already registered");
		}
	}
}
 
Developer: xianrendzw, Project: spring-data-hbase, Lines: 48, Source: ConfigurationFactoryBean.java
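This example differs from the earlier ones in two ways: it passes the bean's Hadoop Configuration to the FsUrlStreamHandlerFactory(Configuration) constructor, so the registered handler resolves file systems against that configuration rather than a default one, and it initializes UserGroupInformation first to avoid the infinite-loop issue referenced as SHDP-92 in the comment.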

Example 7: testDfsUrls

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; // import the required package/class
/**
 * Test opening and reading from an InputStream through a hdfs:// URL.
 * <p>
 * First generate a file with some content through the FileSystem API, then
 * try to open and read the file through the URL stream API.
 * 
 * @throws IOException
 */
public void testDfsUrls() throws IOException {

  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
  FileSystem fs = cluster.getFileSystem();

  // Set up our own factory.
  // setURLStreamHandlerFactory can be called at most once per JVM;
  // the new URLStreamHandler then applies to all test cases
  // in TestUrlStreamHandler.
  FsUrlStreamHandlerFactory factory =
      new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
  java.net.URL.setURLStreamHandlerFactory(factory);

  Path filePath = new Path("/thefile");

  try {
    byte[] fileContent = new byte[1024];
    for (int i = 0; i < fileContent.length; ++i)
      fileContent[i] = (byte) i;

    // First create the file through the FileSystem API
    OutputStream os = fs.create(filePath);
    os.write(fileContent);
    os.close();

    // Second, open and read the file content through the URL API
    URI uri = fs.getUri();
    URL fileURL =
        new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath
            .toString());

    InputStream is = fileURL.openStream();
    assertNotNull(is);

    byte[] bytes = new byte[4096];
    assertEquals(1024, is.read(bytes));
    is.close();

    for (int i = 0; i < fileContent.length; ++i)
      assertEquals(fileContent[i], bytes[i]);

    // Cleanup: delete the file
    fs.delete(filePath, false);

  } finally {
    fs.close();
    cluster.shutdown();
  }

}
 
Developer: cumulusyebl, Project: cumulus, Lines: 60, Source: TestUrlStreamHandler.java
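Example 7 is the same test as Example 5, ported to a newer Hadoop API: it obtains its configuration via HdfsConfiguration and constructs the mini cluster through MiniDFSCluster.Builder instead of the older multi-argument constructor.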


Note: the org.apache.hadoop.fs.FsUrlStreamHandlerFactory class examples in this article were compiled from open-source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets come from community open-source projects; copyright remains with the original authors, and distribution and use must follow each project's license. Do not reproduce without permission.