

Java FilterContainer Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.http.FilterContainer. If you are wondering what the FilterContainer class is for, how to use it, or what real-world usage looks like, the curated code examples below may help.


The FilterContainer class belongs to the org.apache.hadoop.http package. A total of 14 FilterContainer code examples are shown below, sorted by popularity by default.
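
Before diving into the examples, here is a minimal sketch of how a custom FilterInitializer typically uses FilterContainer: the initializer receives the container and the Hadoop Configuration, builds a map of init parameters, and registers a servlet filter by class name. The filter class ("com.example.MyFilter") and the configuration key ("my.filter.param") are placeholders invented for illustration; only the addFilter/addGlobalFilter calls reflect the FilterContainer API used in the examples.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.FilterInitializer;

public class MyFilterInitializer extends FilterInitializer {

  @Override
  public void initFilter(FilterContainer container, Configuration conf) {
    Map<String, String> params = new HashMap<String, String>();
    // Copy a (hypothetical) configuration value into the filter's init parameters.
    params.put("my.filter.param", conf.get("my.filter.param", "default"));

    // addFilter applies the filter to user-facing URLs of the embedded HTTP server;
    // addGlobalFilter (used in several examples below) applies it to all URLs.
    container.addFilter("MyFilter", "com.example.MyFilter", params);
  }
}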

Example 1: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
/**
 * Initializes Alfredo AuthenticationFilter.
 * <p/>
 * Propagates to Alfredo AuthenticationFilter configuration all Hadoop
 * configuration properties prefixed with "hadoop.http.authentication."
 *
 * @param container The filter container
 * @param conf Configuration for run-time parameters
 */
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  Map<String, String> filterConfig = new HashMap<String, String>();

  //setting the cookie path to root '/' so it is used for all resources.
  filterConfig.put(AuthenticationFilter.COOKIE_PATH, "/");

  for (Map.Entry<String, String> entry : conf) {
    String name = entry.getKey();
    if (name.startsWith(PREFIX)) {
      String value = conf.get(name);
      name = name.substring(PREFIX.length());
      filterConfig.put(name, value);
    }
  }

  container.addFilter("authentication",
                      AuthenticationFilter.class.getName(),
                      filterConfig);
}
 
Contributor: Seagate, Project: hadoop-on-lustre, Lines: 30, Source: AuthenticationFilterInitializer.java
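
A runnable sketch of the prefix-stripping behaviour in Example 1: every property under the "hadoop.http.authentication." prefix has the prefix removed and becomes an init parameter of the authentication filter. The property values ("simple", "36000") are illustrative only; the loop itself mirrors the example above.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;

public class AuthenticationPrefixDemo {
  public static void main(String[] args) {
    final String prefix = "hadoop.http.authentication.";

    // Configuration without default resources, so only the two illustrative keys exist.
    Configuration conf = new Configuration(false);
    conf.set(prefix + "type", "simple");
    conf.set(prefix + "token.validity", "36000");

    // Same loop as in Example 1: strip the prefix and collect filter init parameters.
    Map<String, String> filterConfig = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : conf) {
      String name = entry.getKey();
      if (name.startsWith(prefix)) {
        filterConfig.put(name.substring(prefix.length()), conf.get(name));
      }
    }
    System.out.println(filterConfig); // {token.validity=36000, type=simple} (order may vary)
  }
}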

Example 2: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {

  String key = getEnabledConfigKey();
  boolean enabled = conf.getBoolean(key, false);
  if (enabled) {
    container.addGlobalFilter("Cross Origin Filter",
        CrossOriginFilter.class.getName(),
        getFilterParameters(conf, getPrefix()));
  } else {
    LOG.info("CORS filter not enabled. Please set " + key
        + " to 'true' to enable it");
  }
}
 
Contributor: nucypher, Project: hadoop-oss, Lines: 15, Source: HttpCrossOriginFilterInitializer.java
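
For reference, a hedged sketch of how the filter in Example 2 would be switched on from code. The enable flag and the other key names are assumed to follow the usual "hadoop.http.cross-origin." prefix convention behind getEnabledConfigKey() and getPrefix(); treat them as assumptions rather than a definitive reference.

import org.apache.hadoop.conf.Configuration;

public class EnableCorsDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Assumed key names under the "hadoop.http.cross-origin." prefix.
    conf.setBoolean("hadoop.http.cross-origin.enabled", true);
    conf.set("hadoop.http.cross-origin.allowed-origins", "https://ui.example.com");
    conf.set("hadoop.http.cross-origin.allowed-methods", "GET,POST,HEAD");
    // With the flag true, initFilter(...) in Example 2 registers CrossOriginFilter globally;
    // otherwise it only logs that the CORS filter is not enabled.
    System.out.println(conf.getBoolean("hadoop.http.cross-origin.enabled", false));
  }
}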

Example 3: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  HashMap<String, String> options = new HashMap<String, String>();
  
  String username = getUsernameFromConf(conf);
  options.put(HADOOP_HTTP_STATIC_USER, username);

  container.addFilter("static_user_filter", 
                      StaticUserFilter.class.getName(), 
                      options);
}
 
Contributor: nucypher, Project: hadoop-oss, Lines: 12, Source: StaticUserWebFilter.java
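
A minimal sketch of configuring the static web user consumed by Example 3, assuming the conventional "hadoop.http.staticuser.user" key (default user "dr.who") behind the HADOOP_HTTP_STATIC_USER constant; the key name is an assumption here.

import org.apache.hadoop.conf.Configuration;

public class StaticUserDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Assumed key behind HADOOP_HTTP_STATIC_USER: the identity attributed to
    // unauthenticated web UI requests.
    conf.set("hadoop.http.staticuser.user", "webuser");
    System.out.println(conf.get("hadoop.http.staticuser.user", "dr.who"));
  }
}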

Example 4: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {

  Map<String, String> filterConfig = createFilterConfig(conf);
  container.addFilter("RMAuthenticationFilter",
    RMAuthenticationFilter.class.getName(), filterConfig);
}
 
Contributor: naver, Project: hadoop, Lines: 8, Source: RMAuthenticationFilterInitializer.java

Example 5: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {

  // setup the filter
  // use the keys with "yarn.timeline-service.http-cross-origin" prefix to
  // override the ones with the "hadoop.http.cross-origin" prefix.

  Map<String, String> filterParameters =
      getFilterParameters(conf, HttpCrossOriginFilterInitializer.PREFIX);
  filterParameters.putAll(getFilterParameters(conf, getPrefix()));

  container.addGlobalFilter("Cross Origin Filter",
        CrossOriginFilter.class.getName(), filterParameters);
}
 
Contributor: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 15, Source: CrossOriginFilterInitializer.java
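
The comment in Example 5 notes that keys under the "yarn.timeline-service.http-cross-origin" prefix override those under the "hadoop.http.cross-origin" prefix, because the timeline-service values are merged last via putAll(). A short sketch of that precedence; the "allowed-origins" suffix is illustrative.

import org.apache.hadoop.conf.Configuration;

public class TimelineCorsOverrideDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // Cluster-wide CORS setting shared by all Hadoop HTTP servers...
    conf.set("hadoop.http.cross-origin.allowed-origins", "*");
    // ...overridden for the timeline service only; Example 5 merges this prefix last,
    // so it wins for the timeline server's CrossOriginFilter.
    conf.set("yarn.timeline-service.http-cross-origin.allowed-origins",
        "https://timeline-ui.example.com");
    System.out.println(conf.get("yarn.timeline-service.http-cross-origin.allowed-origins"));
  }
}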

Example 6: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf)
{
  logger.debug("Conf {}", conf);
  Map<String, String> params = new HashMap<>();
  Collection<String> proxies = new ArrayList<>();
  if (ConfigUtils.isRMHAEnabled(conf)) {
    // HA is enabled get all
    for (String rmId : ConfigUtils.getRMHAIds(conf)) {
      proxies.add(getResolvedRMWebAppURLWithoutScheme(conf, rmId));
    }
    logger.info("HA proxy addresses {}", proxies);
  }
  if (proxies.isEmpty()) {
    proxies.add(getProxyHostAndPort(conf));
    logger.info("Proxy addresses {}", proxies);
  }
  StringBuilder proxyBr = new StringBuilder();
  for (String proxy : proxies) {
    if (proxyBr.length() != 0) {
      proxyBr.append(StramWSFilter.PROXY_DELIMITER);
    }
    String[] parts = proxy.split(":");
    proxyBr.append(parts[0]);
  }
  params.put(StramWSFilter.PROXY_HOST, proxyBr.toString());
  container.addFilter(FILTER_NAME, FILTER_CLASS, params);
}
 
Contributor: apache, Project: apex-core, Lines: 29, Source: StramWSFilterInitializer.java

Example 7: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  Map<String, String> params = new HashMap<String, String>();
  String proxy = YarnConfiguration.getProxyHostAndPort(conf);
  String[] parts = proxy.split(":");
  params.put(AmIpFilter.PROXY_HOST, parts[0]);
  params.put(AmIpFilter.PROXY_URI_BASE,
      HttpConfig.getSchemePrefix() + proxy +
      System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV));
  container.addFilter(FILTER_NAME, FILTER_CLASS, params);
}
 
Contributor: ict-carch, Project: hadoop-plus, Lines: 12, Source: AmFilterInitializer.java

Example 8: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
/**
 * Initializes hadoop-auth AuthenticationFilter.
 * <p/>
 * Propagates to hadoop-auth AuthenticationFilter configuration all Hadoop
 * configuration properties prefixed with "hadoop.http.authentication."
 *
 * @param container The filter container
 * @param conf Configuration for run-time parameters
 */
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  Map<String, String> filterConfig = getFilterConfigMap(conf, PREFIX);

  // extend AuthenticationFilter's feature to
  // support proxy user operation.
  container.addFilter("authentication",
                      AuthenticationWithProxyUserFilter.class.getName(),
                      filterConfig);
}
 
Contributor: hopshadoop, Project: hops, Lines: 20, Source: AuthenticationFilterInitializer.java

Example 9: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  Map<String, String> params = new HashMap<String, String>();
  String proxy = WebAppUtils.getProxyHostAndPort(conf);
  String[] parts = proxy.split(":");
  params.put(AmIpFilter.PROXY_HOST, parts[0]);
  params.put(AmIpFilter.PROXY_URI_BASE,
      HttpConfig.getSchemePrefix() + proxy +
      System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV));
  container.addFilter(FILTER_NAME, FILTER_CLASS, params);
}
 
Contributor: chendave, Project: hadoop-TCP, Lines: 12, Source: AmFilterInitializer.java

Example 10: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {

  container.addGlobalFilter("Cross Origin Filter",
      CrossOriginFilter.class.getName(), getFilterParameters(conf));
}
 
Contributor: naver, Project: hadoop, Lines: 7, Source: CrossOriginFilterInitializer.java

Example 11: testProxyUserConfiguration

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Test
public void testProxyUserConfiguration() {
  FilterContainer container = Mockito.mock(FilterContainer.class);
  for (int i = 0; i < 3; ++i) {
    Configuration conf = new YarnConfiguration();
    switch (i) {
      case 0:
        // hadoop.proxyuser prefix
        conf.set("hadoop.proxyuser.foo.hosts", "*");
        conf.set("hadoop.proxyuser.foo.users", "*");
        conf.set("hadoop.proxyuser.foo.groups", "*");
        break;
      case 1:
        // yarn.timeline-service.http-authentication.proxyuser prefix
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.hosts",
            "*");
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.users",
            "*");
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.groups",
            "*");
        break;
      case 2:
        // hadoop.proxyuser prefix has been overwritten by
        // yarn.timeline-service.http-authentication.proxyuser prefix
        conf.set("hadoop.proxyuser.foo.hosts", "bar");
        conf.set("hadoop.proxyuser.foo.users", "bar");
        conf.set("hadoop.proxyuser.foo.groups", "bar");
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.hosts",
            "*");
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.users",
            "*");
        conf.set("yarn.timeline-service.http-authentication.proxyuser.foo.groups",
            "*");
        break;
      default:
        break;
    }

    TimelineAuthenticationFilterInitializer initializer =
        new TimelineAuthenticationFilterInitializer();
    initializer.initFilter(container, conf);
    Assert.assertEquals(
        "*", initializer.filterConfig.get("proxyuser.foo.hosts"));
    Assert.assertEquals(
        "*", initializer.filterConfig.get("proxyuser.foo.users"));
    Assert.assertEquals(
        "*", initializer.filterConfig.get("proxyuser.foo.groups"));
  }
}
 
Contributor: naver, Project: hadoop, Lines: 50, Source: TestTimelineAuthenticationFilterInitializer.java

Example 12: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override // FilterInitializer
public void initFilter(FilterContainer container, Configuration conf) {
  String filterName = S3HdfsFilter.class.getSimpleName();
  String filterClass = S3HdfsFilter.class.getName();

  String nameNodePort = conf.get(DFS_NAMENODE_HTTP_PORT_KEY,
      String.valueOf(DFS_NAMENODE_HTTP_PORT_DEFAULT));
  String serviceHostName = conf.get(S3_SERVICE_HOSTNAME_KEY);
  String s3Directory = conf.get(S3_DIRECTORY_KEY, S3_DIRECTORY_DEFAULT);
  String s3ProxyPort = conf.get(S3_PROXY_PORT_KEY, S3_PROXY_PORT_DEFAULT);
  String maxConnections = conf.get(S3_MAX_CONNECTIONS_KEY,
      S3_MAX_CONNECTIONS_DEFAULT);

  S3HdfsConfiguration s3Conf = new S3HdfsConfiguration();
  HashMap<String, String> parameters = new HashMap<String, String>();

  /**
   * We try to load configuration properties from the Conf passed to us
   * first. If they are not set there (hdfs-site + core-site), then we check
   * s3hdfs-site + s3hdfs-default configuration files.
   */

  if (nameNodePort != null && nameNodePort.length() > 0) {
    parameters.put(DFS_NAMENODE_HTTP_PORT_KEY, nameNodePort);
  }

  if (serviceHostName != null && serviceHostName.length() > 0) {
    parameters.put(S3_SERVICE_HOSTNAME_KEY, serviceHostName);
  } else {
    serviceHostName = s3Conf.get(S3_SERVICE_HOSTNAME_KEY);
    if (serviceHostName != null && serviceHostName.length() > 0) {
      parameters.put(S3_SERVICE_HOSTNAME_KEY, serviceHostName);
    }
  }

  if (s3Directory != null && s3Directory.length() > 0) {
    parameters.put(S3_DIRECTORY_KEY, s3Directory);
  } else {
    s3Directory = s3Conf.get(S3_DIRECTORY_KEY);
    if (s3Directory != null && s3Directory.length() > 0) {
      parameters.put(S3_DIRECTORY_KEY, s3Directory);
    }
  }

  if (s3ProxyPort != null && s3ProxyPort.length() > 0) {
    parameters.put(S3_PROXY_PORT_KEY, s3ProxyPort);
  } else {
    s3ProxyPort = s3Conf.get(S3_PROXY_PORT_KEY, S3_PROXY_PORT_DEFAULT);
    if (s3ProxyPort != null && s3ProxyPort.length() > 0) {
      parameters.put(S3_PROXY_PORT_KEY, s3ProxyPort);
    }
  }

  if (maxConnections != null && maxConnections.length() > 0) {
    parameters.put(S3_MAX_CONNECTIONS_KEY, maxConnections);
  } else {
    maxConnections = s3Conf.get(S3_MAX_CONNECTIONS_KEY,
        S3_MAX_CONNECTIONS_DEFAULT);
    if (maxConnections != null && maxConnections.length() > 0) {
      parameters.put(S3_MAX_CONNECTIONS_KEY, maxConnections);
    }
  }

  container.addGlobalFilter(filterName, filterClass, parameters);
}
 
Contributor: WANdisco, Project: s3hdfs, Lines: 66, Source: S3HdfsFilter.java

Example 13: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override // FilterInitializer
public void initFilter(FilterContainer container, Configuration conf) {
  String filterName = AclFilter.class.getName();
  container.addGlobalFilter(filterName, filterName, null);
}
 
Contributor: WANdisco, Project: s3hdfs, Lines: 6, Source: AclFilter.java

Example 14: initFilter

import org.apache.hadoop.http.FilterContainer; // import the required package/class
@Override
public void initFilter(FilterContainer container, Configuration conf) {
  container.addFilter("static_user_filter", StaticUserFilter.class.getName(), 
                      new HashMap<String,String>());
}
 
Contributor: Seagate, Project: hadoop-on-lustre, Lines: 6, Source: StaticUserWebFilter.java


Note: The org.apache.hadoop.http.FilterContainer examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and distribution or use should follow the corresponding project's license. Do not republish without permission.