

Java ConfigurationException Class Code Examples

This article collects typical usage examples of the Java class org.apache.flume.conf.ConfigurationException. If you are wondering what the ConfigurationException class is for, how it is used, or what real-world examples look like, the curated snippets below should help.


The ConfigurationException class belongs to the org.apache.flume.conf package. Fifteen code examples of the class are shown below, sorted by popularity by default.
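Before the examples, here is a minimal, hypothetical sketch of the common pattern they all share: a Flume component validates its Context inside configure() and throws ConfigurationException (an unchecked FlumeException) when a required property is missing or invalid. The component name and the "endpoint"/"batchSize" property keys are illustrative only and are not taken from the examples below.

import org.apache.flume.Context;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.sink.AbstractSink;

// Hypothetical sink used only to illustrate the validate-and-throw pattern.
public class ExampleSink extends AbstractSink implements Configurable {

  private String endpoint;
  private int batchSize;

  @Override
  public void configure(Context context) {
    // Required property: fail fast with ConfigurationException if absent.
    endpoint = context.getString("endpoint");
    if (endpoint == null || endpoint.isEmpty()) {
      throw new ConfigurationException("endpoint must be specified.");
    }
    // Optional property with a default; still validated before use.
    batchSize = context.getInteger("batchSize", 100);
    if (batchSize <= 0) {
      throw new ConfigurationException("batchSize must be a positive integer.");
    }
  }

  @Override
  public Status process() {
    // Event delivery logic omitted; this sketch only demonstrates configuration.
    return Status.BACKOFF;
  }
}

Because ConfigurationException extends FlumeException (a RuntimeException), configure() can throw it without declaring it, and the agent will fail the component's configuration rather than start it half-configured.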

Example 1: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) throws ConfigurationException {
  super.configure(context);
  sinks = Arrays.asList(context.getString(
      BasicConfigurationConstants.CONFIG_SINKS).split("\\s+"));
  Map<String, String> params = context.getSubProperties(
      BasicConfigurationConstants.CONFIG_SINK_PROCESSOR_PREFIX);
  processorContext = new Context();
  processorContext.putAll(params);
  SinkProcessorType spType = getKnownSinkProcessor(processorContext.getString(
          BasicConfigurationConstants.CONFIG_TYPE));

  if (spType != null) {
    processorConf =
        (SinkProcessorConfiguration) ComponentConfigurationFactory.create(
            this.getComponentName() + "-processor",
            spType.toString(),
            ComponentType.SINK_PROCESSOR);
    if (processorConf != null) {
      processorConf.setSinks(new HashSet<String>(sinks));
      processorConf.configure(processorContext);
    }
  }
  setConfigured();
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 26, Source file: SinkGroupConfiguration.java

Example 2: doConfigure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
protected void doConfigure(Context context) throws FlumeException {
    LOGGER.info("configure...");

    canalConf.setServerUrl(context.getString(CanalSourceConstants.SERVER_URL));
    canalConf.setServerUrls(context.getString(CanalSourceConstants.SERVER_URLS));
    canalConf.setZkServers(context.getString(CanalSourceConstants.ZOOKEEPER_SERVERS));
    canalConf.setDestination(context.getString(CanalSourceConstants.DESTINATION));
    canalConf.setUsername(context.getString(CanalSourceConstants.USERNAME, CanalSourceConstants.DEFAULT_USERNAME));
    canalConf.setPassword(context.getString(CanalSourceConstants.PASSWORD, CanalSourceConstants.DEFAULT_PASSWORD));
    canalConf.setFilter(context.getString(CanalSourceConstants.FILTER));
    canalConf.setBatchSize(context.getInteger(CanalSourceConstants.BATCH_SIZE, CanalSourceConstants.DEFAULT_BATCH_SIZE));
    canalConf.setOldDataRequired(context.getBoolean(CanalSourceConstants.OLD_DATA_REQUIRED, CanalSourceConstants.DEFAULT_OLD_DATA_REQUIRED));

    if (!canalConf.isConnectionUrlValid()) {
        throw new ConfigurationException(String.format("\"%s\",\"%s\" AND \"%s\" at least one must be specified!",
                CanalSourceConstants.ZOOKEEPER_SERVERS,
                CanalSourceConstants.SERVER_URL,
                CanalSourceConstants.SERVER_URLS));
    }
}
 
Developer ID: lackhurt, Project: flume-canal-source, Lines of code: 22, Source file: CanalSource.java

Example 3: start

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public synchronized void start() {
  LOGGER.info("Starting Morphline Sink {} ...", this);
  sinkCounter.start();
  if (handler == null) {
    MorphlineHandler tmpHandler;
    try {
      tmpHandler = (MorphlineHandler) Class.forName(handlerClass).newInstance();
    } catch (Exception e) {
      throw new ConfigurationException(e);
    }
    tmpHandler.configure(context);
    handler = tmpHandler;
  }    
  super.start();
  LOGGER.info("Morphline Sink {} started.", getName());
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 18, Source file: MorphlineSink.java

Example 4: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) {

	String hostname = Utils.getHostNameForLinux();
	this.pollFrequency = context.getInteger(this.CONF_POLL_FREQUENCY, 60);
	this.hostname = context.getString(this.CONF_HOSTNAME, hostname);
	this.tags = context.getString(this.CONF_TAGS, this.DEFAULT_TAGS);
	// context.getSubProperties();
	logger.debug("#########@@@@@@@@@@@@@@@@#############$$$$$$$$$$$$$$$$$$4" + context.getParameters().toString());

	String urls = context.getString(this.CONF_URLS, "http://127.0.0.1:1988/v1/push");

	if (urls == null || urls.isEmpty()) {
		throw new ConfigurationException("OpenFalcon agent's v1/push interface list cannot be empty.");
	}
	this.postUrls = getPostUrlsFromString(urls);
}
 
Developer ID: maoyaowu, Project: flume-ng_openfalcon_monitor, Lines of code: 18, Source file: OpenFalconServer.java

Example 5: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) {
  super.configure(context);
  Configurables.ensureRequiredNonNull(context, "port");
  port = context.getInteger("port");
  host = context.getString("bind");

  maxChunkSize = context.getInteger("netty.max.http.chunk.size", DEFAULT_HTTP_CHUNK_SIZE);

  childSendBufferSize = context.getInteger("netty.child.sendBufferSize",
          DEFAULT_CHILD_BUFFER_SIZE);
  childRecieveBufferSize = context.getInteger("netty.child.recieveBufferSize",
          DEFAULT_CHILD_BUFFER_SIZE);

  try {
    tsdbUrl = new URL(context.getString("tsdb.url"));
  } catch (MalformedURLException e) {
    throw new ConfigurationException("tsdb.url", e);
  }
}
 
Developer ID: yandex, Project: opentsdb-flume, Lines of code: 21, Source file: LegacyHttpSource.java

Example 6: connect

import org.apache.flume.conf.ConfigurationException; // import the required package/class
private void connect() throws SQLException{
	if(state != State.CONFIGURED)
		throw new ConfigurationException(getClass().getSimpleName() + " is not configured");

	try {
		if(connection == null || connection.isClosed()){
			connection = DriverManager.getConnection(
					connection_url,
					connection_user,
					connection_password);
		}
	} catch (SQLException e) {
		LOG.error(e.getMessage(), e);
		throw e;
	}
}
 
Developer ID: cerndb, Project: flume-ng-audit-db, Lines of code: 17, Source file: ReliableJdbcEventReader.java

Example 7: readEvents

import org.apache.flume.conf.ConfigurationException; // import the required package/class
public List<Event> readEvents(int numberOfEventToRead) throws IOException {
	if(state != State.CONFIGURED)
		throw new ConfigurationException(getClass().getSimpleName() + " is not configured");

	LinkedList<Event> events = new LinkedList<Event>();

	for (int i = 0; i < numberOfEventToRead; i++){
		Event event = readEvent();

		if(event != null){
			LOG.trace("New event: " + event);

			events.add(event);
		}else{
			LOG.debug("Number of events returned: " + events.size());
			return events;
		}
	}

	LOG.debug("Number of events returned: " + events.size());
	return events;
}
 
Developer ID: cerndb, Project: flume-ng-audit-db, Lines of code: 23, Source file: ReliableJdbcEventReader.java

Example 8: readEvents

import org.apache.flume.conf.ConfigurationException; // import the required package/class
public List<Event> readEvents(int numberOfEventToRead) throws IOException {
	if(state != State.CONFIGURED)
		throw new ConfigurationException(getClass().getSimpleName() + " is not configured");

	LinkedList<Event> events = new LinkedList<Event>();

	for (int i = 0; i < numberOfEventToRead; i++){
		Event event = readEvent();

		if(event != null){
			events.add(event);
		}else{
			LOG.debug("Number of events returned: " + events.size());
			return events;
		}
	}

	LOG.debug("Number of events returned: " + events.size());
	return events;
}
 
Developer ID: cerndb, Project: flume-ng-audit-db, Lines of code: 21, Source file: ReliableLogFileEventReader.java

Example 9: addDocumentedKafkaProps

import org.apache.flume.conf.ConfigurationException; // import the required package/class
/**
 * Some of the producer properties are especially important
 * We documented them and gave them a camel-case name to match Flume config
 * If user set these, we will override any existing parameters with these
 * settings.
 * Knowledge of which properties are documented is maintained here for now.
 * If this will become a maintenance issue we'll set a proper data structure.
 */
private static void addDocumentedKafkaProps(Context context,
                                            Properties kafkaProps)
        throws ConfigurationException {
  String zookeeperConnect = context.getString(
          KafkaSourceConstants.ZOOKEEPER_CONNECT_FLUME);
  if (zookeeperConnect == null) {
    throw new ConfigurationException("ZookeeperConnect must contain " +
            "at least one ZooKeeper server");
  }
  kafkaProps.put(KafkaSourceConstants.ZOOKEEPER_CONNECT, zookeeperConnect);

  String groupID = context.getString(KafkaSourceConstants.GROUP_ID_FLUME);

  if (groupID != null ) {
    kafkaProps.put(KafkaSourceConstants.GROUP_ID, groupID);
  }
}
 
Developer ID: feiyuyitiao, Project: flume-ng-extends, Lines of code: 26, Source file: KafkaSourceUtil.java

Example 10: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
/**
 * We configure the source and generate properties for the Kafka Consumer
 *
 * Kafka Consumer properties are generated as follows:
 *
 * 1. Generate a properties object with some static defaults that can be
 * overridden by Source configuration 2. We add the configuration users added
 * for Kafka (parameters starting with kafka. and must be valid Kafka Consumer
 * properties 3. We add the source documented parameters which can override
 * other properties
 *
 * @param context
 */
public void configure(Context context) {
  this.context = context;
  batchUpperLimit = context.getInteger(KafkaSourceConstants.BATCH_SIZE,
          KafkaSourceConstants.DEFAULT_BATCH_SIZE);
  timeUpperLimit = context.getInteger(KafkaSourceConstants.BATCH_DURATION_MS,
          KafkaSourceConstants.DEFAULT_BATCH_DURATION);
  topic = context.getString(KafkaSourceConstants.TOPIC);

  if(topic == null) {
    throw new ConfigurationException("Kafka topic must be specified.");
  }

  kafkaProps = KafkaSourceUtil.getKafkaProperties(context);
  consumerTimeout = Integer.parseInt(kafkaProps.getProperty(
          KafkaSourceConstants.CONSUMER_TIMEOUT));
  kafkaAutoCommitEnabled = Boolean.parseBoolean(kafkaProps.getProperty(
          KafkaSourceConstants.AUTO_COMMIT_ENABLED));

  if (counter == null) {
    counter = new KafkaSourceCounter(getName());
  }
}
 
Developer ID: feiyuyitiao, Project: flume-ng-extends, Lines of code: 36, Source file: KafkaSource.java

Example 11: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
public void configure(Context context) {
  this.context = context;
  this.batchUpperLimit = context.getInteger("batchSize", Integer.valueOf(1000)).intValue();
  this.timeUpperLimit = context.getInteger("batchDurationMillis", Integer.valueOf(1000)).intValue();
  this.topic = context.getString("topic");
  if(this.topic == null) {
    throw new ConfigurationException("Kafka topic must be specified.");
  } else {
    this.kafkaProps = KafkaSourceUtil.getKafkaProperties(context);
    this.consumerTimeout = Integer.parseInt(this.kafkaProps.getProperty("consumer.timeout.ms"));
    this.kafkaAutoCommitEnabled = Boolean.parseBoolean(this.kafkaProps.getProperty("auto.commit.enable"));
    if(this.counter == null) {
      this.counter = new KafkaSourceCounter(this.getName());
    }

  }
}
 
Developer ID: hadooparchitecturebook, Project: fraud-detection-tutorial, Lines of code: 18, Source file: FastKafkaSource.java

Example 12: createClient

import org.apache.flume.conf.ConfigurationException; // import the required package/class
private void createClient() {
    try {
        final GoogleCredential credential = new GoogleCredential.Builder().setTransport(TRANSPORT)
                .setJsonFactory(JSON_FACTORY)
                .setServiceAccountId(serviceAccountId)
                .setServiceAccountScopes(Collections.singleton(SCOPE))
                .setServiceAccountPrivateKeyFromP12File(new File(serviceAccountPrivateKeyFromP12File))
                .setRequestInitializer(new HttpRequestInitializer() {
                    @Override
                    public void initialize(HttpRequest httpRequest) throws IOException {
                        httpRequest.setConnectTimeout(connectTimeoutMs);
                        httpRequest.setReadTimeout(readTimeoutMs);
                    }
                }).build();
        Bigquery.Builder builder = new Bigquery.Builder(TRANSPORT, JSON_FACTORY, credential).setApplicationName("BigQuery-Service-Accounts/0.1");
        bigquery = builder.build();
    } catch (Exception ex) {
        throw new ConfigurationException("Error creating bigquery client: " + ex.getMessage(), ex);
    }
}
 
Developer ID: DevOps-TangoMe, Project: flume-bigquery, Lines of code: 21, Source file: GoogleBigQuerySink.java

Example 13: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) {
    topic = context.getString(CONF_TOPIC);
    if (topic == null) {
        throw new ConfigurationException("Kafka topic must be specified.");
    }

    writeBody = context.getBoolean(CONF_WRITE_BODY, DEFAULT_WRITE_BODY);

    ImmutableMap<String, String> subProperties = context.getSubProperties(CONF_KAFKA);
    Properties properties = new Properties();
    properties.putAll(subProperties);

    producer = new Producer<String, String>(new ProducerConfig(properties));

    mapper = new ObjectMapper();
}
 
Developer ID: Stratio, Project: ingestion, Lines of code: 18, Source file: KafkaSink.java

Example 14: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) {
  conf = new SinkGroupConfiguration("sinkgrp");
  try {
    conf.configure(context);
  } catch (ConfigurationException e) {
    throw new FlumeException("Invalid Configuration!", e);
  }
  configure(conf);

}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 12, Source file: SinkGroup.java

Example 15: configure

import org.apache.flume.conf.ConfigurationException; // import the required package/class
@Override
public void configure(Context context) {
  this.pollFrequency = context.getInteger(this.CONF_POLL_FREQUENCY, 60);
  String localHosts = context.getString(this.CONF_HOSTS);
  if (localHosts == null || localHosts.isEmpty()) {
    throw new ConfigurationException("Hosts list cannot be empty.");
  }
  this.hosts = this.getHostsFromString(localHosts);
  this.isGanglia3 = context.getBoolean(this.CONF_ISGANGLIA3, false);
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 11, Source file: GangliaServer.java


Note: The org.apache.flume.conf.ConfigurationException class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Please consult each project's license before redistributing or reusing the code, and do not republish without permission.