

Java Importance Class Code Examples

This article collects typical usage examples of org.apache.kafka.common.config.ConfigDef.Importance in Java. If you are unsure what the Importance class does, how to use it, or what real-world usage looks like, the curated examples below should help.


The Importance class belongs to the org.apache.kafka.common.config.ConfigDef package. Fifteen code examples of the class are shown below, sorted by popularity by default.
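Before the examples, here is a minimal, self-contained sketch of how Importance is typically used: each setting passed to ConfigDef.define() declares an Importance level (HIGH, MEDIUM, or LOW), which is metadata for generated documentation and tooling rather than something that changes parsing behavior. The configuration names in this sketch (connection.url, batch.size, debug.logging) are made up for illustration only.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class ImportanceSketch {
    public static void main(String[] args) {
        // Each define() call declares a name, type, optional default, Importance level and doc string.
        ConfigDef def = new ConfigDef()
                .define("connection.url", Type.STRING, Importance.HIGH, "Address of the server to connect to")
                .define("batch.size", Type.INT, 100, Importance.MEDIUM, "Number of records written per request")
                .define("debug.logging", Type.BOOLEAN, false, Importance.LOW, "Enable verbose logging");

        Map<String, String> props = new HashMap<>();
        props.put("connection.url", "localhost:9092");

        // Absent keys fall back to their defaults; connection.url has no default, so it must be supplied.
        Map<String, Object> parsed = def.parse(props);
        System.out.println(parsed.get("batch.size"));      // 100 (the default)
        System.out.println(parsed.get("connection.url"));  // localhost:9092
    }
}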

Example 1: conf

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
public static ConfigDef conf() {
   return new ConfigDef()
         .define(INFINISPAN_CONNECTION_HOSTS_CONF, Type.STRING, INFINISPAN_CONNECTION_HOSTS_DEFAULT, Importance.HIGH,
               INFINISPAN_CONNECTION_HOSTS_DOC)
         .define(INFINISPAN_CONNECTION_HOTROD_PORT_CONF, Type.INT, INFINISPAN_CONNECTION_HOTROD_PORT_DEFAULT,
               Importance.HIGH, INFINISPAN_CONNECTION_HOTROD_PORT_DOC)
         .define(INFINISPAN_CONNECTION_CACHE_NAME_CONF, Type.STRING, INFINISPAN_CONNECTION_CACHE_NAME_DEFAULT,
               Importance.MEDIUM, INFINISPAN_CONNECTION_CACHE_NAME_DOC)
         .define(INFINISPAN_USE_PROTO_CONF, Type.BOOLEAN, INFINISPAN_USE_PROTO_DEFAULT, Importance.MEDIUM,
               INFINISPAN_USE_PROTO_DOC)
         .define(INFINISPAN_PROTO_MARSHALLER_CLASS_CONF, Type.CLASS, INFINISPAN_PROTO_MARSHALLER_CLASS_DEFAULT,
               Importance.MEDIUM, INFINISPAN_PROTO_MARSHALLER_CLASS_DOC)
         .define(INFINISPAN_CACHE_FORCE_RETURN_VALUES_CONF, Type.BOOLEAN,
               INFINISPAN_CACHE_FORCE_RETURN_VALUES_DEFAULT, Importance.LOW,
               INFINISPAN_CACHE_FORCE_RETURN_VALUES_DOC)
         .define(INFINISPAN_USE_LIFESPAN_CONF, Type.BOOLEAN, INFINISPAN_USE_LIFESPAN_DEFAULT,
                 Importance.LOW, INFINISPAN_USE_LIFESPAN_DOC)
         .define(INFINISPAN_USE_MAX_IDLE_CONF, Type.BOOLEAN, INFINISPAN_USE_MAX_IDLE_DEFAULT,
                 Importance.LOW, INFINISPAN_USE_MAX_IDLE_DOC)
         .define(INFINISPAN_LIFESPAN_ENTRY_CONF, Type.LONG, INFINISPAN_LIFESPAN_ENTRY_DEFAULT,
                 Importance.LOW, INFINISPAN_LIFESPAN_ENTRY_DOC)
         .define(INFINISPAN_MAX_IDLE_ENTRY_CONF, Type.LONG, INFINISPAN_MAX_IDLE_ENTRY_DEFAULT,
                 Importance.LOW, INFINISPAN_MAX_IDLE_ENTRY_DOC);
}
 
Developer ID: infinispan, Project: infinispan-kafka, Lines of code: 25, Source file: InfinispanSinkConnectorConfig.java

Example 2: configDef

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
public static ConfigDef configDef() {
    return new ConfigDef()
            .define(NAME_CONFIG, Type.STRING, Importance.HIGH, NAME_DOC, COMMON_GROUP, 1, Width.MEDIUM, NAME_DISPLAY)
            .define(CONNECTOR_CLASS_CONFIG, Type.STRING, Importance.HIGH, CONNECTOR_CLASS_DOC, COMMON_GROUP, 2, Width.LONG, CONNECTOR_CLASS_DISPLAY)
            .define(TASKS_MAX_CONFIG, Type.INT, TASKS_MAX_DEFAULT, atLeast(TASKS_MIN_CONFIG), Importance.HIGH, TASKS_MAX_DOC, COMMON_GROUP, 3, Width.SHORT, TASK_MAX_DISPLAY)
            .define(KEY_CONVERTER_CLASS_CONFIG, Type.CLASS, null, Importance.LOW, KEY_CONVERTER_CLASS_DOC, COMMON_GROUP, 4, Width.SHORT, KEY_CONVERTER_CLASS_DISPLAY)
            .define(VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS, null, Importance.LOW, VALUE_CONVERTER_CLASS_DOC, COMMON_GROUP, 5, Width.SHORT, VALUE_CONVERTER_CLASS_DISPLAY)
            .define(TRANSFORMS_CONFIG, Type.LIST, null, new ConfigDef.Validator() {
                @Override
                public void ensureValid(String name, Object value) {
                    if (value == null) return;
                    final List<String> transformAliases = (List<String>) value;
                    if (transformAliases.size() > new HashSet<>(transformAliases).size()) {
                        throw new ConfigException(name, value, "Duplicate alias provided.");
                    }
                }
            }, Importance.LOW, TRANSFORMS_DOC, TRANSFORMS_GROUP, 6, Width.LONG, TRANSFORMS_DISPLAY);
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 19, Source file: ConnectorConfig.java
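For context, the anonymous ConfigDef.Validator in Example 2 rejects duplicate transform aliases. The sketch below exercises the same check on its own, outside of ConnectorConfig, so the behavior is easier to see; the "transforms" key name here is just an illustrative literal, not a reference to the constants above.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigException;

public class DuplicateAliasCheckSketch {
    public static void main(String[] args) {
        // Same duplicate-alias rule as the anonymous validator in Example 2.
        ConfigDef.Validator noDuplicates = new ConfigDef.Validator() {
            @Override
            public void ensureValid(String name, Object value) {
                if (value == null) return;
                @SuppressWarnings("unchecked")
                List<String> aliases = (List<String>) value;
                if (aliases.size() > new HashSet<>(aliases).size()) {
                    throw new ConfigException(name, value, "Duplicate alias provided.");
                }
            }
        };

        noDuplicates.ensureValid("transforms", Arrays.asList("mask", "route")); // passes silently
        try {
            noDuplicates.ensureValid("transforms", Arrays.asList("mask", "mask"));
        } catch (ConfigException e) {
            System.out.println(e.getMessage()); // reports the duplicate alias
        }
    }
}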

Example 3: testGroupInference

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testGroupInference() {
    List<String> expected1 = Arrays.asList("group1", "group2");
    ConfigDef def1 = new ConfigDef()
        .define("a", Type.INT, Importance.HIGH, "docs", "group1", 1, Width.SHORT, "a")
        .define("b", Type.INT, Importance.HIGH, "docs", "group2", 1, Width.SHORT, "b")
        .define("c", Type.INT, Importance.HIGH, "docs", "group1", 2, Width.SHORT, "c");

    assertEquals(expected1, def1.groups());

    List<String> expected2 = Arrays.asList("group2", "group1");
    ConfigDef def2 = new ConfigDef()
        .define("a", Type.INT, Importance.HIGH, "docs", "group2", 1, Width.SHORT, "a")
        .define("b", Type.INT, Importance.HIGH, "docs", "group2", 2, Width.SHORT, "b")
        .define("c", Type.INT, Importance.HIGH, "docs", "group1", 2, Width.SHORT, "c");

    assertEquals(expected2, def2.groups());
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 19, Source file: ConfigDefTest.java

Example 4: testValidateCannotParse

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testValidateCannotParse() {
    Map<String, ConfigValue> expected = new HashMap<>();
    String errorMessageB = "Invalid value non_integer for configuration a: Not a number of type INT";
    ConfigValue configA = new ConfigValue("a", null, Collections.emptyList(), Arrays.asList(errorMessageB));
    expected.put("a", configA);

    ConfigDef def = new ConfigDef().define("a", Type.INT, Importance.HIGH, "docs");
    Map<String, String> props = new HashMap<>();
    props.put("a", "non_integer");

    List<ConfigValue> configs = def.validate(props);
    for (ConfigValue config: configs) {
        String name = config.name();
        ConfigValue expectedConfig = expected.get(name);
        assertEquals(expectedConfig, config);
    }
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 19, Source file: ConfigDefTest.java

Example 5: conf

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(FLUENTD_CONNECT, Type.STRING, "localhost:24224", Importance.HIGH,
                    "Connection specs for Fluentd")
            .define(FLUENTD_CLIENT_MAX_BUFFER_BYTES, Type.LONG, null, Importance.MEDIUM,
                    "Max buffer size.")
            .define(FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES, Type.INT, null, Importance.MEDIUM,
                    "Initial size of buffer chunk. Default: 1048576 (1MiB)")
            .define(FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES, Type.INT, null, Importance.MEDIUM,
                    "Retention size of buffer chunk. Default: 4194304 (4MiB)")
            .define(FLUENTD_CLIENT_FLUSH_INTERVAL, Type.INT, null, Importance.MEDIUM,
                    "Buffer flush interval in msec. Default: 600(msec)")
            .define(FLUENTD_CLIENT_ACK_RESPONSE_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
                    "Enable/Disable ack response mode. Default: false")
            .define(FLUENTD_CLIENT_FILE_BACKUP_DIR, Type.STRING, null, Importance.MEDIUM,
                    "Enable file backup mode if specify backup directory path. Default: null")
            .define(FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED, Type.INT, null, Importance.MEDIUM,
                    "Max wait until all buffers are flushed in sec. Default: 60(sec)")
            .define(FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED, Type.INT, null, Importance.MEDIUM,
                    "Max wait until the flusher is terminated in sec. Default: 60(sec)")
            .define(FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
                    "If true use JVM heap memory for buffer pool. Default: false")
            .define(FLUENTD_CLIENT_TIMESTAMP_INTEGER, Type.BOOLEAN, false, Importance.MEDIUM,
                    "If true use integer timestamp. Default: false");
}
 
Developer ID: fluent, Project: kafka-connect-fluentd, Lines of code: 26, Source file: FluentdSinkConnectorConfig.java

Example 6: conf

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
public static ConfigDef conf() {
  return new ConfigDef()
      .define(USERNAME_CONF, Type.STRING, Importance.HIGH, USERNAME_DOC)
      .define(PASSWORD_CONF, Type.PASSWORD, Importance.HIGH, PASSWORD_DOC)
      .define(PASSWORD_TOKEN_CONF, Type.PASSWORD, Importance.HIGH, PASSWORD_TOKEN_DOC)
      .define(CONSUMER_KEY_CONF, Type.STRING, Importance.HIGH, CONSUMER_KEY_DOC)
      .define(CONSUMER_SECRET_CONF, Type.PASSWORD, Importance.HIGH, CONSUMER_SECRET_DOC)
      .define(INSTANCE_CONF, Type.STRING, "", Importance.HIGH, INSTANCE_DOC)
      .define(CURL_LOGGING_CONF, Type.BOOLEAN, false, Importance.LOW, CURL_LOGGING_DOC)
      .define(SALESFORCE_OBJECT_CONF, Type.STRING, Importance.HIGH, SALESFORCE_OBJECT_DOC)
      .define(KAFKA_TOPIC_CONF, Type.STRING, Importance.HIGH, KAFKA_TOPIC_DOC)
      .define(KAFKA_TOPIC_LOWERCASE_CONF, Type.BOOLEAN, true, Importance.HIGH, KAFKA_TOPIC_LOWERCASE_DOC)
      .define(CONNECTION_TIMEOUT_CONF, Type.LONG, 30000L, ConfigDef.Range.between(5 * 1000L, 600 * 1000L), Importance.LOW, CONNECTION_TIMEOUT_DOC)
      .define(VERSION_CONF, Type.STRING, "latest", ValidPattern.of("^(latest|[\\d\\.]+)$"), Importance.LOW, VERSION_DOC)
      .define(SALESFORCE_PUSH_TOPIC_NAME_CONF, Type.STRING, Importance.HIGH, SALESFORCE_PUSH_TOPIC_NAME_DOC)
      .define(SALESFORCE_PUSH_TOPIC_CREATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_CREATE_DOC)
      .define(SALESFORCE_PUSH_TOPIC_NOTIFY_CREATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_CREATE_DOC)
      .define(SALESFORCE_PUSH_TOPIC_NOTIFY_UPDATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_UPDATE_DOC)
      .define(SALESFORCE_PUSH_TOPIC_NOTIFY_DELETE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_DELETE_DOC)
      .define(SALESFORCE_PUSH_TOPIC_NOTIFY_UNDELETE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_UNDELETE_DOC);
}
 
Developer ID: jcustenborder, Project: kafka-connect-salesforce, Lines of code: 22, Source file: SalesforceSourceConnectorConfig.java

Example 7: conf

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(TOPIC_CONFIG, Type.STRING, Importance.HIGH, TOPIC_DOC)
            .define(OWNER_CONFIG, Type.STRING, Importance.HIGH, OWNER_DOC)
            .define(REPO_CONFIG, Type.STRING, Importance.HIGH, REPO_DOC)
            .define(BATCH_SIZE_CONFIG, Type.INT, 100, new BatchSizeValidator(), Importance.LOW, BATCH_SIZE_DOC)
            .define(SINCE_CONFIG, Type.STRING, ZonedDateTime.now().minusYears(1).toInstant().toString(),
                    new TimestampValidator(), Importance.HIGH, SINCE_DOC)
            .define(AUTH_USERNAME_CONFIG, Type.STRING, "", Importance.HIGH, AUTH_USERNAME_DOC)
            .define(AUTH_PASSWORD_CONFIG, Type.PASSWORD, "", Importance.HIGH, AUTH_PASSWORD_DOC);
}
 
Developer ID: simplesteph, Project: kafka-connect-github-source, Lines of code: 12, Source file: GitHubSourceConnectorConfig.java

Example 8: baseConfigDef

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
/**
 * Get a basic ConfigDef for a WorkerConfig. This includes all the common settings. Subclasses can use this to
 * bootstrap their own ConfigDef.
 * @return a ConfigDef with all the common options specified
 */
protected static ConfigDef baseConfigDef() {
    return new ConfigDef()
            .define(BOOTSTRAP_SERVERS_CONFIG, Type.LIST, BOOTSTRAP_SERVERS_DEFAULT,
                    Importance.HIGH, BOOTSTRAP_SERVERS_DOC)
            .define(KEY_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.HIGH, KEY_CONVERTER_CLASS_DOC)
            .define(VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.HIGH, VALUE_CONVERTER_CLASS_DOC)
            .define(INTERNAL_KEY_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.LOW, INTERNAL_KEY_CONVERTER_CLASS_DOC)
            .define(INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.LOW, INTERNAL_VALUE_CONVERTER_CLASS_DOC)
            .define(TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG, Type.LONG,
                    TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_DEFAULT, Importance.LOW,
                    TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_DOC)
            .define(OFFSET_COMMIT_INTERVAL_MS_CONFIG, Type.LONG, OFFSET_COMMIT_INTERVAL_MS_DEFAULT,
                    Importance.LOW, OFFSET_COMMIT_INTERVAL_MS_DOC)
            .define(OFFSET_COMMIT_TIMEOUT_MS_CONFIG, Type.LONG, OFFSET_COMMIT_TIMEOUT_MS_DEFAULT,
                    Importance.LOW, OFFSET_COMMIT_TIMEOUT_MS_DOC)
            .define(REST_HOST_NAME_CONFIG, Type.STRING, null, Importance.LOW, REST_HOST_NAME_DOC)
            .define(REST_PORT_CONFIG, Type.INT, REST_PORT_DEFAULT, Importance.LOW, REST_PORT_DOC)
            .define(REST_ADVERTISED_HOST_NAME_CONFIG, Type.STRING,  null, Importance.LOW, REST_ADVERTISED_HOST_NAME_DOC)
            .define(REST_ADVERTISED_PORT_CONFIG, Type.INT,  null, Importance.LOW, REST_ADVERTISED_PORT_DOC)
            .define(ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG, Type.STRING,
                    ACCESS_CONTROL_ALLOW_ORIGIN_DEFAULT, Importance.LOW,
                    ACCESS_CONTROL_ALLOW_ORIGIN_DOC)
            .define(ACCESS_CONTROL_ALLOW_METHODS_CONFIG, Type.STRING,
                    ACCESS_CONTROL_ALLOW_METHODS_DEFAULT, Importance.LOW,
                    ACCESS_CONTROL_ALLOW_METHODS_DOC)
            .define(
                    PLUGIN_PATH_CONFIG,
                    Type.LIST,
                    null,
                    Importance.LOW,
                    PLUGIN_PATH_DOC
            );
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 43, Source file: WorkerConfig.java
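The Javadoc on baseConfigDef() in Example 8 says that subclasses use it to bootstrap their own ConfigDef. Below is a hedged sketch of that pattern; the subclass name and the extra option are hypothetical illustrations, not the actual Kafka Connect standalone or distributed worker configs.

import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.connect.runtime.WorkerConfig;

// Hypothetical worker config illustrating the "bootstrap from baseConfigDef()" pattern.
public class MyWorkerConfig extends WorkerConfig {
    private static final ConfigDef CONFIG = baseConfigDef()
            // Layer worker-specific settings on top of the common ones.
            .define("offset.storage.file.filename", Type.STRING,
                    Importance.HIGH, "File in which to persist connector offsets");

    public MyWorkerConfig(Map<String, String> props) {
        super(CONFIG, props);
    }
}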

Example 9: testBasicTypes

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testBasicTypes() {
    ConfigDef def = new ConfigDef().define("a", Type.INT, 5, Range.between(0, 14), Importance.HIGH, "docs")
                                   .define("b", Type.LONG, Importance.HIGH, "docs")
                                   .define("c", Type.STRING, "hello", Importance.HIGH, "docs")
                                   .define("d", Type.LIST, Importance.HIGH, "docs")
                                   .define("e", Type.DOUBLE, Importance.HIGH, "docs")
                                   .define("f", Type.CLASS, Importance.HIGH, "docs")
                                   .define("g", Type.BOOLEAN, Importance.HIGH, "docs")
                                   .define("h", Type.BOOLEAN, Importance.HIGH, "docs")
                                   .define("i", Type.BOOLEAN, Importance.HIGH, "docs")
                                   .define("j", Type.PASSWORD, Importance.HIGH, "docs");

    Properties props = new Properties();
    props.put("a", "1   ");
    props.put("b", 2);
    props.put("d", " a , b, c");
    props.put("e", 42.5d);
    props.put("f", String.class.getName());
    props.put("g", "true");
    props.put("h", "FalSE");
    props.put("i", "TRUE");
    props.put("j", "password");

    Map<String, Object> vals = def.parse(props);
    assertEquals(1, vals.get("a"));
    assertEquals(2L, vals.get("b"));
    assertEquals("hello", vals.get("c"));
    assertEquals(asList("a", "b", "c"), vals.get("d"));
    assertEquals(42.5d, vals.get("e"));
    assertEquals(String.class, vals.get("f"));
    assertEquals(true, vals.get("g"));
    assertEquals(false, vals.get("h"));
    assertEquals(true, vals.get("i"));
    assertEquals(new Password("password"), vals.get("j"));
    assertEquals(Password.HIDDEN, vals.get("j").toString());
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 38, Source file: ConfigDefTest.java

Example 10: testBadInputs

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
private void testBadInputs(Type type, Object... values) {
    for (Object value : values) {
        Map<String, Object> m = new HashMap<String, Object>();
        m.put("name", value);
        ConfigDef def = new ConfigDef().define("name", type, Importance.HIGH, "docs");
        try {
            def.parse(m);
            fail("Expected a config exception on bad input for value " + value);
        } catch (ConfigException e) {
            // this is good
        }
    }
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 14, Source file: ConfigDefTest.java

Example 11: testNullDefaultWithValidator

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testNullDefaultWithValidator() {
    final String key = "enum_test";

    ConfigDef def = new ConfigDef();
    def.define(key, Type.STRING, ConfigDef.NO_DEFAULT_VALUE,
               ValidString.in("ONE", "TWO", "THREE"), Importance.HIGH, "docs");

    Properties props = new Properties();
    props.put(key, "ONE");
    Map<String, Object> vals = def.parse(props);
    assertEquals("ONE", vals.get(key));
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 14, Source file: ConfigDefTest.java

Example 12: testValidate

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testValidate() {
    Map<String, ConfigValue> expected = new HashMap<>();
    String errorMessageB = "Missing required configuration \"b\" which has no default value.";
    String errorMessageC = "Missing required configuration \"c\" which has no default value.";

    ConfigValue configA = new ConfigValue("a", 1, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList());
    ConfigValue configB = new ConfigValue("b", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageB, errorMessageB));
    ConfigValue configC = new ConfigValue("c", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageC));
    ConfigValue configD = new ConfigValue("d", 10, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList());

    expected.put("a", configA);
    expected.put("b", configB);
    expected.put("c", configC);
    expected.put("d", configD);

    ConfigDef def = new ConfigDef()
        .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c"), new IntegerRecommender(false))
        .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true))
        .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true))
        .define("d", Type.INT, Importance.HIGH, "docs", "group", 4, Width.SHORT, "d", Arrays.asList("b"), new IntegerRecommender(false));

    Map<String, String> props = new HashMap<>();
    props.put("a", "1");
    props.put("d", "10");

    List<ConfigValue> configs = def.validate(props);
    for (ConfigValue config : configs) {
        String name = config.name();
        ConfigValue expectedConfig = expected.get(name);
        assertEquals(expectedConfig, config);
    }
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 34, Source file: ConfigDefTest.java

Example 13: testValidateMissingConfigKey

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testValidateMissingConfigKey() {
    Map<String, ConfigValue> expected = new HashMap<>();
    String errorMessageB = "Missing required configuration \"b\" which has no default value.";
    String errorMessageC = "Missing required configuration \"c\" which has no default value.";
    String errorMessageD = "d is referred in the dependents, but not defined.";

    ConfigValue configA = new ConfigValue("a", 1, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList());
    ConfigValue configB = new ConfigValue("b", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageB));
    ConfigValue configC = new ConfigValue("c", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageC));
    ConfigValue configD = new ConfigValue("d", null, Collections.emptyList(), Arrays.asList(errorMessageD));
    configD.visible(false);

    expected.put("a", configA);
    expected.put("b", configB);
    expected.put("c", configC);
    expected.put("d", configD);

    ConfigDef def = new ConfigDef()
        .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c", "d"), new IntegerRecommender(false))
        .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true))
        .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true));

    Map<String, String> props = new HashMap<>();
    props.put("a", "1");

    List<ConfigValue> configs = def.validate(props);
    for (ConfigValue config: configs) {
        String name = config.name();
        ConfigValue expectedConfig = expected.get(name);
        assertEquals(expectedConfig, config);
    }
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 34, Source file: ConfigDefTest.java

Example 14: testCanAddInternalConfig

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testCanAddInternalConfig() throws Exception {
    final String configName = "internal.config";
    final ConfigDef configDef = new ConfigDef().defineInternal(configName, Type.STRING, "", Importance.LOW);
    final HashMap<String, String> properties = new HashMap<>();
    properties.put(configName, "value");
    final List<ConfigValue> results = configDef.validate(properties);
    final ConfigValue configValue = results.get(0);
    assertEquals("value", configValue.value());
    assertEquals(configName, configValue.name());
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 12, Source file: ConfigDefTest.java

Example 15: testInternalConfigDoesntShowUpInDocs

import org.apache.kafka.common.config.ConfigDef.Importance; // import the required package/class
@Test
public void testInternalConfigDoesntShowUpInDocs() throws Exception {
    final String name = "my.config";
    final ConfigDef configDef = new ConfigDef().defineInternal(name, Type.STRING, "", Importance.LOW);
    assertFalse(configDef.toHtmlTable().contains("my.config"));
    assertFalse(configDef.toEnrichedRst().contains("my.config"));
    assertFalse(configDef.toRst().contains("my.config"));
}
 
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 9, Source file: ConfigDefTest.java


Note: The org.apache.kafka.common.config.ConfigDef.Importance class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors. Please consult each project's License before distributing or using the code; do not reproduce without permission.