This article collects typical usage examples of the Java type org.apache.kafka.common.config.ConfigDef.Type. If you are unsure what Type is for or how to use it, the curated code examples below may help.
Type is a nested enum of the org.apache.kafka.common.config.ConfigDef class (package org.apache.kafka.common.config). Fifteen code examples are shown below, ordered by popularity.
Example 1: conf
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(INFINISPAN_CONNECTION_HOSTS_CONF, Type.STRING, INFINISPAN_CONNECTION_HOSTS_DEFAULT, Importance.HIGH,
                    INFINISPAN_CONNECTION_HOSTS_DOC)
            .define(INFINISPAN_CONNECTION_HOTROD_PORT_CONF, Type.INT, INFINISPAN_CONNECTION_HOTROD_PORT_DEFAULT,
                    Importance.HIGH, INFINISPAN_CONNECTION_HOTROD_PORT_DOC)
            .define(INFINISPAN_CONNECTION_CACHE_NAME_CONF, Type.STRING, INFINISPAN_CONNECTION_CACHE_NAME_DEFAULT,
                    Importance.MEDIUM, INFINISPAN_CONNECTION_CACHE_NAME_DOC)
            .define(INFINISPAN_USE_PROTO_CONF, Type.BOOLEAN, INFINISPAN_USE_PROTO_DEFAULT, Importance.MEDIUM,
                    INFINISPAN_USE_PROTO_DOC)
            .define(INFINISPAN_PROTO_MARSHALLER_CLASS_CONF, Type.CLASS, INFINISPAN_PROTO_MARSHALLER_CLASS_DEFAULT,
                    Importance.MEDIUM, INFINISPAN_PROTO_MARSHALLER_CLASS_DOC)
            .define(INFINISPAN_CACHE_FORCE_RETURN_VALUES_CONF, Type.BOOLEAN,
                    INFINISPAN_CACHE_FORCE_RETURN_VALUES_DEFAULT, Importance.LOW,
                    INFINISPAN_CACHE_FORCE_RETURN_VALUES_DOC)
            .define(INFINISPAN_USE_LIFESPAN_CONF, Type.BOOLEAN, INFINISPAN_USE_LIFESPAN_DEFAULT,
                    Importance.LOW, INFINISPAN_USE_LIFESPAN_DOC)
            .define(INFINISPAN_USE_MAX_IDLE_CONF, Type.BOOLEAN, INFINISPAN_USE_MAX_IDLE_DEFAULT,
                    Importance.LOW, INFINISPAN_USE_MAX_IDLE_DOC)
            .define(INFINISPAN_LIFESPAN_ENTRY_CONF, Type.LONG, INFINISPAN_LIFESPAN_ENTRY_DEFAULT,
                    Importance.LOW, INFINISPAN_LIFESPAN_ENTRY_DOC)
            .define(INFINISPAN_MAX_IDLE_ENTRY_CONF, Type.LONG, INFINISPAN_MAX_IDLE_ENTRY_DEFAULT,
                    Importance.LOW, INFINISPAN_MAX_IDLE_ENTRY_DOC);
}
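A ConfigDef built like this is normally handed a raw property map at connector start-up. The following minimal sketch is not part of the connector above; the key names and defaults are illustrative stand-ins for the INFINISPAN_* constants, and only ConfigDef.parse() comes from the Kafka API.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class ConfDefParseSketch {
    public static void main(String[] args) {
        // Two keys mirroring the pattern above; names and defaults are assumptions.
        ConfigDef def = new ConfigDef()
                .define("infinispan.connection.hosts", Type.STRING, "localhost", Importance.HIGH, "Server hosts")
                .define("infinispan.connection.hotrod.port", Type.INT, 11222, Importance.HIGH, "Hot Rod port");

        Map<String, String> props = new HashMap<>();
        props.put("infinispan.connection.hosts", "infinispan.example.org");

        // parse() fills in missing defaults, converts each value to its declared
        // Type, and throws ConfigException on malformed input.
        Map<String, Object> parsed = def.parse(props);
        System.out.println(parsed.get("infinispan.connection.hotrod.port")); // 11222 (as an Integer)
    }
}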
Example 2: convertConfigKey
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
private static ConfigKeyInfo convertConfigKey(ConfigKey configKey) {
    String name = configKey.name;
    Type type = configKey.type;
    String typeName = configKey.type.name();
    boolean required = false;
    String defaultValue;
    if (ConfigDef.NO_DEFAULT_VALUE.equals(configKey.defaultValue)) {
        defaultValue = null;
        required = true;
    } else {
        defaultValue = ConfigDef.convertToString(configKey.defaultValue, type);
    }
    String importance = configKey.importance.name();
    String documentation = configKey.documentation;
    String group = configKey.group;
    int orderInGroup = configKey.orderInGroup;
    String width = configKey.width.name();
    String displayName = configKey.displayName;
    List<String> dependents = configKey.dependents;
    return new ConfigKeyInfo(name, typeName, required, defaultValue, importance, documentation, group, orderInGroup, width, displayName, dependents);
}
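A method like convertConfigKey() is typically driven by walking every ConfigKey registered on a ConfigDef. The sketch below is an assumption about the call site, not code from the original class; it would sit in the same class as convertConfigKey(), and ConfigKeyInfo here is Connect's REST entity type.

import java.util.ArrayList;
import java.util.List;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.ConfigKey;
import org.apache.kafka.connect.runtime.rest.entities.ConfigKeyInfo;

// Hypothetical helper: collect a REST-friendly description of every defined key.
private static List<ConfigKeyInfo> describe(ConfigDef configDef) {
    List<ConfigKeyInfo> infos = new ArrayList<>();
    for (ConfigKey key : configDef.configKeys().values()) {
        infos.add(convertConfigKey(key)); // the method shown above
    }
    return infos;
}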
Example 3: configDef
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
public static ConfigDef configDef() {
    return new ConfigDef()
            .define(NAME_CONFIG, Type.STRING, Importance.HIGH, NAME_DOC, COMMON_GROUP, 1, Width.MEDIUM, NAME_DISPLAY)
            .define(CONNECTOR_CLASS_CONFIG, Type.STRING, Importance.HIGH, CONNECTOR_CLASS_DOC, COMMON_GROUP, 2, Width.LONG, CONNECTOR_CLASS_DISPLAY)
            .define(TASKS_MAX_CONFIG, Type.INT, TASKS_MAX_DEFAULT, atLeast(TASKS_MIN_CONFIG), Importance.HIGH, TASKS_MAX_DOC, COMMON_GROUP, 3, Width.SHORT, TASK_MAX_DISPLAY)
            .define(KEY_CONVERTER_CLASS_CONFIG, Type.CLASS, null, Importance.LOW, KEY_CONVERTER_CLASS_DOC, COMMON_GROUP, 4, Width.SHORT, KEY_CONVERTER_CLASS_DISPLAY)
            .define(VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS, null, Importance.LOW, VALUE_CONVERTER_CLASS_DOC, COMMON_GROUP, 5, Width.SHORT, VALUE_CONVERTER_CLASS_DISPLAY)
            .define(TRANSFORMS_CONFIG, Type.LIST, null, new ConfigDef.Validator() {
                @Override
                public void ensureValid(String name, Object value) {
                    if (value == null) return;
                    final List<String> transformAliases = (List<String>) value;
                    if (transformAliases.size() > new HashSet<>(transformAliases).size()) {
                        throw new ConfigException(name, value, "Duplicate alias provided.");
                    }
                }
            }, Importance.LOW, TRANSFORMS_DOC, TRANSFORMS_GROUP, 6, Width.LONG, TRANSFORMS_DISPLAY);
}
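The inline validator above can be exercised with a standalone ConfigDef that applies the same duplicate-alias check. This is a sketch, not code from the connector framework; the key name "transforms" and the lambda form of the validator are assumptions for illustration.

import java.util.Collections;
import java.util.HashSet;
import java.util.List;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigException;

public class TransformAliasValidatorSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef().define("transforms", Type.LIST, null, (name, value) -> {
            if (value == null) return;
            @SuppressWarnings("unchecked")
            List<String> aliases = (List<String>) value;
            if (aliases.size() > new HashSet<>(aliases).size()) {
                throw new ConfigException(name, value, "Duplicate alias provided.");
            }
        }, Importance.LOW, "Transform aliases");

        // "mask" appears twice, so the validator rejects the value during parse().
        def.parse(Collections.singletonMap("transforms", "mask,route,mask"));
    }
}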
Example 4: testGroupInference
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testGroupInference() {
    List<String> expected1 = Arrays.asList("group1", "group2");
    ConfigDef def1 = new ConfigDef()
            .define("a", Type.INT, Importance.HIGH, "docs", "group1", 1, Width.SHORT, "a")
            .define("b", Type.INT, Importance.HIGH, "docs", "group2", 1, Width.SHORT, "b")
            .define("c", Type.INT, Importance.HIGH, "docs", "group1", 2, Width.SHORT, "c");
    assertEquals(expected1, def1.groups());

    List<String> expected2 = Arrays.asList("group2", "group1");
    ConfigDef def2 = new ConfigDef()
            .define("a", Type.INT, Importance.HIGH, "docs", "group2", 1, Width.SHORT, "a")
            .define("b", Type.INT, Importance.HIGH, "docs", "group2", 2, Width.SHORT, "b")
            .define("c", Type.INT, Importance.HIGH, "docs", "group1", 2, Width.SHORT, "c");
    assertEquals(expected2, def2.groups());
}
Example 5: testValidateCannotParse
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testValidateCannotParse() {
    Map<String, ConfigValue> expected = new HashMap<>();
    String errorMessageB = "Invalid value non_integer for configuration a: Not a number of type INT";
    ConfigValue configA = new ConfigValue("a", null, Collections.emptyList(), Arrays.asList(errorMessageB));
    expected.put("a", configA);

    ConfigDef def = new ConfigDef().define("a", Type.INT, Importance.HIGH, "docs");
    Map<String, String> props = new HashMap<>();
    props.put("a", "non_integer");

    List<ConfigValue> configs = def.validate(props);
    for (ConfigValue config : configs) {
        String name = config.name();
        ConfigValue expectedConfig = expected.get(name);
        assertEquals(expectedConfig, config);
    }
}
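For comparison, the same unparseable value surfaces differently through the two entry points: validate() attaches the problem as an error message on the returned ConfigValue, while parse() throws. A small sketch, not part of the original test class:

import java.util.Collections;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigException;

public class ValidateVersusParseSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef().define("a", Type.INT, Importance.HIGH, "docs");

        // validate() collects the failure as an error message on the ConfigValue for "a".
        System.out.println(def.validate(Collections.singletonMap("a", "non_integer")));

        // parse() reports the same problem as an exception.
        try {
            def.parse(Collections.singletonMap("a", "non_integer"));
        } catch (ConfigException e) {
            System.out.println(e.getMessage());
        }
    }
}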
Example 6: conf
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(FLUENTD_CONNECT, Type.STRING, "localhost:24224", Importance.HIGH,
                    "Connection specs for Fluentd")
            .define(FLUENTD_CLIENT_MAX_BUFFER_BYTES, Type.LONG, null, Importance.MEDIUM,
                    "Max buffer size.")
            .define(FLUENTD_CLIENT_BUFFER_CHUNK_INITIAL_BYTES, Type.INT, null, Importance.MEDIUM,
                    "Initial size of buffer chunk. Default: 1048576 (1MiB)")
            .define(FLUENTD_CLIENT_BUFFER_CHUNK_RETENTION_BYTES, Type.INT, null, Importance.MEDIUM,
                    "Retention size of buffer chunk. Default: 4194304 (4MiB)")
            .define(FLUENTD_CLIENT_FLUSH_INTERVAL, Type.INT, null, Importance.MEDIUM,
                    "Buffer flush interval in msec. Default: 600(msec)")
            .define(FLUENTD_CLIENT_ACK_RESPONSE_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
                    "Enable/Disable ack response mode. Default: false")
            .define(FLUENTD_CLIENT_FILE_BACKUP_DIR, Type.STRING, null, Importance.MEDIUM,
                    "Enable file backup mode if specify backup directory path. Default: null")
            .define(FLUENTD_CLIENT_WAIT_UNTIL_BUFFER_FLUSHED, Type.INT, null, Importance.MEDIUM,
                    "Max wait until all buffers are flushed in sec. Default: 60(sec)")
            .define(FLUENTD_CLIENT_WAIT_UNTIL_FLUSHER_TERMINATED, Type.INT, null, Importance.MEDIUM,
                    "Max wait until the flusher is terminated in sec. Default: 60(sec)")
            .define(FLUENTD_CLIENT_JVM_HEAP_BUFFER_MODE, Type.BOOLEAN, false, Importance.MEDIUM,
                    "If true use JVM heap memory for buffer pool. Default: false")
            .define(FLUENTD_CLIENT_TIMESTAMP_INTEGER, Type.BOOLEAN, false, Importance.MEDIUM,
                    "If true use integer timestamp. Default: false");
}
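A conf() method like this one, with several null defaults, is usually paired with a config class extending AbstractConfig so tasks can read typed, possibly-null values. A minimal sketch, using illustrative key names rather than the real FLUENTD_* constants:

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class FluentdSinkConfigSketch extends AbstractConfig {
    // A trimmed-down definition mirroring the pattern above; key names are assumptions.
    static final ConfigDef CONFIG_DEF = new ConfigDef()
            .define("fluentd.connect", Type.STRING, "localhost:24224", Importance.HIGH, "Connection specs for Fluentd")
            .define("fluentd.client.max.buffer.bytes", Type.LONG, null, Importance.MEDIUM, "Max buffer size.");

    public FluentdSinkConfigSketch(Map<String, ?> props) {
        super(CONFIG_DEF, props);
    }

    public Long maxBufferBytes() {
        // Defined with a null default, so this getter may legitimately return null.
        return getLong("fluentd.client.max.buffer.bytes");
    }

    public static void main(String[] args) {
        System.out.println(new FluentdSinkConfigSketch(Collections.<String, Object>emptyMap()).maxBufferBytes()); // null
    }
}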
Example 7: conf
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(USERNAME_CONF, Type.STRING, Importance.HIGH, USERNAME_DOC)
            .define(PASSWORD_CONF, Type.PASSWORD, Importance.HIGH, PASSWORD_DOC)
            .define(PASSWORD_TOKEN_CONF, Type.PASSWORD, Importance.HIGH, PASSWORD_TOKEN_DOC)
            .define(CONSUMER_KEY_CONF, Type.STRING, Importance.HIGH, CONSUMER_KEY_DOC)
            .define(CONSUMER_SECRET_CONF, Type.PASSWORD, Importance.HIGH, CONSUMER_SECRET_DOC)
            .define(INSTANCE_CONF, Type.STRING, "", Importance.HIGH, INSTANCE_DOC)
            .define(CURL_LOGGING_CONF, Type.BOOLEAN, false, Importance.LOW, CURL_LOGGING_DOC)
            .define(SALESFORCE_OBJECT_CONF, Type.STRING, Importance.HIGH, SALESFORCE_OBJECT_DOC)
            .define(KAFKA_TOPIC_CONF, Type.STRING, Importance.HIGH, KAFKA_TOPIC_DOC)
            .define(KAFKA_TOPIC_LOWERCASE_CONF, Type.BOOLEAN, true, Importance.HIGH, KAFKA_TOPIC_LOWERCASE_DOC)
            .define(CONNECTION_TIMEOUT_CONF, Type.LONG, 30000L, ConfigDef.Range.between(5 * 1000L, 600 * 1000L), Importance.LOW, CONNECTION_TIMEOUT_DOC)
            .define(VERSION_CONF, Type.STRING, "latest", ValidPattern.of("^(latest|[\\d\\.]+)$"), Importance.LOW, VERSION_DOC)
            .define(SALESFORCE_PUSH_TOPIC_NAME_CONF, Type.STRING, Importance.HIGH, SALESFORCE_PUSH_TOPIC_NAME_DOC)
            .define(SALESFORCE_PUSH_TOPIC_CREATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_CREATE_DOC)
            .define(SALESFORCE_PUSH_TOPIC_NOTIFY_CREATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_CREATE_DOC)
            .define(SALESFORCE_PUSH_TOPIC_NOTIFY_UPDATE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_UPDATE_DOC)
            .define(SALESFORCE_PUSH_TOPIC_NOTIFY_DELETE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_DELETE_DOC)
            .define(SALESFORCE_PUSH_TOPIC_NOTIFY_UNDELETE_CONF, Type.BOOLEAN, true, Importance.LOW, SALESFORCE_PUSH_TOPIC_NOTIFY_UNDELETE_DOC);
}
Example 8: conf
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
public static ConfigDef conf() {
    return new ConfigDef()
            .define(TOPIC_CONFIG, Type.STRING, Importance.HIGH, TOPIC_DOC)
            .define(OWNER_CONFIG, Type.STRING, Importance.HIGH, OWNER_DOC)
            .define(REPO_CONFIG, Type.STRING, Importance.HIGH, REPO_DOC)
            .define(BATCH_SIZE_CONFIG, Type.INT, 100, new BatchSizeValidator(), Importance.LOW, BATCH_SIZE_DOC)
            .define(SINCE_CONFIG, Type.STRING, ZonedDateTime.now().minusYears(1).toInstant().toString(),
                    new TimestampValidator(), Importance.HIGH, SINCE_DOC)
            .define(AUTH_USERNAME_CONFIG, Type.STRING, "", Importance.HIGH, AUTH_USERNAME_DOC)
            .define(AUTH_PASSWORD_CONFIG, Type.PASSWORD, "", Importance.HIGH, AUTH_PASSWORD_DOC);
}
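BatchSizeValidator and TimestampValidator are project-specific classes that are not shown in this listing. One plausible shape for the batch-size check, assuming an allowed range of 1 to 100, is a small ConfigDef.Validator implementation:

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigException;

// Hypothetical reconstruction; the real class may use different bounds or messages.
public class BatchSizeValidator implements ConfigDef.Validator {
    @Override
    public void ensureValid(String name, Object value) {
        Integer batchSize = (Integer) value;
        if (batchSize == null || batchSize < 1 || batchSize > 100) {
            throw new ConfigException(name, value, "Batch size must be a positive integer no larger than 100");
        }
    }
}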
Example 9: baseConfigDef
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
/**
 * Get a basic ConfigDef for a WorkerConfig. This includes all the common settings. Subclasses can use this to
 * bootstrap their own ConfigDef.
 * @return a ConfigDef with all the common options specified
 */
protected static ConfigDef baseConfigDef() {
    return new ConfigDef()
            .define(BOOTSTRAP_SERVERS_CONFIG, Type.LIST, BOOTSTRAP_SERVERS_DEFAULT,
                    Importance.HIGH, BOOTSTRAP_SERVERS_DOC)
            .define(KEY_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.HIGH, KEY_CONVERTER_CLASS_DOC)
            .define(VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.HIGH, VALUE_CONVERTER_CLASS_DOC)
            .define(INTERNAL_KEY_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.LOW, INTERNAL_KEY_CONVERTER_CLASS_DOC)
            .define(INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, Type.CLASS,
                    Importance.LOW, INTERNAL_VALUE_CONVERTER_CLASS_DOC)
            .define(TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG, Type.LONG,
                    TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_DEFAULT, Importance.LOW,
                    TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_DOC)
            .define(OFFSET_COMMIT_INTERVAL_MS_CONFIG, Type.LONG, OFFSET_COMMIT_INTERVAL_MS_DEFAULT,
                    Importance.LOW, OFFSET_COMMIT_INTERVAL_MS_DOC)
            .define(OFFSET_COMMIT_TIMEOUT_MS_CONFIG, Type.LONG, OFFSET_COMMIT_TIMEOUT_MS_DEFAULT,
                    Importance.LOW, OFFSET_COMMIT_TIMEOUT_MS_DOC)
            .define(REST_HOST_NAME_CONFIG, Type.STRING, null, Importance.LOW, REST_HOST_NAME_DOC)
            .define(REST_PORT_CONFIG, Type.INT, REST_PORT_DEFAULT, Importance.LOW, REST_PORT_DOC)
            .define(REST_ADVERTISED_HOST_NAME_CONFIG, Type.STRING, null, Importance.LOW, REST_ADVERTISED_HOST_NAME_DOC)
            .define(REST_ADVERTISED_PORT_CONFIG, Type.INT, null, Importance.LOW, REST_ADVERTISED_PORT_DOC)
            .define(ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG, Type.STRING,
                    ACCESS_CONTROL_ALLOW_ORIGIN_DEFAULT, Importance.LOW,
                    ACCESS_CONTROL_ALLOW_ORIGIN_DOC)
            .define(ACCESS_CONTROL_ALLOW_METHODS_CONFIG, Type.STRING,
                    ACCESS_CONTROL_ALLOW_METHODS_DEFAULT, Importance.LOW,
                    ACCESS_CONTROL_ALLOW_METHODS_DOC)
            .define(PLUGIN_PATH_CONFIG, Type.LIST, null, Importance.LOW, PLUGIN_PATH_DOC);
}
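As the Javadoc notes, subclasses bootstrap their own ConfigDef from baseConfigDef(). A sketch of that pattern follows; the subclass name and the extra key are illustrative, and it assumes WorkerConfig exposes a (ConfigDef, Map) constructor for subclasses to delegate to.

import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class StandaloneWorkerConfigSketch extends WorkerConfig {
    // Start from the common options and layer one mode-specific option on top.
    private static final ConfigDef CONFIG = baseConfigDef()
            .define("offset.storage.file.filename", Type.STRING, "",
                    Importance.HIGH, "File to store source-connector offsets in.");

    public StandaloneWorkerConfigSketch(Map<String, String> props) {
        super(CONFIG, props);
    }
}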
Example 10: testBasicTypes
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testBasicTypes() {
    ConfigDef def = new ConfigDef().define("a", Type.INT, 5, Range.between(0, 14), Importance.HIGH, "docs")
            .define("b", Type.LONG, Importance.HIGH, "docs")
            .define("c", Type.STRING, "hello", Importance.HIGH, "docs")
            .define("d", Type.LIST, Importance.HIGH, "docs")
            .define("e", Type.DOUBLE, Importance.HIGH, "docs")
            .define("f", Type.CLASS, Importance.HIGH, "docs")
            .define("g", Type.BOOLEAN, Importance.HIGH, "docs")
            .define("h", Type.BOOLEAN, Importance.HIGH, "docs")
            .define("i", Type.BOOLEAN, Importance.HIGH, "docs")
            .define("j", Type.PASSWORD, Importance.HIGH, "docs");

    Properties props = new Properties();
    props.put("a", "1 ");
    props.put("b", 2);
    props.put("d", " a , b, c");
    props.put("e", 42.5d);
    props.put("f", String.class.getName());
    props.put("g", "true");
    props.put("h", "FalSE");
    props.put("i", "TRUE");
    props.put("j", "password");

    Map<String, Object> vals = def.parse(props);
    assertEquals(1, vals.get("a"));
    assertEquals(2L, vals.get("b"));
    assertEquals("hello", vals.get("c"));
    assertEquals(asList("a", "b", "c"), vals.get("d"));
    assertEquals(42.5d, vals.get("e"));
    assertEquals(String.class, vals.get("f"));
    assertEquals(true, vals.get("g"));
    assertEquals(false, vals.get("h"));
    assertEquals(true, vals.get("i"));
    assertEquals(new Password("password"), vals.get("j"));
    assertEquals(Password.HIDDEN, vals.get("j").toString());
}
Example 11: testNullDefault
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testNullDefault() {
    ConfigDef def = new ConfigDef().define("a", Type.INT, null, null, null, "docs");
    Map<String, Object> vals = def.parse(new Properties());
    assertEquals(null, vals.get("a"));
}
Example 12: testBadInputs
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testBadInputs() {
    testBadInputs(Type.INT, "hello", "42.5", 42.5, Long.MAX_VALUE, Long.toString(Long.MAX_VALUE), new Object());
    testBadInputs(Type.LONG, "hello", "42.5", Long.toString(Long.MAX_VALUE) + "00", new Object());
    testBadInputs(Type.DOUBLE, "hello", new Object());
    testBadInputs(Type.STRING, new Object());
    testBadInputs(Type.LIST, 53, new Object());
    testBadInputs(Type.BOOLEAN, "hello", "truee", "fals");
    testBadInputs(Type.CLASS, "ClassDoesNotExist");
}
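The testBadInputs(Type, Object...) helper is not shown in this listing. A plausible shape for it, reconstructed from how it is called above: define a single key of the given type and assert that parse() rejects every supplied value with a ConfigException.

import static org.junit.Assert.fail;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigException;

// Hypothetical reconstruction of the hidden helper.
private void testBadInputs(Type type, Object... values) {
    for (Object value : values) {
        Map<String, Object> m = new HashMap<>();
        m.put("name", value);
        ConfigDef def = new ConfigDef().define("name", type, Importance.HIGH, "docs");
        try {
            def.parse(m);
            fail("Expected a config exception on bad input for value " + value);
        } catch (ConfigException e) {
            // expected: the value cannot be parsed as the given type
        }
    }
}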
Example 13: testValidators
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testValidators() {
    testValidators(Type.INT, Range.between(0, 10), 5, new Object[]{1, 5, 9}, new Object[]{-1, 11, null});
    testValidators(Type.STRING, ValidString.in("good", "values", "default"), "default",
            new Object[]{"good", "values", "default"}, new Object[]{"bad", "inputs", null});
    testValidators(Type.LIST, ConfigDef.ValidList.in("1", "2", "3"), "1", new Object[]{"1", "2", "3"}, new Object[]{"4", "5", "6"});
}
Example 14: testNullDefaultWithValidator
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testNullDefaultWithValidator() {
    final String key = "enum_test";
    ConfigDef def = new ConfigDef();
    def.define(key, Type.STRING, ConfigDef.NO_DEFAULT_VALUE,
            ValidString.in("ONE", "TWO", "THREE"), Importance.HIGH, "docs");

    Properties props = new Properties();
    props.put(key, "ONE");

    Map<String, Object> vals = def.parse(props);
    assertEquals("ONE", vals.get(key));
}
Example 15: testValidate
import org.apache.kafka.common.config.ConfigDef.Type; // import the required package/class
@Test
public void testValidate() {
    Map<String, ConfigValue> expected = new HashMap<>();
    String errorMessageB = "Missing required configuration \"b\" which has no default value.";
    String errorMessageC = "Missing required configuration \"c\" which has no default value.";

    ConfigValue configA = new ConfigValue("a", 1, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList());
    ConfigValue configB = new ConfigValue("b", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageB, errorMessageB));
    ConfigValue configC = new ConfigValue("c", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageC));
    ConfigValue configD = new ConfigValue("d", 10, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList());
    expected.put("a", configA);
    expected.put("b", configB);
    expected.put("c", configC);
    expected.put("d", configD);

    ConfigDef def = new ConfigDef()
            .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c"), new IntegerRecommender(false))
            .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true))
            .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true))
            .define("d", Type.INT, Importance.HIGH, "docs", "group", 4, Width.SHORT, "d", Arrays.asList("b"), new IntegerRecommender(false));

    Map<String, String> props = new HashMap<>();
    props.put("a", "1");
    props.put("d", "10");

    List<ConfigValue> configs = def.validate(props);
    for (ConfigValue config : configs) {
        String name = config.name();
        ConfigValue expectedConfig = expected.get(name);
        assertEquals(expectedConfig, config);
    }
}
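IntegerRecommender is also not shown in this listing. A sketch of a ConfigDef.Recommender consistent with the values asserted above (1, 2, 3 when the flag is false and 4, 5 when it is true) could look like the following; the field name and the always-visible behaviour are assumptions.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;

// Hypothetical reconstruction of the recommender used above: the flag only
// selects which values are recommended, and every key stays visible.
private static class IntegerRecommender implements ConfigDef.Recommender {
    private final boolean dependent;

    IntegerRecommender(boolean dependent) {
        this.dependent = dependent;
    }

    @Override
    public List<Object> validValues(String name, Map<String, Object> parsedConfig) {
        return dependent ? Arrays.<Object>asList(4, 5) : Arrays.<Object>asList(1, 2, 3);
    }

    @Override
    public boolean visible(String name, Map<String, Object> parsedConfig) {
        return true;
    }
}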