This article collects typical usage examples of the Java method org.apache.kafka.connect.data.Struct.schema. If you have been wondering what Struct.schema does, how to call it, or where to find real-world examples of it, the curated code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.kafka.connect.data.Struct.
Below are 4 code examples of the Struct.schema method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
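Before the examples, here is a minimal, self-contained sketch (not from the original article; the Person schema and its fields are illustrative) of what Struct.schema returns: the Schema the Struct was constructed against.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class StructSchemaDemo {
  public static void main(String[] args) {
    // Illustrative schema describing the record layout.
    Schema personSchema = SchemaBuilder.struct().name("Person")
        .field("name", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .build();

    // Populate a Struct against that schema.
    Struct person = new Struct(personSchema)
        .put("name", "Alice")
        .put("age", 30);

    // schema() returns the Schema the Struct was created with,
    // so downstream code never needs a separate schema reference.
    Schema recovered = person.schema();
    System.out.println(recovered == personSchema); // true
  }
}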
Example 1: sourceRecord
import org.apache.kafka.connect.data.Struct; // import the package/class this method depends on
SourceRecord sourceRecord(String consumerTag, Envelope envelope, AMQP.BasicProperties basicProperties, byte[] bytes) {
  Struct key = MessageConverter.key(basicProperties);
  Struct value = MessageConverter.value(consumerTag, envelope, basicProperties, bytes);
  final String topic = this.config.kafkaTopic.execute(RabbitMQSourceConnectorConfig.KAFKA_TOPIC_TEMPLATE, value);
  return new SourceRecord(
      ImmutableMap.of("routingKey", envelope.getRoutingKey()),
      ImmutableMap.of("deliveryTag", envelope.getDeliveryTag()),
      topic,
      null,
      key.schema(),
      key,
      value.schema(),
      value,
      null == basicProperties.getTimestamp() ? this.time.milliseconds() : basicProperties.getTimestamp().getTime()
  );
}
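This snippet comes from a RabbitMQ source connector. It uses the nine-argument SourceRecord constructor: source partition, source offset, topic, Kafka partition (left null so it is assigned downstream), key schema, key, value schema, value, and timestamp. Because the key and value are Structs, key.schema() and value.schema() hand the constructor the exact schemas the Structs were built against, with no separate schema bookkeeping.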
Example 2: convert
import org.apache.kafka.connect.data.Struct; // import the package/class this method depends on
private SourceRecord convert(FileMetadata metadata, Offset offset, Struct struct) {
  return new SourceRecord(
      new HashMap<String, Object>() {
        {
          put("path", metadata.getPath());
          //TODO manage blocks
          //put("blocks", metadata.getBlocks().toString());
        }
      },
      Collections.singletonMap("offset", offset.getRecordOffset()),
      config.getTopic(),
      struct.schema(),
      struct
  );
}
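Example 2 emits records without a key, so it uses the shorter SourceRecord overload taking only the source partition, source offset, topic, value schema, and value; struct.schema() again supplies the schema directly from the Struct being emitted.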
Example 3: applyWithSchema
import org.apache.kafka.connect.data.Struct; // import the package/class this method depends on
private R applyWithSchema(R record) {
  final Struct value = requireStruct(operatingValue(record), PURPOSE);
  final Struct updatedValue = new Struct(value.schema());
  for (Field field : value.schema().fields()) {
    final Object origFieldValue = value.get(field);
    updatedValue.put(field, maskedFields.contains(field.name()) ? masked(origFieldValue) : origFieldValue);
  }
  return newRecord(record, updatedValue);
}
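Example 3 is the core of a field-masking transformation: value.schema() both constructs the copy and drives the field iteration (requireStruct and newRecord are helpers typical of Kafka Connect SMT implementations). Below is a standalone sketch of the same copy-and-transform pattern, assuming a hypothetical masked-field set and a fixed "***" replacement:

import java.util.Set;

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class MaskSketch {
  // Illustrative set of field names to mask.
  private static final Set<String> MASKED_FIELDS = Set.of("ssn");

  static Struct maskStrings(Struct value) {
    // Reusing value.schema() keeps the copy structurally identical to the input.
    Struct updated = new Struct(value.schema());
    for (Field field : value.schema().fields()) {
      Object original = value.get(field);
      updated.put(field, MASKED_FIELDS.contains(field.name()) ? "***" : original);
    }
    return updated;
  }

  public static void main(String[] args) {
    Schema schema = SchemaBuilder.struct().name("Customer")
        .field("name", Schema.STRING_SCHEMA)
        .field("ssn", Schema.STRING_SCHEMA)
        .build();
    Struct row = new Struct(schema)
        .put("name", "Alice")
        .put("ssn", "123-45-6789");
    System.out.println(maskStrings(row)); // ssn printed as ***
  }
}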
Example 4: generate
import org.apache.kafka.connect.data.Struct; // import the package/class this method depends on
@Disabled
@Test
public void generate() throws IOException {
  List<String> messages = Arrays.asList(
      "CEF:0|Security|threatmanager|1.0|100|worm successfully stopped|10|src=10.0.0.1 dst=2.1.2.2 spt=1232",
      "CEF:0|security|threatmanager|1.0|100|detected a \\| in message|10|src=10.0.0.1 act=blocked a | dst=1.1.1.1",
      "CEF:0|security|threatmanager|1.0|100|detected a \\ in packet|10|src=10.0.0.1 act=blocked a \\ dst=1.1.1.1",
      "CEF:0|security|threatmanager|1.0|100|detected a = in message|10|src=10.0.0.1 act=blocked a \\= dst=1.1.1.1",
      "CEF:0|ArcSight|Logger|5.0.0.5355.2|sensor:115|Logger Internal Event|1|cat=/Monitor/Sensor/Fan5 cs2=Current Value cnt=1 dvc=10.0.0.1 cs3=Ok cs1=null type=0 cs1Label=unit rt=1305034099211 cs3Label=Status cn1Label=value cs2Label=timeframe",
      "CEF:0|Trend Micro Inc.|OSSEC HIDS|v2.5.1|5302|User missed the password to change UID to root.|9|dvc=ubuntusvr cs2=ubuntusvr->/var/log/auth.log cs2Label=Location src= suser=root msg=May 11 21:16:05 ubuntusvr su[24120]: - /dev/pts/1 xavier:root",
      "CEF:0|security|threatmanager|1.0|100|Detected a threat. No action needed.|10|src=10.0.0.1 msg=Detected a threat.\\n No action needed.",
      "CEF:0|security|threatmanager|1.0|100|Detected a threat. No action needed.|10",
      "filterlog: 5,16777216,,1000000003,igb1,match,block,in,6,0x00,0x00000,255,ICMPv6,58,32,2605:6000:c00:96::1,ff02::1:ffac:f98,",
      "dhcpd: DHCPACK on 10.10.0.10 to 00:26:ab:fb:27:dc via igb2",
      "dhcpd: DHCPREQUEST for 10.10.0.10 from 00:26:ab:fb:27:dc via igb2",
      "dhcpleases: Sending HUP signal to dns daemon(69876)"
  );
  Multiset<String> counts = HashMultiset.create();
  for (String message : messages) {
    TestCase testCase = new TestCase();
    Struct valueInput = new Struct(VALUE_SCHEMA)
        .put("date", new Date(1493195158000L))
        .put("facility", 16)
        .put("host", "filterlog")
        .put("level", 6)
        .put("message", message)
        .put("charset", "utf-8")
        .put("remote_address", "/10.10.0.1:514")
        .put("hostname", "vpn.example.com");
    testCase.input = new SourceRecord(
        ImmutableMap.of(),
        ImmutableMap.of(),
        "syslog",
        null,
        null,
        null,
        valueInput.schema(),
        valueInput,
        1493195158000L
    );
    String fileNameFormat;
    try {
      testCase.expected = (SourceRecord) this.transformation.apply(testCase.input);
      fileNameFormat = testCase.expected.topic().equals("syslog.cef") ? "CEF%04d.json" : "NotCEF%04d.json";
      ((Struct) testCase.expected.value()).validate();
      // fileNameFormat = "CEF%04d.json";
    } catch (IllegalStateException ex) {
      fileNameFormat = "NotCEF%04d.json";
      testCase.expected = testCase.input;
    }
    counts.add(fileNameFormat);
    int testNumber = counts.count(fileNameFormat);
    File root = new File("src/test/resources/com/github/jcustenborder/kafka/connect/transform/cef/records");
    String filename = String.format(fileNameFormat, testNumber);
    File file = new File(root, filename);
    log.trace("Saving {}", filename);
    ObjectMapperFactory.INSTANCE.writeValue(file, testCase);
  }
}
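This test drives a CEF transformation: each raw syslog message is wrapped in a Struct built against VALUE_SCHEMA, and valueInput.schema() provides the value schema when the Struct is packed into a SourceRecord. Records whose transformed value fails Struct.validate() fall back to the untransformed input and are written out under the NotCEF file-name pattern, so the generated JSON fixtures cover both outcomes.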