This article collects typical usage examples of the TypeScript method @restorecommerce/kafka-client.Events.topic. If you have been wondering what exactly the TypeScript Events.topic method does, how Events.topic is used, or what Events.topic looks like in practice, the curated code examples here may help. You can also explore further usage examples of the class this method belongs to, @restorecommerce/kafka-client.Events.
A total of 6 code examples of the Events.topic method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better TypeScript code examples.
Example 1: before
before(async function setup() {
  cfg = sconfig(process.cwd() + '/test');
  const logger = new chassis.Logger(cfg.get('logger'));
  events = new Events(cfg.get('events:kafka'), logger);
  await events.start();
  const topics = cfg.get('events:kafka:topics');
  testTopic = events.topic(cfg.get('events:kafka:topics:test.resource:topic'));
  commandTopic = events.topic(cfg.get('events:kafka:topics:command:topic'));
  // subscribe to all response events
  for (let eventName of cfg.get('events:kafka:topics:command:events')) {
    await commandTopic.on(eventName, eventListener);
  }
  server = new Server(cfg.get('server'), logger);
  db = await database.get(cfg.get('database:arango'), logger);
  await db.truncate();
  const config = cfg.get();
  delete config.database.nedb; // not supported in default implementation
  const cis = new CommandInterface(server, config, logger, events);
  await server.bind('commandinterface', cis);
  await server.start();
  const client = new Client(cfg.get('client:commandinterface'), logger);
  service = await client.connect();
});
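For orientation, the setup above resolves topic names through configuration keys such as events:kafka:topics:test.resource:topic and events:kafka:topics:command:topic. A rough sketch of how that events:kafka section might be shaped follows; the broker address, topic names and event names are illustrative assumptions, not the actual test configuration:

// Hypothetical shape of the 'events:kafka' config read via cfg.get() above.
// All concrete values (kafkaHost, topic names, event names) are placeholders.
const eventsKafka = {
  kafkaHost: 'localhost:9092',
  topics: {
    'test.resource': {
      topic: 'test'
    },
    command: {
      topic: 'io.restorecommerce.command',
      // response events subscribed in the loop above
      events: ['restoreResponse', 'resetResponse', 'versionResponse', 'healthCheckResponse']
    }
  }
};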
Example 2: testConsumeListener
async function testConsumeListener() {
  this.timeout(4000);
  // emit testMessage to kafka
  topic = await events.topic(topicName);
  await topic.emit(eventName, testMessage);
  // start offsetTracker subscribing from the previous offset value read
  // from redis and consume the above message
  offsetStore = new OffsetStore(events, cfg, logger);
  const listener = async function listener(message, context) {
    testMessage.value.should.equal(message.value);
    testMessage.count.should.equal(message.count);
  };
  // read the last stored offset for the 'test' topic and use it as the starting offset
  let startingOffset = await offsetStore.getOffset(topicName);
  await topic.on(eventName, listener, { startingOffset });
  // wait 2 seconds so the message is consumed and
  // the test does not end immediately
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve();
    }, 2000);
  });
}
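The fixed 2-second setTimeout above simply gives the consumer time to deliver the message. As an alternative, the test could resolve as soon as the listener actually fires. The small helper below is only a sketch; it relies solely on the topic.on(eventName, listener, { startingOffset }) call already used in the example:

// Sketch of a helper that resolves once the subscribed listener has consumed a
// message, instead of waiting a fixed 2 seconds.
function onceConsumed(topic: any, eventName: string, startingOffset: number): Promise<any> {
  return new Promise((resolve) => {
    // subscribe with the stored starting offset and resolve on the first message
    topic.on(eventName, async (message: any) => resolve(message), { startingOffset });
  });
}

// usage inside the test instead of the setTimeout-based wait:
// const message = await onceConsumed(topic, eventName, startingOffset);
// testMessage.value.should.equal(message.value);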
Example 3: updateTopicOffsets
/**
 * Updates the topic offsets in redis periodically.
 */
updateTopicOffsets(): any {
  // Iterate through the topics and update the offsets periodically for each topic;
  // events.topic(topicName) returns the topic object
  const kafkaCfg = this.config.get('events:kafka');
  const topicTypes = _.keys(kafkaCfg.topics);
  for (let i = 0; i < topicTypes.length; i += 1) {
    const topicType = topicTypes[i];
    const topicName = kafkaCfg.topics[topicType].topic;
    this.topics[topicType] = this.kafkaEvents.topic(topicName);
    this.timerID[i] = setInterval(this.storeOffset.bind(this),
      this.config.get('redis:offsetStoreInterval'), this.topics[topicType], topicName);
  }
}
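setInterval here passes this.topics[topicType] and topicName as extra arguments to the bound storeOffset callback. Its body is not shown in this listing; a minimal sketch of what such a callback could do, assuming a redisClient property and reusing the topic.$offset(-1) call seen in Example 4, might look like this:

// Illustrative sketch only: reads the latest offset of the topic and persists it
// under a per-topic key. The redisClient property and the key format are assumptions.
async storeOffset(topic: any, topicName: string): Promise<void> {
  // $offset(-1) resolves to the current latest offset of the topic
  const offsetValue = await topic.$offset(-1);
  await this.redisClient.set(`${topicName}:offset`, offsetValue);
}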
Example 4: testStoredOffsetValue
async function testStoredOffsetValue() {
  this.timeout(10000);
  offsetStore = new OffsetStore(events, cfg, logger);
  topic = await events.topic(topicName);
  const listener = function listener(message, context) {
    testMessage.value.should.equal(message.value);
    testMessage.count.should.equal(message.count);
  };
  // get the current offset value for the 'test' topic before emitting the message
  const currentOffset = await topic.$offset(-1);
  // emit message to kafka
  await topic.on(eventName, listener);
  await topic.emit(eventName, testMessage);
  const newOffset = await new Promise((resolve, reject) => {
    setTimeout(async () => {
      const offsetValue = await offsetStore.getOffset(topicName);
      resolve(offsetValue);
    }, 8000);
  });
  should.exist(newOffset);
  Number(newOffset).should.equal(currentOffset + 1);
}
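The final assertion relies on the fact that emitting a single message advances the latest offset by exactly one. A generalized sketch of the same check for n messages (not part of the original tests; the same topic, eventName, testMessage and offsetStore variables are assumed) could read:

// Sketch: emit n messages and expect the stored offset to advance by n.
async function expectOffsetAdvance(n: number): Promise<void> {
  const before = await topic.$offset(-1);
  for (let i = 0; i < n; i += 1) {
    await topic.emit(eventName, testMessage);
  }
  // give the OffsetStore time to consume and persist the new offset
  await new Promise((resolve) => setTimeout(resolve, 8000));
  const after = await offsetStore.getOffset(topicName);
  Number(after).should.equal(before + n);
}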
Example 5: constructor
constructor(server: Server, config: any, logger: Logger, events: Events) {
  if (_.isNil(events)) {
    if (logger.error) {
      logger.error('No Kafka client was provided. Disabling all commands.');
      return;
    }
  }
  if (!_.has(config, 'server.services')) {
    throw new Error('missing config server.services');
  }
  this.config = config;
  this.logger = logger;
  if (!_.has(this.config, 'events')
    || !_.has(this.config.events, 'kafka')
    || !_.has(this.config.events.kafka, 'topics')
    || !_.has(this.config.events.kafka.topics, 'command')) {
    throw new Error('Commands topic configuration was not provided.');
  }
  this.kafkaEvents = events;
  // Health
  this.health = {
    status: ServingStatus.UNKNOWN,
  };
  this.service = {};
  const service = this.service;
  const health = this.health;
  _.forEach(config.server.services, (serviceCfg, serviceName) => {
    service[serviceName] = {
      bound: false,
      transport: {},
    };
  });
  server.on('bound', (serviceName) => {
    service[serviceName].bound = true;
    health.status = ServingStatus.NOT_SERVING;
  });
  server.on('serving', (transports) => {
    health.status = ServingStatus.SERVING;
    _.forEach(transports, (transport, transportName) => {
      _.forEach(service, (srv, serviceName) => {
        service[serviceName].transport[transportName] = ServingStatus.SERVING;
      });
    });
  });
  server.on('stopped', (transports) => {
    health.status = ServingStatus.NOT_SERVING;
    _.forEach(transports, (transport, transportName) => {
      _.forEach(service, (srv, serviceName) => {
        service[serviceName].transport[transportName] = ServingStatus.NOT_SERVING;
      });
    });
  });
  // list of available commands
  this.commands = {
    reset: this.reset,
    restore: this.restore,
    reconfigure: this.reconfigure,
    health_check: this.check,
    version: this.version
  };
  const topicCfg = config.events.kafka.topics.command;
  this.commandTopic = events.topic(topicCfg.topic);
  // check for buffer fields
  this.bufferedCollection = new Map<string, string>();
  if (this.config.fieldHandlers && this.config.fieldHandlers.bufferFields) {
    for (let bufferedCollection in this.config.fieldHandlers.bufferFields) {
      this.bufferedCollection.set(bufferedCollection,
        this.config.fieldHandlers.bufferFields[bufferedCollection]);
    }
    this.logger.info('Buffered collections are:', this.bufferedCollection);
  }
}
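The constructor registers health_check: this.check among the available commands, but the handler itself is not shown in this listing. What follows is a rough, purely illustrative sketch of how such a handler could use the health and service bookkeeping set up above; the payload shape and return format are assumptions, not the actual implementation:

// Illustrative sketch only; not the actual chassis-srv health_check handler.
async check(payload: any): Promise<any> {
  if (_.isNil(payload) || _.isNil(payload.service) || payload.service === '') {
    // no service specified: report the overall server status
    return { status: this.health.status };
  }
  const serviceInfo = this.service[payload.service];
  if (_.isNil(serviceInfo)) {
    // errors.InvalidArgument is the error type used elsewhere in these examples
    throw new errors.InvalidArgument(`Unknown service ${payload.service}`);
  }
  return {
    status: serviceInfo.bound ? this.health.status : ServingStatus.NOT_SERVING
  };
}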
Example 6: restore
/**
 * Restore the system by re-reading Kafka messages.
 * This base implementation restores documents from a set of
 * ArangoDB database collections, using the chassis-srv database provider.
 * @param payload restore payload listing the entities (and offsets) to be restored
 */
async restore(payload: any): Promise<any> {
  if (_.isEmpty(payload) || _.isEmpty(payload.data)) {
    throw new errors.InvalidArgument('Invalid payload for restore command');
  }
  const restoreData: RestoreData[] = payload.data || [];
  // the Kafka config should contain key-value pairs mapping
  // a label to the topic's name
  const kafkaEventsCfg = this.config.events.kafka;
  const kafkaCfg = this.config.events.kafka.topics;
  if (_.isNil(kafkaCfg) || _.isEmpty(kafkaCfg)) {
    throw new errors.Internal('Kafka topics config not available');
  }
  const topicLabels = _.keys(kafkaCfg).filter((elem, index) => {
    return elem.includes('.resource');
  }).map((elem) => {
    return elem.replace('.resource', '');
  });
  const restoreSetup = {};
  const restoreEventSetup = {};
  restoreData.forEach((data) => {
    const ignoreOffset = (data.ignore_offset || []).filter((offset) => {
      const isNumber = !isNaN(Number(offset));
      if (!isNumber) {
        this.logger.warn(`Invalid value for "ignore_offset" parameter in restore: ${offset}`);
      }
      return isNumber;
    });
    restoreSetup[data.entity] = {
      baseOffset: Number(data.base_offset) || 0,
      ignoreOffset
    };
  });
  const restoreCollections = _.keys(restoreSetup);
  try {
    const dbCfgs = this.config.database;
    const dbCfgNames = _.keys(dbCfgs);
    for (let i = 0; i < dbCfgNames.length; i += 1) {
      const dbCfgName = dbCfgNames[i];
      const dbCfg = dbCfgs[dbCfgName];
      const collections = dbCfg.collections;
      let graphName;
      if (this.config.graph) {
        graphName = this.config.graph.graphName;
      }
      const db = await database.get(dbCfg, this.logger, graphName);
      if (_.isNil(collections)) {
        this.logger.warn('No collections found on DB config');
        return {};
      }
      let intersection: string[] = _.intersection(restoreCollections, collections);
      if (intersection.length > 0) {
        intersection = _.intersection(intersection, topicLabels);
        for (let resource of intersection) {
          const topicName = kafkaCfg[`${resource}.resource`].topic;
          restoreEventSetup[topicName] = {
            topic: this.kafkaEvents.topic(topicName),
            events: this.makeResourcesRestoreSetup(db, resource),
            baseOffset: restoreSetup[resource].baseOffset,
            ignoreOffset: restoreSetup[resource].ignoreOffset
          };
        }
      }
    }
    if (_.isEmpty(restoreEventSetup)) {
      this.logger.warn('No data was setup for the restore process.');
    } else {
      const that = this;
      // Start the restore process
      this.logger.warn('restoring data');
      for (let topicName in restoreEventSetup) {
        const topicSetup: any = restoreEventSetup[topicName];
        const restoreTopic: Topic = topicSetup.topic;
        const topicEvents: any = topicSetup.events;
        // saving listeners for potentially subscribed events on this topic,
        // so they don't get called during the restore process
        const previousEvents: string[] = _.cloneDeep(restoreTopic.subscribed);
        const listenersBackup = new Map<string, Function[]>();
        for (let event of previousEvents) {
          listenersBackup.set(event, (restoreTopic.emitter as EventEmitter).listeners(event));
          await restoreTopic.removeAllListeners(event);
        }
        //......... the rest of the code is omitted here .........