本文整理匯總了TypeScript中@restorecommerce/kafka-client.Topic類的典型用法代碼示例。如果您正苦於以下問題:TypeScript Topic類的具體用法?TypeScript Topic怎麼用?TypeScript Topic使用的例子?那麼, 這裏精選的類代碼示例或許可以為您提供幫助。
在下文中一共展示了Topic類的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的TypeScript代碼示例。
示例1: version
/**
 * Reports the service's version information: the NPM package version
 * (from the npm_package_version environment variable) and the running
 * Node.js version. The info is broadcast on the command topic as a
 * 'versionResponse' event and also returned to the caller.
 */
async version(): Promise<any> {
  const versionInfo = {
    nodejs: process.version,
    version: process.env.npm_package_version,
  };
  const eventPayload = {
    services: _.keys(this.service),
    payload: this.encodeMsg(versionInfo),
  };
  await this.commandTopic.emit('versionResponse', eventPayload);
  return versionInfo;
}
示例2: describe
describe('offsetStore', () => {
  let events: Events;
  const topicName = 'test';
  let topic: Topic;
  let offsetStore: OffsetStore;
  const eventName = 'testCreated';
  const testMessage = { value: 'testValue', count: 1 };
  const cfg = sconfig(process.cwd() + '/test');
  const logger = new Logger(cfg.get('logger'));

  // Typed delay helper shared by both tests. The original inline
  // `new Promise((resolve, reject) => { setTimeout(() => resolve(), ms); })`
  // does not compile under strict TypeScript: the untyped promise infers
  // Promise<unknown>, whose resolve requires an argument. It also left
  // `reject` unused and duplicated the wait logic.
  const delay = (ms: number): Promise<void> =>
    new Promise<void>((resolve) => { setTimeout(resolve, ms); });

  // fresh Kafka connection per test
  beforeEach(async function start() {
    events = new Events(cfg.get('events:kafka'), logger);
    await events.start();
  });

  afterEach(async function stop() {
    await offsetStore.stop();
    await events.stop();
  });

  it('should emit an event and verify the stored offset value from redis',
    async function testStoredOffsetValue() {
      this.timeout(10000);
      offsetStore = new OffsetStore(events, cfg, logger);
      topic = await events.topic(topicName);
      // listener validates the payload round-trips unchanged
      const listener = function listener(message, context) {
        testMessage.value.should.equal(message.value);
        testMessage.count.should.equal(message.count);
      };
      // get the current offsetValue for 'test' topic before emitting message
      const currentOffset = await topic.$offset(-1);
      // emit message to kafka
      await topic.on(eventName, listener);
      await topic.emit(eventName, testMessage);
      // give the consumer time to process before reading the stored offset
      await delay(8000);
      const newOffset = await offsetStore.getOffset(topicName);
      should.exist(newOffset);
      Number(newOffset).should.equal(currentOffset + 1);
    });

  it('should consume a previously emitted message from Kafka',
    async function testConsumeListener() {
      this.timeout(4000);
      // emit testMessage to kafka
      topic = await events.topic(topicName);
      await topic.emit(eventName, testMessage);
      // start offsetTracker subscribing to previous offset value read
      // from redis and consume the above message
      offsetStore = new OffsetStore(events, cfg, logger);
      const listener = async function listener(message, context) {
        testMessage.value.should.equal(message.value);
        testMessage.count.should.equal(message.count);
      };
      // get the current offsetValue for 'test' topic before emitting message
      const startingOffset = await offsetStore.getOffset(topicName);
      await topic.on(eventName, listener, { startingOffset });
      // wait 2s so the message is consumed and the test does not end immediately
      await delay(2000);
    });
});
示例3: restore
/**
 * Restore the system by re-reading Kafka messages.
 * This base implementation restores documents from a set of
 * ArangoDB database collections, using the chassis-srv database provider.
 * @param payload restore command payload; `payload.data` holds one
 *        RestoreData entry per entity (entity name, base_offset, and
 *        a list of offsets to ignore)
 */
async restore(payload: any): Promise<any> {
// reject empty / malformed restore requests up front
if (_.isEmpty(payload) || _.isEmpty(payload.data)) {
throw new errors.InvalidArgument('Invalid payload for restore command');
}
const restoreData: RestoreData[] = payload.data || [];
// the Kafka config should contain key-value pairs mapping
// a label to the topic's name
const kafkaEventsCfg = this.config.events.kafka;
const kafkaCfg = this.config.events.kafka.topics;
if (_.isNil(kafkaCfg) || kafkaCfg.length == 0) {
throw new errors.Internal('Kafka topics config not available');
}
// resource topics are configured under keys like '<entity>.resource';
// strip the suffix to get the bare entity labels
const topicLabels = _.keys(kafkaCfg).filter((elem, index) => {
return elem.includes('.resource');
}).map((elem) => {
return elem.replace('.resource', '');
});
const restoreSetup = {};
const restoreEventSetup = {};
// build per-entity offset setup: where to start and which offsets to skip
restoreData.forEach((data) => {
const ignoreOffset = (data.ignore_offset || []).filter((offset) => {
// NOTE(review): `Number(offset) != NaN` is ALWAYS true (NaN never
// compares equal to anything, including itself) — this was likely
// meant to be `!isNaN(Number(offset))`, so invalid values are
// currently never filtered out and the warning below never fires
const isNumber = Number(offset) != NaN;
if (!isNumber) {
this.logger.warn(`Invalid value for "ignore_offset" parameter in restore: ${offset}`);
}
return isNumber;
});
restoreSetup[data.entity] = {
baseOffset: Number(data.base_offset) || 0,
ignoreOffset
};
});
const restoreCollections = _.keys(restoreSetup);
try {
const dbCfgs = this.config.database;
const dbCfgNames = _.keys(dbCfgs);
for (let i = 0; i < dbCfgNames.length; i += 1) {
const dbCfgName = dbCfgNames[i];
const dbCfg = dbCfgs[dbCfgName];
const collections = dbCfg.collections;
let graphName;
if (this.config.graph) {
graphName = this.config.graph.graphName;
}
const db = await database.get(dbCfg, this.logger, graphName);
if (_.isNil(collections)) {
// NOTE(review): this return aborts restore for ALL remaining DB
// configs, not just the current one — confirm that is intended
this.logger.warn('No collections found on DB config');
return {};
}
// only restore entities that exist both as DB collections and as
// configured resource topic labels
let intersection: string[] = _.intersection(restoreCollections, collections);
if (intersection.length > 0) {
intersection = _.intersection(intersection, topicLabels);
for (let resource of intersection) {
const topicName = kafkaCfg[`${resource}.resource`].topic;
restoreEventSetup[topicName] = {
topic: this.kafkaEvents.topic(topicName),
events: this.makeResourcesRestoreSetup(db, resource),
baseOffset: restoreSetup[resource].baseOffset,
ignoreOffset: restoreSetup[resource].ignoreOffset
};
}
}
}
if (_.isEmpty(restoreEventSetup)) {
this.logger.warn('No data was setup for the restore process.');
} else {
const that = this;
// Start the restore process
this.logger.warn('restoring data');
for (let topicName in restoreEventSetup) {
const topicSetup: any = restoreEventSetup[topicName];
const restoreTopic: Topic = topicSetup.topic;
const topicEvents: any = topicSetup.events;
// saving listeners for potentially subscribed events on this topic,
// so they don't get called during the restore process
const previousEvents: string[] = _.cloneDeep(restoreTopic.subscribed);
const listenersBackup = new Map<string, Function[]>();
for (let event of previousEvents) {
listenersBackup.set(event, (restoreTopic.emitter as EventEmitter).listeners(event));
await restoreTopic.removeAllListeners(event);
}
// ... remainder of this method omitted in this excerpt ...
示例4: storeOffset
/**
* stores the offset to redis
* @param {object} topic Topic object
* @param {object} redisClient
* @return {object}
*/
async storeOffset(topic: Topic, topicName: string): Promise<any> {
// get the latest offset here each time and store it.
const offsetValue = await topic.$offset(-1);
const redisKey = this.config.get('events:kafka:clientId') + ':' + topicName;
this.redisClient.set(redisKey, offsetValue, this.redisClient.print);
}
示例5: testStoredOffsetValue
// Test body (duplicate of the first case in the offsetStore suite above):
// emits one message and verifies the offset persisted in Redis advanced by 1.
async function testStoredOffsetValue() {
this.timeout(10000);
offsetStore = new OffsetStore(events, cfg, logger);
topic = await (events.topic(topicName));
// listener validates the payload round-trips unchanged
const listener = function listener(message, context) {
testMessage.value.should.equal(message.value);
testMessage.count.should.equal(message.count);
};
// get the current offsetValue for 'test' topic before emitting message
const currentOffset = await topic.$offset(-1);
// emit message to kafka
await topic.on(eventName, listener);
await topic.emit(eventName, testMessage);
// wait 8s for the consumer to process, then read the stored offset
const newOffset = await new Promise((resolve, reject) => {
setTimeout(async () => {
const offsetValue = await offsetStore.getOffset(topicName);
resolve(offsetValue);
}, 8000);
});
should.exist(newOffset);
// stored offset must have advanced by exactly the one emitted message
Number(newOffset).should.equal(currentOffset + 1);
});