This page collects typical usage examples of the Topic.$offset method from the TypeScript package @restorecommerce/kafka-client. If you have been wondering what exactly TypeScript's Topic.$offset does and how to use it, the curated code examples below may help. You can also read further about the class the method belongs to, @restorecommerce/kafka-client.Topic.
Three code examples of the Topic.$offset method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better TypeScript code examples.
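Before working through the examples, a minimal usage sketch may help: calling topic.$offset(-1) resolves to the topic's latest offset. The sketch below assumes an already started kafka-client Events instance named events and an existing topic called 'test'; both names are placeholders, not part of the examples on this page.

// minimal sketch, assuming `events` is a connected @restorecommerce/kafka-client
// Events instance and 'test' is an existing Kafka topic
const topic = await events.topic('test');      // resolve the Topic wrapper for the topic
const latestOffset = await topic.$offset(-1);  // -1 asks for the latest offset
console.log(`latest offset for 'test': ${latestOffset}`);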
Example 1: testStoredOffsetValue
async function testStoredOffsetValue() {
  // allow up to 10s for the message round trip and offset commit
  this.timeout(10000);
  offsetStore = new OffsetStore(events, cfg, logger);
  topic = await (events.topic(topicName));
  const listener = function listener(message, context) {
    testMessage.value.should.equal(message.value);
    testMessage.count.should.equal(message.count);
  };
  // get the current offset value for the 'test' topic before emitting the message
  const currentOffset = await topic.$offset(-1);
  // subscribe the listener and emit the message to Kafka
  await topic.on(eventName, listener);
  await topic.emit(eventName, testMessage);
  // wait for the message to be consumed, then read the stored offset
  const newOffset = await new Promise((resolve, reject) => {
    setTimeout(async () => {
      const offsetValue = await offsetStore.getOffset(topicName);
      resolve(offsetValue);
    }, 8000);
  });
  should.exist(newOffset);
  Number(newOffset).should.equal(currentOffset + 1);
}
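The this.timeout(10000) call and the should-style assertions suggest this function is meant to run as a Mocha test callback, which also explains the 10-second timeout around the 8-second wait. A hedged sketch of how it might be wired up (the test title is invented for illustration):

// hypothetical wiring, not shown on this page: passing the named function to Mocha
// binds `this` to the test context, which is what makes this.timeout(10000) work
it('stores the next offset after emitting a message', testStoredOffsetValue);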
Example 2: restore
/**
 * Restore the system by re-reading Kafka messages.
 * This base implementation restores documents from a set of
 * ArangoDB database collections, using the chassis-srv database provider.
 * @param payload restore command payload listing the resources and Kafka offsets to be restored
 */
async restore(payload: any): Promise<any> {
  if (_.isEmpty(payload) || _.isEmpty(payload.data)) {
    throw new errors.InvalidArgument('Invalid payload for restore command');
  }
  const restoreData: RestoreData[] = payload.data || [];
  // the Kafka config should contain key-value pairs mapping
  // a label to the topic's name
  const kafkaEventsCfg = this.config.events.kafka;
  const kafkaCfg = this.config.events.kafka.topics;
  // kafkaCfg is an object, so check emptiness with _.isEmpty rather than .length
  if (_.isNil(kafkaCfg) || _.isEmpty(kafkaCfg)) {
    throw new errors.Internal('Kafka topics config not available');
  }
  const topicLabels = _.keys(kafkaCfg).filter((elem, index) => {
    return elem.includes('.resource');
  }).map((elem) => {
    return elem.replace('.resource', '');
  });
  const restoreSetup = {};
  const restoreEventSetup = {};
  restoreData.forEach((data) => {
    const ignoreOffset = (data.ignore_offset || []).filter((offset) => {
      // `Number(offset) != NaN` is always true, so check with Number.isNaN instead
      const isNumber = !Number.isNaN(Number(offset));
      if (!isNumber) {
        this.logger.warn(`Invalid value for "ignore_offset" parameter in restore: ${offset}`);
      }
      return isNumber;
    });
    restoreSetup[data.entity] = {
      baseOffset: Number(data.base_offset) || 0,
      ignoreOffset
    };
  });
  const restoreCollections = _.keys(restoreSetup);
  try {
    const dbCfgs = this.config.database;
    const dbCfgNames = _.keys(dbCfgs);
    for (let i = 0; i < dbCfgNames.length; i += 1) {
      const dbCfgName = dbCfgNames[i];
      const dbCfg = dbCfgs[dbCfgName];
      const collections = dbCfg.collections;
      let graphName;
      if (this.config.graph) {
        graphName = this.config.graph.graphName;
      }
      const db = await database.get(dbCfg, this.logger, graphName);
      if (_.isNil(collections)) {
        this.logger.warn('No collections found on DB config');
        return {};
      }
      let intersection: string[] = _.intersection(restoreCollections, collections);
      if (intersection.length > 0) {
        intersection = _.intersection(intersection, topicLabels);
        for (let resource of intersection) {
          const topicName = kafkaCfg[`${resource}.resource`].topic;
          restoreEventSetup[topicName] = {
            topic: this.kafkaEvents.topic(topicName),
            events: this.makeResourcesRestoreSetup(db, resource),
            baseOffset: restoreSetup[resource].baseOffset,
            ignoreOffset: restoreSetup[resource].ignoreOffset
          };
        }
      }
    }
    if (_.isEmpty(restoreEventSetup)) {
      this.logger.warn('No data was setup for the restore process.');
    } else {
      const that = this;
      // Start the restore process
      this.logger.warn('restoring data');
      for (let topicName in restoreEventSetup) {
        const topicSetup: any = restoreEventSetup[topicName];
        const restoreTopic: Topic = topicSetup.topic;
        const topicEvents: any = topicSetup.events;
        // saving listeners for potentially subscribed events on this topic,
        // so they don't get called during the restore process
        const previousEvents: string[] = _.cloneDeep(restoreTopic.subscribed);
        const listenersBackup = new Map<string, Function[]>();
        for (let event of previousEvents) {
          listenersBackup.set(event, (restoreTopic.emitter as EventEmitter).listeners(event));
          await restoreTopic.removeAllListeners(event);
        }
//......... the rest of this example is omitted here .........
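The restore method above reads payload.data as a list of entries carrying entity, base_offset and ignore_offset fields, and maps each entity label to an '<entity>.resource' entry in the Kafka topics config. A hedged sketch of what such a payload could look like (the 'user' entity and the concrete offsets are made-up values for illustration):

// illustrative payload only; the field names follow the code above,
// while the 'user' entity and the offset values are assumptions
const payload = {
  data: [
    {
      entity: 'user',         // must match a 'user.resource' label in the Kafka topics config
      base_offset: 0,         // offset to start re-reading the topic from
      ignore_offset: [5, 12]  // offsets to skip during the restore
    }
  ]
};
// await service.restore(payload);  // hypothetical call to the method shown above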
Example 3: storeOffset
/**
 * Stores the topic's latest offset in Redis.
 * @param {object} topic Topic object
 * @param {string} topicName topic name, used to build the Redis key
 * @return {object}
 */
async storeOffset(topic: Topic, topicName: string): Promise<any> {
  // get the latest offset each time and store it
  const offsetValue = await topic.$offset(-1);
  const redisKey = this.config.get('events:kafka:clientId') + ':' + topicName;
  this.redisClient.set(redisKey, offsetValue, this.redisClient.print);
}
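Combining Examples 1 and 3, a hedged usage sketch of OffsetStore: construct it as in Example 1, persist the latest offset for a topic, and read it back. The 'test' topic name is a placeholder; events, cfg and logger are assumed to be set up as in Example 1.

// sketch based only on the calls shown in Examples 1 and 3
const offsetStore = new OffsetStore(events, cfg, logger);
const topic = await events.topic('test');
await offsetStore.storeOffset(topic, 'test');              // stores '<clientId>:test' -> latest offset in Redis
const storedOffset = await offsetStore.getOffset('test');  // reads the stored offset back, as in Example 1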