This article collects typical usage examples of the @restorecommerce/kafka-client.Topic class in TypeScript. If you have been wondering what the Topic class does, how to use it, or what real-world examples look like, the curated class examples below may help.
The following presents 5 code examples of the Topic class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps our system recommend better TypeScript code examples.
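Before the examples, here is a minimal sketch of the basic Topic workflow (connect, obtain a topic, subscribe, emit, read the latest offset) as it appears in the examples below. The function name demo, the topic name 'demoTopic' and the event name 'demoCreated' are placeholders chosen for illustration, and the config and logger arguments stand in for the cfg.get('events:kafka') value and Logger instance used in the tests further down.

import { Events, Topic } from '@restorecommerce/kafka-client';

// kafkaConfig and logger are placeholders for the configuration and logger
// objects constructed in the test examples below
async function demo(kafkaConfig: any, logger: any): Promise<void> {
  const events = new Events(kafkaConfig, logger);        // wire up the Kafka client
  await events.start();

  const topic: Topic = await events.topic('demoTopic');  // obtain a Topic handle

  // register a listener before emitting, as done in the offsetStore tests
  await topic.on('demoCreated', (message, context) => {
    logger.info('received message', message);
  });

  // emit a message to Kafka
  await topic.emit('demoCreated', { value: 'demoValue', count: 1 });

  // read the latest offset of the topic (see the storeOffset example)
  const latestOffset = await topic.$offset(-1);
  logger.info('latest offset', latestOffset);

  await events.stop();
}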
Example 1: version
/**
 * Retrieve current NPM package and Node version of service
 */
async version(): Promise<any> {
  const response = {
    nodejs: process.version,
    version: process.env.npm_package_version,
  };
  await this.commandTopic.emit('versionResponse', {
    services: _.keys(this.service),
    payload: this.encodeMsg(response)
  });
  return response;
}
Developer ID: restorecommerce, Project: chassis-srv, Lines of code: 14, Source file: index.ts
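As a complement, below is a hedged sketch of how another service might consume the versionResponse event emitted above. The command topic name 'io.restorecommerce.command' and the decodePayload helper are assumptions made for illustration; the actual topic name and the inverse of encodeMsg are defined elsewhere in chassis-srv.

import { Events, Topic } from '@restorecommerce/kafka-client';

// subscribe to version responses on the command topic;
// the topic name and decodePayload are assumed for this sketch
async function watchVersionResponses(events: Events,
  decodePayload: (payload: any) => any): Promise<void> {
  const commandTopic: Topic = await events.topic('io.restorecommerce.command');
  await commandTopic.on('versionResponse', (message, context) => {
    const { services, payload } = message;
    console.log('services:', services, 'version info:', decodePayload(payload));
  });
}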
Example 2: describe
describe('offsetStore', () => {
  let events: Events;
  const topicName = 'test';
  let topic: Topic;
  let offsetStore: OffsetStore;
  const eventName = 'testCreated';
  const testMessage = { value: 'testValue', count: 1 };

  const cfg = sconfig(process.cwd() + '/test');
  const logger = new Logger(cfg.get('logger'));

  beforeEach(async function start() {
    events = new Events(cfg.get('events:kafka'), logger);
    await events.start();
  });

  afterEach(async function stop() {
    await offsetStore.stop();
    await events.stop();
  });

  it('should emit an event and verify the stored offset value from redis',
    async function testStoredOffsetValue() {
      this.timeout(10000);
      offsetStore = new OffsetStore(events, cfg, logger);
      topic = await events.topic(topicName);

      const listener = function listener(message, context) {
        testMessage.value.should.equal(message.value);
        testMessage.count.should.equal(message.count);
      };

      // get the current offset for the 'test' topic before emitting the message
      const currentOffset = await topic.$offset(-1);
      // subscribe and emit the message to Kafka
      await topic.on(eventName, listener);
      await topic.emit(eventName, testMessage);

      const newOffset = await new Promise((resolve, reject) => {
        setTimeout(async () => {
          const offsetValue = await offsetStore.getOffset(topicName);
          resolve(offsetValue);
        }, 8000);
      });
      should.exist(newOffset);
      Number(newOffset).should.equal(currentOffset + 1);
    });

  it('should consume a previously emitted message from Kafka',
    async function testConsumeListener() {
      this.timeout(4000);
      // emit testMessage to Kafka
      topic = await events.topic(topicName);
      await topic.emit(eventName, testMessage);

      // start the offset store, subscribe from the previous offset value
      // read from redis, and consume the message emitted above
      offsetStore = new OffsetStore(events, cfg, logger);
      const listener = async function listener(message, context) {
        testMessage.value.should.equal(message.value);
        testMessage.count.should.equal(message.count);
      };
      // get the current offset for the 'test' topic before subscribing
      let startingOffset = await offsetStore.getOffset(topicName);
      await topic.on(eventName, listener, { startingOffset });

      // wait 2 seconds so the message is consumed and
      // the test does not end immediately
      return new Promise((resolve, reject) => {
        setTimeout(() => {
          resolve();
        }, 2000);
      });
    });
});
Developer ID: restorecommerce, Project: chassis-srv, Lines of code: 72, Source file: kafka_offsetstore_test.ts
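The second test above demonstrates the resume pattern: read the last stored offset from the OffsetStore and pass it as startingOffset when subscribing. A condensed sketch of that pattern, reusing the events, cfg and logger setup from the test and assuming the same OffsetStore API:

// resume consumption of the 'test' topic from the offset persisted in redis
const offsetStore = new OffsetStore(events, cfg, logger);
const topic = await events.topic('test');
const startingOffset = await offsetStore.getOffset('test');
await topic.on('testCreated', async (message, context) => {
  logger.info('replayed message', message);
}, { startingOffset });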
Example 3: restore
/**
 * Restore the system by re-reading Kafka messages.
 * This base implementation restores documents from a set of
 * ArangoDB database collections, using the chassis-srv database provider.
 * @param payload restore data listing the resources, base offsets and offsets to ignore
 */
async restore(payload: any): Promise<any> {
  if (_.isEmpty(payload) || _.isEmpty(payload.data)) {
    throw new errors.InvalidArgument('Invalid payload for restore command');
  }
  const restoreData: RestoreData[] = payload.data || [];

  // the Kafka config should contain key-value pairs, mapping
  // a label to the topic's name
  const kafkaEventsCfg = this.config.events.kafka;
  const kafkaCfg = this.config.events.kafka.topics;
  if (_.isNil(kafkaCfg) || _.isEmpty(kafkaCfg)) {
    throw new errors.Internal('Kafka topics config not available');
  }

  const topicLabels = _.keys(kafkaCfg).filter((elem, index) => {
    return elem.includes('.resource');
  }).map((elem) => {
    return elem.replace('.resource', '');
  });

  const restoreSetup = {};
  const restoreEventSetup = {};

  restoreData.forEach((data) => {
    const ignoreOffset = (data.ignore_offset || []).filter((offset) => {
      const isNumber = !Number.isNaN(Number(offset));
      if (!isNumber) {
        this.logger.warn(`Invalid value for "ignore_offset" parameter in restore: ${offset}`);
      }
      return isNumber;
    });
    restoreSetup[data.entity] = {
      baseOffset: Number(data.base_offset) || 0,
      ignoreOffset
    };
  });

  const restoreCollections = _.keys(restoreSetup);

  try {
    const dbCfgs = this.config.database;
    const dbCfgNames = _.keys(dbCfgs);
    for (let i = 0; i < dbCfgNames.length; i += 1) {
      const dbCfgName = dbCfgNames[i];
      const dbCfg = dbCfgs[dbCfgName];
      const collections = dbCfg.collections;
      let graphName;
      if (this.config.graph) {
        graphName = this.config.graph.graphName;
      }
      const db = await database.get(dbCfg, this.logger, graphName);
      if (_.isNil(collections)) {
        this.logger.warn('No collections found on DB config');
        return {};
      }
      let intersection: string[] = _.intersection(restoreCollections, collections);
      if (intersection.length > 0) {
        intersection = _.intersection(intersection, topicLabels);
        for (let resource of intersection) {
          const topicName = kafkaCfg[`${resource}.resource`].topic;
          restoreEventSetup[topicName] = {
            topic: this.kafkaEvents.topic(topicName),
            events: this.makeResourcesRestoreSetup(db, resource),
            baseOffset: restoreSetup[resource].baseOffset,
            ignoreOffset: restoreSetup[resource].ignoreOffset
          };
        }
      }
    }

    if (_.isEmpty(restoreEventSetup)) {
      this.logger.warn('No data was setup for the restore process.');
    } else {
      const that = this;
      // Start the restore process
      this.logger.warn('restoring data');
      for (let topicName in restoreEventSetup) {
        const topicSetup: any = restoreEventSetup[topicName];
        const restoreTopic: Topic = topicSetup.topic;
        const topicEvents: any = topicSetup.events;
        // back up the listeners of any events currently subscribed on this topic,
        // so they do not get called during the restore process
        const previousEvents: string[] = _.cloneDeep(restoreTopic.subscribed);
        const listenersBackup = new Map<string, Function[]>();
        for (let event of previousEvents) {
          listenersBackup.set(event, (restoreTopic.emitter as EventEmitter).listeners(event));
          await restoreTopic.removeAllListeners(event);
        }
        //......... part of the code omitted here .........
Developer ID: restorecommerce, Project: chassis-srv, Lines of code: 101, Source file: index.ts
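For reference, here is a hedged sketch of what a restore command payload might look like, derived from the fields read in the code above (data, entity, base_offset, ignore_offset). The entity name 'user' and the concrete offset values are made up for illustration, and the commented-out invocation is hypothetical.

// example payload for the restore command; field names follow the code above,
// while the entity name and offsets are illustrative only
const payload = {
  data: [
    {
      entity: 'user',          // must match a DB collection and a '<entity>.resource' topic label
      base_offset: 0,          // offset from which the topic is re-read
      ignore_offset: [5, 7]    // offsets to skip during the restore
    }
  ]
};
// await commandInterface.restore(payload);   // hypothetical invocation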
Example 4: storeOffset
/**
 * Stores the given topic's latest offset in redis.
 * @param {Topic} topic Topic object
 * @param {string} topicName name of the topic, used to build the redis key
 * @return {Promise<any>}
 */
async storeOffset(topic: Topic, topicName: string): Promise<any> {
  // get the latest offset each time and store it
  const offsetValue = await topic.$offset(-1);
  const redisKey = this.config.get('events:kafka:clientId') + ':' + topicName;
  this.redisClient.set(redisKey, offsetValue, this.redisClient.print);
}
Developer ID: restorecommerce, Project: chassis-srv, Lines of code: 12, Source file: index.ts
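To illustrate the other direction, here is a hedged sketch of a getOffset counterpart that reads the value back from redis. The method name matches the one used in the offsetStore tests, but the implementation shown here is an assumption based only on the redis key format used in storeOffset above.

/**
 * Reads the stored offset for a topic back from redis (illustrative sketch).
 * @param {string} topicName name of the topic
 * @return {Promise<number>}
 */
async getOffset(topicName: string): Promise<number> {
  const redisKey = this.config.get('events:kafka:clientId') + ':' + topicName;
  return new Promise<number>((resolve, reject) => {
    // callback-style node_redis get, matching the set() call in storeOffset
    this.redisClient.get(redisKey, (err, value) => {
      if (err) {
        return reject(err);
      }
      resolve(Number(value));
    });
  });
}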
Example 5: testStoredOffsetValue
async function testStoredOffsetValue() {
  this.timeout(10000);
  offsetStore = new OffsetStore(events, cfg, logger);
  topic = await events.topic(topicName);

  const listener = function listener(message, context) {
    testMessage.value.should.equal(message.value);
    testMessage.count.should.equal(message.count);
  };

  // get the current offset for the 'test' topic before emitting the message
  const currentOffset = await topic.$offset(-1);
  // subscribe and emit the message to Kafka
  await topic.on(eventName, listener);
  await topic.emit(eventName, testMessage);

  const newOffset = await new Promise((resolve, reject) => {
    setTimeout(async () => {
      const offsetValue = await offsetStore.getOffset(topicName);
      resolve(offsetValue);
    }, 8000);
  });
  should.exist(newOffset);
  Number(newOffset).should.equal(currentOffset + 1);
});
Developer ID: restorecommerce, Project: chassis-srv, Lines of code: 23, Source file: kafka_offsetstore_test.ts
Note: The @restorecommerce/kafka-client.Topic class examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow the license of the corresponding project. Do not republish without permission.