I have implemented Avro schemas in Node.js with the schema being sent along with each message payload, and it is working fine. I am looking for a way to use a schema registry with the kafka-node module, but I have not been able to find one.
Also, doesn't sending the schema with each message increase the message size? Does it affect performance compared to using a schema registry?
Any help in this will be appreciated.
You can use the "avro-schema-registry" module. It works for me. I am also new to Kafka and just trying it out.
// Kafka consumer that decodes Confluent-registry-framed Avro messages.
// Requires: kafka-node, avro-schema-registry (both third-party).
const kafka = require('kafka-node');
const avroSchemaRegistry = require('avro-schema-registry');

/* Configuration */
const kafkaTopic = 'newkafkatopic'; // 'kafka.test';
const host = 'localhost:9092';
const schemaRegistry = 'http://localhost:8081';

const Consumer = kafka.Consumer;
const Client = kafka.KafkaClient;

// Registry client: fetches each schema by id over HTTP once and caches it,
// so the schema is NOT carried inside every message payload.
const registry = avroSchemaRegistry(schemaRegistry);

// BUG FIX: KafkaClient takes an options object, not a bare connection string
// (that was the legacy kafka.Client signature). `new Client(host)` only
// appeared to work because KafkaClient's default kafkaHost is
// 'localhost:9092'; any other host value would have been silently ignored.
const client = new Client({ kafkaHost: host });

const topics = [{
  topic: kafkaTopic,
}];

const options = {
  autoCommit: false,
  fetchMaxWaitMs: 1000,
  fetchMaxBytes: 1024 * 1024,
  // 'buffer' is required: registry-encoded messages are binary
  // (magic byte + 4-byte schema id + Avro payload), not UTF-8 text.
  encoding: 'buffer',
};

const consumer = new Consumer(client, topics, options);

consumer.on('message', (rawMessage) => {
  console.log("Raw Message", rawMessage);
  // decode() reads the schema id from the message's 5-byte header,
  // resolves the schema against the registry (cached after first use),
  // and yields the deserialized Avro record.
  registry.decode(rawMessage.value)
    .then((msg) => {
      console.log(msg);
    })
    .catch((err) => console.log(err));
});

consumer.on('error', (e) => {
  console.log(e.message);
  consumer.close();
});
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate to Us With