write to kafka using request handler channel

This commit is contained in:
"nishant-sharma"
2021-03-30 16:03:09 +05:30
parent 503dfdee98
commit 5acd0d552e
6 changed files with 38 additions and 48 deletions

View File

@@ -8,7 +8,7 @@ import (
var (
schemaVersionMap = make(map[string]int)
SchemaVersionMap = make(map[string]int)
)
@@ -22,10 +22,10 @@ func GetSchemaVersions(schemaRegistryEndpoint string, topicList []string) {
log.Println(err)
} else {
schemaId := schema.ID()
schemaVersionMap[topic] = schemaId
SchemaVersionMap[topic] = schemaId
}
}
log.Println(schemaVersionMap)
log.Println(SchemaVersionMap)
}

View File

@@ -1,49 +1,16 @@
package producer
import (
// "fmt"
"log"
// "io/ioutil"
"encoding/binary"
// data "com.navi.medici.janus/data"
"github.com/Shopify/sarama"
)
// KafkaWriter encodes messageToSendBytes in the Confluent schema-registry
// wire format (magic byte + schema ID + message index + payload) and
// publishes it synchronously to topic via the supplied SyncProducer.
// On send failure it calls log.Fatalln, which terminates the process.
func KafkaWriter(producer sarama.SyncProducer, topic string, messageToSendBytes []byte) {
// to be of the format [magicByte] + [schemaID] + [messageIndex] + [value]
recordValue := []byte{}
// add [magicByte]: 0 marks the Confluent wire format
recordValue = append(recordValue, byte(0))
// add [schemaID] as a 4-byte big-endian integer
// NOTE(review): schemaVersionMap is declared elsewhere; presumably it maps
// topic name -> registered schema ID — confirm it is populated (and that a
// missing topic yielding schema ID 0 is acceptable) before this is called.
schemaIDBytes := make([]byte, 4)
binary.BigEndian.PutUint32(schemaIDBytes, uint32(schemaVersionMap[topic]))
recordValue = append(recordValue, schemaIDBytes...)
// add [messageIndex]
// NOTE(review): {2, 0} looks like a hard-coded protobuf message-index
// preamble — verify against the registry's expected varint encoding.
messageIndexBytes := []byte{byte(2), byte(0)}
recordValue = append(recordValue, messageIndexBytes...)
// Now write the bytes from the actual value...
// valueBytes, _ := proto.Marshal(&sensorReading)
recordValue = append(recordValue, messageToSendBytes...)
msg := &sarama.ProducerMessage{
Topic: topic,
Value: sarama.ByteEncoder(recordValue),
}
partition, offset, error := producer.SendMessage(msg)
if error != nil {
log.Fatalln("Failed to write to kafka:", error)
}
// NOTE(review): both Printf calls below pass an argument without a
// formatting directive (go vet error) — partition/offset are never
// rendered; should be log.Printf("Partition: %d", partition) etc.
log.Printf("Partition: ", partition)
log.Printf("Offset: ", offset)
// log.Printf(error)
// log.Printf("Pixel sent: %s", messageToSend)
// WriteMessageToKafkaAsync hands message to the package-level async
// producer's input channel. If a delivery error from an earlier send is
// already queued on the producer's Errors() channel, that error is logged
// instead and message is NOT enqueued — the select takes whichever case
// is ready first (NOTE(review): consider draining Errors() in a dedicated
// goroutine so a pending error can never preempt a send).
func WriteMessageToKafkaAsync(message *sarama.ProducerMessage) {
	select {
	case asyncProducer.Input() <- message:
	case err := <-asyncProducer.Errors():
		// Fix: the original log.Printf had an argument but no formatting
		// directive (go vet: "call has arguments but no formatting
		// directives"), so err was never rendered in the log line.
		log.Printf("FAILED TO WRITE TO KAFKA: %v", err)
	}
}

View File

@@ -8,6 +8,10 @@ import (
// "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
)
var (
// Package-level producers shared by this package; populated once by
// InitializeProducers and used by the write helpers.
syncProducer sarama.SyncProducer
asyncProducer sarama.AsyncProducer
)
func GetSyncProducerConfig(kafkaConfiguration config.KafkaConfigurations) *sarama.Config {
config := sarama.NewConfig()
@@ -81,6 +85,12 @@ func GetAsyncProducer(kafkaConfiguration config.KafkaConfigurations) sarama.Asyn
return producer
}
// InitializeProducers builds the shared sync and async Sarama producers
// from the given Kafka configuration and stores them in the package-level
// variables used by the write helpers.
func InitializeProducers(kafkaConfiguration config.KafkaConfigurations) {
	asyncProducer = GetAsyncProducer(kafkaConfiguration)
	syncProducer = GetSyncProducer(kafkaConfiguration)
}
// using confluent-kafka-go
// func GetProducerConfig(kafkaConfiguration config.KafkaConfigurations) *kafka.ConfigMap {
// var config = kafka.ConfigMap {