DE-3247 | refactored code and added logs

This commit is contained in:
puru
2024-08-23 15:24:00 +05:30
parent 90ddba18b0
commit 7981dedde4
8 changed files with 311 additions and 205 deletions

View File

@@ -1,19 +1,17 @@
package lib
import (
metrics "com.navi.medici.janus/instrumentation"
producer_module "com.navi.medici.janus/producer"
"com.navi.medici.janus/utils"
"context"
"encoding/json"
"net/http"
"time"
"github.com/Shopify/sarama"
)
metrics "com.navi.medici.janus/instrumentation"
producer_module "com.navi.medici.janus/producer"
"com.navi.medici.janus/utils"
var (
ProtobufRequestChannel = make(chan *RequestObject)
JsonRequestChannel = make(chan *RequestObject)
"github.com/Shopify/sarama"
"go.uber.org/zap"
)
type RequestObject struct {
@@ -21,45 +19,72 @@ type RequestObject struct {
Header http.Header
}
func ProcessJsonRequestChannel(topic string) {
for {
request := <-JsonRequestChannel
ClickstreamJsonEventHandler(*request, topic)
// WorkerPool fans incoming RequestObjects out to a fixed number of worker
// goroutines that process them and publish to Kafka.
type WorkerPool struct {
	workers  int                // number of worker goroutines launched by Start
	jobQueue chan RequestObject // buffered queue of pending requests (capacity == workers, see NewWorkerPool)
	logger   *zap.Logger        // structured logger obtained from utils.GetLogger()
}
// NewWorkerPool builds a pool that will run the given number of workers.
// The job queue is buffered to the same size as the worker count, and the
// pool logs through the shared logger from utils.GetLogger().
func NewWorkerPool(workers int) *WorkerPool {
	pool := &WorkerPool{}
	pool.workers = workers
	pool.jobQueue = make(chan RequestObject, workers)
	pool.logger = utils.GetLogger()
	return pool
}
func ClickstreamJsonEventHandler(request RequestObject, topic string) {
// Start launches wp.workers goroutines, each draining the job queue until
// ctx is cancelled. It returns immediately; the workers run in the
// background for the lifetime of ctx.
func (wp *WorkerPool) Start(ctx context.Context) {
	remaining := wp.workers
	for remaining > 0 {
		go wp.worker(ctx)
		remaining--
	}
}
// worker is the run loop of a single pool goroutine: it repeatedly takes a
// request off the queue and processes it, exiting when ctx is cancelled.
// (Case order inside select is not significant — Go picks a ready case at
// random, so this is behaviorally identical either way.)
func (wp *WorkerPool) worker(ctx context.Context) {
	for {
		select {
		case req := <-wp.jobQueue:
			wp.processRequest(req)
		case <-ctx.Done():
			return
		}
	}
}
// AddJob enqueues a request for processing by the worker pool.
// NOTE(review): this send blocks the caller once jobQueue is full (its
// buffer equals the worker count) and offers no cancellation path —
// confirm callers can tolerate blocking here, or that Start has been
// called before jobs arrive.
func (wp *WorkerPool) AddJob(job RequestObject) {
	wp.jobQueue <- job
}
// processRequest handles one inbound event: it validates the JSON body,
// attributes the event to a source client, records request/latency metrics,
// and hands the raw payload to the async Kafka producer. A panic anywhere
// in the pipeline is recovered and logged so one bad event cannot kill the
// worker goroutine.
func (wp *WorkerPool) processRequest(request RequestObject) {
	defer func() {
		if r := recover(); r != nil {
			wp.logger.Error("Recovered from panic in processRequest", zap.Any("panic", r))
		}
	}()
	eventProcessingStartTime := utils.NanosToMillis(time.Now().UnixNano())
	messageBytes := request.Body
	// Parse only to identify the client that sent this event; the payload
	// itself is forwarded to Kafka verbatim.
	var result map[string]interface{}
	if err := json.Unmarshal(messageBytes, &result); err != nil {
		wp.logger.Error("Failed to unmarshal JSON", zap.Error(err))
		return
	}
	source := getSource(result)
	// Fetch the topic once and reuse it for both the producer message and
	// the metric label so the two can never disagree (the previous code
	// called GetKafkaTopic twice).
	topic := producer_module.GetKafkaTopic()
	message := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.ByteEncoder(messageBytes),
	}
	metrics.RequestCounter.WithLabelValues(source).Inc()
	// Latency is measured up to hand-off; the actual Kafka write is async.
	metrics.EventProcessingTimeHist.WithLabelValues(topic, source).Observe(float64(utils.NanosToMillis(time.Now().UnixNano()) - eventProcessingStartTime))
	if err := producer_module.WriteMessageToKafkaAsync(message, source); err != nil {
		wp.logger.Error("Failed to write message to Kafka", zap.Error(err), zap.String("source", source))
	}
}
func getSource(event map[string]interface{}) string {