// File: janus/producer/producer_client.go
// Last modified: 2023-02-02 17:47:11 +05:30
// (134 lines, 4.3 KiB, Go)
package producer
import (
"com.navi.medici.janus/config"
"com.navi.medici.janus/utils"
"crypto/tls"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"strings"
"time"
)
var (
syncProducer sarama.SyncProducer
asyncProducer sarama.AsyncProducer
logger *zap.Logger
)
// GetSyncProducerConfig builds the sarama configuration for the synchronous
// producer: TLS, SASL/SCRAM-SHA-512 authentication, GZIP compression,
// local-leader acks, and Return.Successes enabled (required by
// sarama.SyncProducer).
//
// NOTE(review): the env parameter is currently unused — the PROD-only
// security guard is commented out, so TLS/SASL are always enabled. Confirm
// whether non-PROD environments should skip it.
func GetSyncProducerConfig(kafkaConfiguration config.KafkaConfigurations, env string) *sarama.Config {
	// Named cfg (not config) to avoid shadowing the imported config package.
	cfg := sarama.NewConfig()
	// security configs
	//if env == "PROD" {
	cfg.Net.TLS.Enable = true
	cfg.Net.TLS.Config = createTLSConfiguration()
	cfg.Net.SASL.Enable = true
	cfg.Net.SASL.Handshake = true
	cfg.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
	cfg.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
		return &XDGSCRAMClient{HashGeneratorFcn: SHA512}
	}
	cfg.Net.SASL.User = kafkaConfiguration.Sasl_User
	cfg.Net.SASL.Password = kafkaConfiguration.Sasl_Password
	//}
	// producer configs
	// to be changed: read from config file
	cfg.Producer.Retry.Max = 3
	cfg.Producer.RequiredAcks = sarama.WaitForLocal
	cfg.Producer.Compression = sarama.CompressionGZIP
	// SyncProducer requires success reporting to be enabled.
	cfg.Producer.Return.Successes = true
	// metadata configs
	cfg.Metadata.RefreshFrequency = 1 * time.Minute
	return cfg
}
// GetAsyncProducerConfig builds the sarama configuration for the
// asynchronous producer: TLS, SASL/SCRAM-SHA-512 authentication, GZIP
// compression, local-leader acks, and batching (flush at 31000 bytes or
// every 100ms, whichever comes first).
//
// NOTE(review): the env parameter is currently unused — the PROD-only
// security guard is commented out, so TLS/SASL are always enabled. Confirm
// whether non-PROD environments should skip it.
func GetAsyncProducerConfig(kafkaConfiguration config.KafkaConfigurations, env string) *sarama.Config {
	// Named cfg (not config) to avoid shadowing the imported config package.
	cfg := sarama.NewConfig()
	// security configs
	//if env == "PROD" {
	cfg.Net.TLS.Enable = true
	cfg.Net.TLS.Config = createTLSConfiguration()
	cfg.Net.SASL.Enable = true
	cfg.Net.SASL.Handshake = true
	// Direct constant assignment, consistent with GetSyncProducerConfig
	// (the explicit SASLMechanism conversion was redundant).
	cfg.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
	cfg.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
		return &XDGSCRAMClient{HashGeneratorFcn: SHA512}
	}
	cfg.Net.SASL.User = kafkaConfiguration.Sasl_User
	cfg.Net.SASL.Password = kafkaConfiguration.Sasl_Password
	//}
	// producer configs
	// to be changed: read from config file
	cfg.Producer.Retry.Max = 3
	cfg.Producer.RequiredAcks = sarama.WaitForLocal
	cfg.Producer.Compression = sarama.CompressionGZIP
	cfg.Producer.Flush.Bytes = 31000
	cfg.Producer.Flush.Frequency = 100 * time.Millisecond
	// metadata configs
	cfg.Metadata.RefreshFrequency = 1 * time.Minute
	return cfg
}
// GetSyncProducer creates a synchronous Kafka producer connected to the
// comma-separated broker list in Bootstrap_Servers. It panics if the
// producer cannot be created.
func GetSyncProducer(kafkaConfiguration config.KafkaConfigurations, env string) sarama.SyncProducer {
	brokers := strings.Split(kafkaConfiguration.Bootstrap_Servers, ",")
	p, err := sarama.NewSyncProducer(brokers, GetSyncProducerConfig(kafkaConfiguration, env))
	if err != nil {
		panic(err)
	}
	return p
}
// GetAsyncProducer creates an asynchronous Kafka producer connected to the
// comma-separated broker list in Bootstrap_Servers and starts the
// background goroutine that drains producer errors. It panics if the
// producer cannot be created.
//
// NOTE(review): processProducerErrors is launched before InitializeProducers
// assigns the package-level asyncProducer — confirm it does not read that
// variable before assignment.
func GetAsyncProducer(kafkaConfiguration config.KafkaConfigurations, env string) sarama.AsyncProducer {
	brokers := strings.Split(kafkaConfiguration.Bootstrap_Servers, ",")
	p, err := sarama.NewAsyncProducer(brokers, GetAsyncProducerConfig(kafkaConfiguration, env))
	if err != nil {
		panic(err)
	}
	go processProducerErrors()
	return p
}
// InitializeProducers wires up the package-level logger and both the
// sync and async producers. The logger is set first because the async
// producer starts a background error-handling goroutine that may log.
// Panics (via the Get* helpers) if either producer cannot be created.
func InitializeProducers(kafkaConfiguration config.KafkaConfigurations, env string) {
logger = utils.GetLogger()
syncProducer = GetSyncProducer(kafkaConfiguration, env)
asyncProducer = GetAsyncProducer(kafkaConfiguration, env)
}
// createTLSConfiguration returns the TLS settings used for broker
// connections; server certificates are verified (InsecureSkipVerify is
// explicitly false).
func createTLSConfiguration() (t *tls.Config) {
	return &tls.Config{InsecureSkipVerify: false}
}
// using confluent-kafka-go
// func GetProducerConfig(kafkaConfiguration config.KafkaConfigurations) *kafka.ConfigMap {
// var config = kafka.ConfigMap {
// "bootstrap.servers": kafkaConfiguration.Bootstrap_Servers,
// // "ssl.endpoint.identification.algorithm" : kafkaConfiguration.SSL_Endpoint_Algorithm,
// // "sasl.mechanism": kafkaConfiguration.SASL_Mechanism,
// // "request.timeout.ms": kafkaConfiguration.Request_Timeout_Ms,
// // "security.protocol": kafkaConfiguration.Security_Protocol,
// // "retry.backoff.ms": kafkaConfiguration.Retry_Backoff_MS,
// // "sasl.jaas.config": kafkaConfiguration.Sasl_JAAS_Config
// }
// return &config
// }
// func GetProducer(kafkaConfiguration config.KafkaConfigurations) *kafka.Producer {
// var config = GetProducerConfig(kafkaConfiguration)
// producer, err := kafka.NewProducer(config)
// if err != nil {
// panic(err)
// }
// return producer
// }