Merge pull request #4 from navi-ppl/TP-5555/clean-up

Tp 5555/clean up
This commit is contained in:
Varnit Goyal
2024-10-11 12:36:58 +05:30
committed by GitHub
13 changed files with 117 additions and 41 deletions

View File

@@ -13,7 +13,6 @@ tmp_dir = "tmp"
exclude_unchanged = false
follow_symlink = false
full_bin = ""
include_dir = ["bins"]
include_ext = ["go", "tpl", "tmpl", "html"]
include_file = []
kill_delay = "0s"

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "cybertron-symbolicator"]
path = cybertron-symbolicator
url = git@github.com:navi-ppl/cybertron-symbolicator.git

View File

@@ -1,4 +1,5 @@
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.19
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.22.4
# To run locally, use
#ARG GOLANG_TAG=registry.cmd.navi-tech.in/common/golang:1.19
@@ -10,6 +11,23 @@ COPY . /build
RUN /bin/bash -c "make build-log-enricher"
RUN /bin/bash -c "make build-log-enricher"
FROM rust:slim AS symbolicator-builder
SHELL ["/bin/bash", "-uo", "pipefail", "-c"]
# Update this version when a new version of element is released
ENV ELEMENT_VERSION 1.11.80
RUN mkdir /src
WORKDIR /src
COPY . .
RUN cd cybertron-symbolicator \
&& cargo build --release --locked \
&& mv "target/release/source-map" . \
&& strip source-map
FROM ${GOLANG_TAG}
RUN mkdir -p /usr/local
WORKDIR /usr/local
@@ -17,6 +35,7 @@ COPY --from=0 /build/out/log-enricher /usr/local/
COPY --from=0 /build/out/migrations /usr/local/migrations
COPY --from=0 /build/db/migrations/*.sql /usr/local/db/migrations/
COPY --from=0 /build/configs/application.yml /usr/local/configs/
COPY --from=symbolicator-builder /src /usr/local/bins/
RUN adduser --system --uid 4000 --disabled-password api-user && chown -R 4000:4000 /usr/local && chmod -R g+w /usr/local/
USER 4000
CMD /bin/bash -c "./log-enricher"

Binary file not shown.

View File

@@ -1,8 +1,8 @@
port: 9001
name: log-enricher
env: local
port: PORT
name: NAME
env: ENV
metrics:
port: 4001
port: METRICS_PORT
timezone: Asia/Kolkata
#DB config
@@ -16,19 +16,19 @@ db:
max:
idle: 10
open: 300
username: postgres
password: admin
host: localhost
username: DB_USERNAME
password: DB_PASSWORD
host: DB_HOST
port: 5432
name: cybertron_dev
name: DB_NAME
ssl:
mode: disable
#Prometheus config
prometheus:
app.name: log-enricher
host: localhost
port: 4001
app.name: PROMETHEUS_APP_NAME
host: PROMETHEUS_HOST
port: PROMETHEUS_PORT
enabled: true
timeout: 10
flush.interval.in.ms: 200
@@ -42,15 +42,15 @@ http:
# Kafka config
kafka:
password: kDia1uC.GI;)Al5eQ)+Q
username: varnitgoyal/varnitgoyal95@gmail.com/ocid1.streampool.oc1.ap-mumbai-1.amaaaaaaotdslraanepwp54txqqxkmg4l6dghrhufiezqkx2lqhndgxoq7pa
brokers: cell-1.streaming.ap-mumbai-1.oci.oraclecloud.com:9092
password: KAFKA_PASSWORD
username: KAFKA_USERNAME
brokers: KAFKA_BROKERS
group:
names: {"kafka-stream": "kafka-stream"}
topics: {"kafka-stream": "kafka-stream"}
names: KAFKA_GROUP
topics: KAFKA_TOPICS
tls:
insecureSkipVerify: true
enabled: true
enabled: false
sasl:
enabled: true
consumer:
@@ -67,12 +67,22 @@ DocumentService:
generate_token: DOCUMENT_SERVICE_MOCK_GENERATE_TOKEN
elastic:
addresses: https://localhost:9200
username: elastic
password: 9457611267
index: cybertron
api_key: Uk1ScGFKRUJYaHF5bTJkOWRuUmY6eWhIRGpFc1lUak9sRjcxY0taMzUydw==
addresses: ELASTIC_ADDRESS
username: ELASTIC_USERNAME
password: ELASTIC_PASSWORD
index: ELASTIC_INDEX
api_key: ELASTIC_API_KEY
aws:
region: ap-south-1
bucket: navi-cd955a63c4476df0f00c1cea0e4a40d1
region: AWS_REGION
bucket: AWS_BUCKET
mjolnir:
service.url: MJOLNIR_SERVICE_URL
realm.id: MJOLNIR_REALM
houston:
service.url: HOUSTON_SERVICE_URL
realm.id: HOUSTON_REALM_ID

View File

@@ -10,4 +10,8 @@ type ErrorDocument struct {
ProjectId string `json:"project_id"`
ReleaseVersion string `json:"release_id"`
SignificantStack string `json:"significant_stack"`
CreatedAt int64 `json:"created_at"`
Breadcrumbs interface{} `json:"breadcrumbs"`
Extra interface{} `json:"extra"`
Request interface{} `json:"request"`
}

View File

@@ -39,6 +39,6 @@ func (el *ElasticSearchClient) IndexDocument(document interface{}) {
Do(context.Background())
if err != nil {
log.Fatalf("Error ingesting the doc: %s", err)
log.Printf("Error ingesting the doc: %s", err)
}
}

View File

@@ -66,8 +66,8 @@ func (consumer *Consumer) AttachErrorListener() {
switch e := ev.(type) {
case *ConfluentKafka.Message:
// Process the message received.
fmt.Printf("%% Message on %s:\n%s\n",
e.TopicPartition, string(e.Value))
//fmt.Printf("%% Message on %s:\n%s\n",
// e.TopicPartition, string(e.Value))
go consumer.errorService.ProcessError(e.Value)

View File

@@ -13,7 +13,11 @@ type Command struct {
}
type SymbolicatedFrame struct {
Token string `json:"token"`
Token string `json:"token"`
OriginalLine int `json:"original_line"`
Lines []string `json:"lines"`
Start int `json:"start"`
End int `json:"end"`
}
type SymbolicatedStackTrace struct {
Frames []SymbolicatedFrame `json:"symbolicated_frames"`

View File

@@ -0,0 +1,9 @@
package utils
// Reverse reverses slice in place and returns the same (now reversed) slice.
// Works for a slice of any element type; empty and single-element slices are
// returned unchanged.
func Reverse[T any](slice []T) []T {
	left, right := 0, len(slice)-1
	for left < right {
		slice[left], slice[right] = slice[right], slice[left]
		left++
		right--
	}
	return slice
}

View File

@@ -8,8 +8,10 @@ import (
"log-enricher/pkg/encoder"
"log-enricher/pkg/log"
"log-enricher/pkg/symbolicator"
"log-enricher/pkg/utils"
"path"
"strings"
"time"
)
type ErrorProcessor struct {
@@ -43,13 +45,20 @@ type Mechanism struct {
Data MechanismData `json:"data"`
}
// Extra models the release identifier carried under the "release_id" key of
// an event's extra payload.
// NOTE(review): ProcessError reads the same key through a
// map[string]interface{} assertion on payload.Extra instead of decoding into
// this type — confirm whether Extra is still referenced anywhere.
type Extra struct {
ReleaseId string `json:"release_id"`
}
type Exception struct {
Type string `json:"type"`
Value string `json:"value"`
Stacktrace Stacktrace `json:"stacktrace"`
Mechanism Mechanism `json:"mechanism"`
ProjectId string `json:"project_id,omitempty"`
ReleaseId string `json:"release_id,omitempty"`
Type string `json:"type"`
Value string `json:"value"`
Stacktrace Stacktrace `json:"stacktrace"`
Mechanism Mechanism `json:"mechanism"`
ProjectId string `json:"project_id,omitempty"`
ReleaseId string `json:"release_id,omitempty"`
Breadcrumbs interface{} `json:"breadcrumbs,omitempty"`
Extra interface{} `json:"extra,omitempty"`
Request interface{} `json:"request,omitempty"`
}
func NewErrorProcessor(logger *log.Logger, elasticSearchClient *dbPackage.ElasticSearchClient, sourceMapFetcherService *SourceMapService) *ErrorProcessor {
@@ -73,13 +82,25 @@ func extractFilename(url string) string {
func (ep *ErrorProcessor) ProcessError(error []byte) {
fmt.Println("got error in consumer processing the same...")
var payload Exception
json.Unmarshal(error, &payload)
err := json.Unmarshal(error, &payload)
if err != nil {
println("unable to unmarshal exception")
return
}
extraMap := payload.Extra.(map[string]interface{})
releaseId, ok := extraMap["release_id"].(string)
if !ok {
println("release id not found")
return
}
//getting source map path
for i, frame := range payload.Stacktrace.Frames {
fileName := extractFilename(frame.Filename)
projectId := payload.ProjectId
frame := &payload.Stacktrace.Frames[i]
//releaseId := payload.ReleaseId
//todo make release dynamic
sourceMapPath, err := ep.sourceMapFetcher.GetSourceMap(fileName, projectId, "release-1")
if err != nil {
@@ -97,18 +118,24 @@ func (ep *ErrorProcessor) ProcessError(error []byte) {
output, err := symbolicator.SymbolicatorClient(command)
if err != nil {
fmt.Printf("error occured in symbolicator client %s", err)
return
}
//make md5 hash of error
hash := encoder.Md5Encode(string(error))
//creating es document
println("processed document successfully saving it to elasticsearch", hash)
errorDocument := &es.ErrorDocument{
Error: payload.Value,
Title: payload.Type,
StackTrace: output.Frames,
StackTrace: utils.Reverse(output.Frames),
SignificantStack: output.Frames[len(output.Frames)-1].Token,
ErrorHash: hash,
ProjectId: payload.ProjectId,
ReleaseVersion: payload.ReleaseId,
ReleaseVersion: releaseId,
CreatedAt: time.Now().Unix(),
Breadcrumbs: payload.Breadcrumbs,
Extra: payload.Extra,
Request: payload.Request,
}
ep.elasticSearchClient.IndexDocument(errorDocument)
}

View File

@@ -74,7 +74,7 @@ func (sm *SourceMapService) GetSourceMap(fileName string, projectName string, re
cacheFilePath := filepath.Join(cacheDir, projectName, releaseVersion, fileName)
//checking local cache
if path, found := checkLocalCache(projectName, releaseVersion, fileName); found {
fmt.Printf("Loading source map from cache: %s\n", path)
//fmt.Printf("Loading source map from cache: %s\n", path)
return path, nil
}
fmt.Println("Source map not found in cache, downloading from S3...")