initial commit

This commit is contained in:
2026-03-08 16:14:42 +05:30
parent a6c5792518
commit 9f90b62759
323 changed files with 23869 additions and 3 deletions

1
alfred

Submodule alfred deleted from fb3c590a0a

Submodule alfred-ingester deleted from 336177ef3a

View File

@@ -0,0 +1,45 @@
name: Semgrep
on:
  # Scan changed files in PRs, block on new issues only (existing issues ignored)
  pull_request:
    branches:
      - master
      - main
      - develop
      - portal
  # Schedule this job to run at a certain time, using cron syntax
  # Note that * is a special character in YAML so you have to quote this string
  schedule:
    - cron: '00 03 * * 0' # 03:00 UTC (08:30 IST) every Sunday
jobs:
  central-semgrep:
    name: Static code Analysis
    uses: navi-infosec/central-semgrep-action/.github/workflows/central-semgrep.yml@using-token
    with:
      github-event-number: ${{github.event.number}}
      github-event-name: ${{github.event_name}}
      github-repository: ${{github.repository}}
      github-pr_owner_name: ${{github.event.pull_request.user.login}}
    secrets:
      READ_SEMGREP_RULES_TOKEN: ${{secrets.READ_SEMGREP_RULES_TOKEN}}
      EMAIL_FETCH_TOKEN: ${{secrets.EMAIL_FETCH_TOKEN}}
  # Comments on the PR and assigns security reviewers when the scan fails.
  run-if-failed:
    runs-on: [ self-hosted, Linux ]
    needs: [central-semgrep]
    # FIX: the job id above is "central-semgrep"; "needs.semgrep" does not exist,
    # so this condition always evaluated false and the job never ran.
    if: always() && (needs.central-semgrep.result == 'failure')
    steps:
      - name: Create comment
        # github.event.number only exists on pull_request events, not on schedule runs
        if: ${{ ( github.event.number != '' ) }}
        uses: navi-synced-actions/create-or-update-comment@v2
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
            **Vulnerabilities have been discovered in this PR. Please check the vulnerability Analysis section of Semgrep Workflow to understand the security vulnerability. Feel free to reach out to #sast-help for more information**
      - name: Assign Reviewers
        if: ${{ ( github.event.number != '' ) }}
        uses: navi-infosec/security-oncall-action@v1.1

33
alfred-ingester/.gitignore vendored Normal file
View File

@@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/

View File

@@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.7/apache-maven-3.9.7-bin.zip

259
alfred-ingester/mvnw vendored Executable file
View File

@@ -0,0 +1,259 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Apache Maven Wrapper startup batch script, version 3.3.2
#
# Optional ENV vars
# -----------------
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
# MVNW_REPOURL - repo url base for downloading maven distribution
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
# ----------------------------------------------------------------------------
set -euf
[ "${MVNW_VERBOSE-}" != debug ] || set -x
# OS specific support.
native_path() { printf %s\\n "$1"; }
case "$(uname)" in
CYGWIN* | MINGW*)
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
native_path() { cygpath --path --windows "$1"; }
;;
esac
# set JAVACMD and JAVACCMD
set_java_home() {
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
if [ -n "${JAVA_HOME-}" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACCMD="$JAVA_HOME/jre/sh/javac"
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACCMD="$JAVA_HOME/bin/javac"
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
return 1
fi
fi
else
JAVACMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v java
)" || :
JAVACCMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v javac
)" || :
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
return 1
fi
fi
}
# hash string like Java String::hashCode
hash_string() {
  # Computes Java String::hashCode of $1 (h = h*31 + char, mod 2^32) and prints
  # it as lowercase hex; used below to derive the per-distribution MAVEN_HOME dir.
  str="${1:-}" h=0
  while [ -n "$str" ]; do
    # POSIX trick: strip everything but the first character of $str
    char="${str%"${str#?}"}"
    h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
    # drop the character just processed
    str="${str#?}"
  done
  printf %x\\n $h
}
verbose() { :; }
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
die() {
printf %s\\n "$1" >&2
exit 1
}
trim() {
# MWRAPPER-139:
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
# Needed for removing poorly interpreted newline sequences when running in more
# exotic environments such as mingw bash on Windows.
printf "%s" "${1}" | tr -d '[:space:]'
}
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
*)
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
distributionPlatform=linux-amd64
;;
esac
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
distributionUrlName="${distributionUrl##*/}"
distributionUrlNameMain="${distributionUrlName%.*}"
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
exec_maven() {
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
}
if [ -d "$MAVEN_HOME" ]; then
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
exec_maven "$@"
fi
case "${distributionUrl-}" in
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
esac
# prepare tmp dir
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
trap clean HUP INT TERM EXIT
else
die "cannot create temp dir"
fi
mkdir -p -- "${MAVEN_HOME%/*}"
# Download and Install Apache Maven
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
verbose "Downloading from: $distributionUrl"
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
# select .zip or .tar.gz
if ! command -v unzip >/dev/null; then
distributionUrl="${distributionUrl%.zip}.tar.gz"
distributionUrlName="${distributionUrl##*/}"
fi
# verbose opt
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
# normalize http auth
case "${MVNW_PASSWORD:+has-password}" in
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
esac
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
verbose "Found wget ... using wget"
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
verbose "Found curl ... using curl"
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
elif set_java_home; then
verbose "Falling back to use Java to download"
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
cat >"$javaSource" <<-END
public class Downloader extends java.net.Authenticator
{
protected java.net.PasswordAuthentication getPasswordAuthentication()
{
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
}
public static void main( String[] args ) throws Exception
{
setDefault( new Downloader() );
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
}
}
END
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
verbose " - Compiling Downloader.java ..."
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
verbose " - Running Downloader.java ..."
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
fi
# If specified, validate the SHA-256 sum of the Maven distribution zip file
if [ -n "${distributionSha256Sum-}" ]; then
distributionSha256Result=false
if [ "$MVN_CMD" = mvnd.sh ]; then
echo "Checksum validation is not supported for maven-mvnd." >&2
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
else
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
fi
if [ $distributionSha256Result = false ]; then
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
exit 1
fi
fi
# unzip and move
if command -v unzip >/dev/null; then
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"

149
alfred-ingester/mvnw.cmd vendored Normal file
View File

@@ -0,0 +1,149 @@
<# : batch portion
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.3.2
@REM
@REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
@REM ----------------------------------------------------------------------------
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
@SET __MVNW_CMD__=
@SET __MVNW_ERROR__=
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
@SET PSModulePath=
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
)
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
@SET __MVNW_PSMODULEP_SAVE=
@SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME=
@SET MVNW_PASSWORD=
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF
: end batch / begin powershell #>
$ErrorActionPreference = "Stop"
if ($env:MVNW_VERBOSE -eq "true") {
$VerbosePreference = "Continue"
}
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
if (!$distributionUrl) {
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
}
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
"maven-mvnd-*" {
$USE_MVND = $true
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
$MVN_CMD = "mvnd.cmd"
break
}
default {
$USE_MVND = $false
$MVN_CMD = $script -replace '^mvnw','mvn'
break
}
}
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
if ($env:MVNW_REPOURL) {
  # FIX: the two patterns were swapped relative to the sh wrapper (mvnw):
  # mvnd distributions live under /maven/mvnd/, plain Maven under /org/apache/maven/.
  # With the swap, setting MVNW_REPOURL produced a wrong mirror download URL.
  $MVNW_REPO_PATTERN = if ($USE_MVND) { "/maven/mvnd/" } else { "/org/apache/maven/" }
  $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
}
$distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
if ($env:MAVEN_USER_HOME) {
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
}
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
exit $?
}
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
}
# prepare tmp dir
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
trap {
if ($TMP_DOWNLOAD_DIR.Exists) {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
}
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
# Download and Install Apache Maven
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
Write-Verbose "Downloading from: $distributionUrl"
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
$webclient = New-Object System.Net.WebClient
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
# If specified, validate the SHA-256 sum of the Maven distribution zip file
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
if ($distributionSha256Sum) {
if ($USE_MVND) {
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
}
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
}
}
# unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch {
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
Write-Error "fail to move MAVEN_HOME"
}
} finally {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"

138
alfred-ingester/pom.xml Normal file
View File

@@ -0,0 +1,138 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.2.4</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.navi</groupId>
<artifactId>alfred-ingester</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>alfred-ingester</name>
<description>Alfred-ingester in Java</description>
<properties>
<log4j2.version>2.19.0</log4j2.version>
<min.code.coverage.percentage>75%</min.code.coverage.percentage>
<nexus.host>https://nexus.cmd.navi-tech.in</nexus.host>
<spring-cloud.version>2023.0.0</spring-cloud.version>
<java.version>21</java.version>
<kotlin.version>1.9.24</kotlin.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-kotlin</artifactId>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-reflect</artifactId>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-test-junit5</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.bucket4j</groupId>
<artifactId>bucket4j-core</artifactId>
<version>8.10.1</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>co.elastic.clients</groupId>
<artifactId>elasticsearch-java</artifactId>
<version>8.11.1</version>
</dependency>
<dependency>
<groupId>com.navi.sa.clients</groupId>
<artifactId>cache</artifactId>
<version>0.3-SNAPSHOT</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>${project.basedir}/src/main/kotlin</sourceDirectory>
<testSourceDirectory>${project.basedir}/src/test/kotlin</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-maven-plugin</artifactId>
<configuration>
<args>
<arg>-Xjsr305=strict</arg>
</args>
<compilerPlugins>
<plugin>spring</plugin>
</compilerPlugins>
</configuration>
<dependencies>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-maven-allopen</artifactId>
<version>${kotlin.version}</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>nexus</id>
<name>Snapshot</name>
<url>${nexus.host}/repository/maven-snapshots</url>
</repository>
<repository>
<id>Release</id>
<url>${nexus.host}/repository/maven-releases</url>
</repository>
<repository>
<id>MavenCentral</id>
<url>https://repo.maven.apache.org/maven2/</url>
</repository>
</repositories>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>${spring-cloud.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@@ -0,0 +1,11 @@
package com.navi.ingester
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication

/** Spring Boot entry point for the alfred-ingester service. */
@SpringBootApplication
class AlfredIngesterApplication

/** Boots the Spring application context with the given CLI arguments. */
fun main(args: Array<String>) {
    runApplication<AlfredIngesterApplication>(*args)
}

View File

@@ -0,0 +1,8 @@
package com.navi.ingester.common

// HTTP request-header names bound via @RequestHeader in the controllers
// (see CruiseController).
const val APP_VERSION_NAME = "appVersionName"
const val OS_VERSION = "osVersion"
const val DEVICE_ID = "deviceId"
const val APP_VERSION_CODE = "appVersionCode"
const val APP_OS = "appOs"
// Default value used for the APP_OS header when the client omits it.
const val ANDROID_OS = "Android"

View File

@@ -0,0 +1,21 @@
package com.navi.ingester.controller
import com.navi.ingester.common.*
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.RequestHeader
import org.springframework.web.bind.annotation.RestController

/**
 * Exposes GET /cruise, identifying the calling client via request headers
 * (header names defined in com.navi.ingester.common).
 */
@RestController
class CruiseController {

    /**
     * Currently returns an empty 200 response.
     *
     * NOTE(review): the body is empty and the bound headers are unused — this
     * looks like a stub for a future cruise-control config lookup; confirm intent.
     * appVersionCode is the only nullable header; appOs defaults to "Android".
     */
    @GetMapping("/cruise")
    fun getCruiseControlConfig(
        @RequestHeader(APP_VERSION_NAME) appVersionName: String,
        @RequestHeader(OS_VERSION) osVersion: String,
        @RequestHeader(DEVICE_ID) deviceId: String,
        @RequestHeader(APP_VERSION_CODE) appVersionCode: String?,
        @RequestHeader(APP_OS, defaultValue = ANDROID_OS) appOs: String
    ) {
    }
}

View File

@@ -0,0 +1,13 @@
package com.navi.ingester.controller
import org.springframework.http.ResponseEntity
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.RestController

/** Liveness endpoint for the service. */
@RestController
class PingController {

    /** GET /ping — always responds 200 with body {"success": true}. */
    @GetMapping("/ping")
    fun ping(): ResponseEntity<Map<String, Any>> {
        val payload: Map<String, Any> = mapOf("success" to true)
        return ResponseEntity.ok(payload)
    }
}

View File

@@ -0,0 +1,29 @@
package com.navi.ingester.dtos
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.databind.annotation.JsonNaming

/**
 * Ingestion payload: shared [BaseAttributes] for the batch plus the individual
 * events. JSON is snake_case; unknown fields are ignored.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class AppEvent(
    val baseAttributes: BaseAttributes,
    val events: List<EventAttributes>
)

/**
 * One client-side event within an [AppEvent] batch. Every field is optional in
 * the wire format; [attributes] carries the free-form event-specific payload.
 * JSON is snake_case; unknown fields are ignored.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class EventAttributes(
    val eventId: String? = null,
    val parentSessionId: String? = null,
    val sessionId: String? = null,
    val screenName: String? = null,
    // NOTE(review): presumably an epoch timestamp of the screenshot — confirm units
    val screenshotTime: Long? = null,
    val moduleName: String? = null,
    val eventName: String? = null,
    val eventTimestamp: Long? = null,
    val attributes: Map<String, Any>? = null,
    val eventType: EventType? = null,
    val zipName: String? = null
)

View File

@@ -0,0 +1,36 @@
package com.navi.ingester.dtos
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.databind.annotation.JsonNaming

/**
 * Client/device/session context shared by every event in an upload
 * (see [AppEvent] and [SessionUploadRequest]). All fields are optional in the
 * wire format. JSON is snake_case; unknown fields are ignored.
 *
 * NOTE(review): timestamp fields appear to be epoch values — units (ms vs s)
 * are not visible here; confirm against the client SDK.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class BaseAttributes(
    // App identity/version
    val appVersionCode: String? = null,
    val appVersionName: String? = null,
    val clientTs: Long? = null,
    // Device and environment
    val deviceId: String? = null,
    val deviceModel: String? = null,
    val deviceManufacturer: String? = null,
    val screenResolution: String? = null,
    val appOS: String? = null,
    val osVersion: String? = null,
    val latitude: Float? = null,
    val longitude: Float? = null,
    val networkType: String? = null,
    val agentId: String? = null,
    val upTime: Long? = null,
    val carrierName: String? = null,
    val metadata: Map<String, String>? = null,
    // Session/trace correlation
    val sessionTimeStamp: Long? = null,
    val eventTimestamp: Long? = null,
    val sessionId: String? = null,
    val parentSessionId: String? = null,
    val traceId: String? = null,
    val eventEndTimeStamp: Long? = null,
    val phoneNumber: String? = null,
    val hasErrors: Boolean? = null,
    // Screen-recording parameters
    val snapshotPerSecond: Long? = null,
    val imageType: String? = null
)

View File

@@ -0,0 +1,14 @@
package com.navi.ingester.dtos
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.databind.annotation.JsonNaming

/**
 * Snapshot of a device's network state plus its resource metrics
 * ([DeviceAttributes]). All fields are required in the wire format.
 * JSON is snake_case; unknown fields are ignored.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class DeviceAndNetworkAttributes(
    val deviceId: String,
    val networkType: String,
    // NOTE(review): unit/scale of networkStrength is not visible here — confirm
    val networkStrength: Double,
    val deviceAttributes: DeviceAttributes
)

View File

@@ -0,0 +1,14 @@
package com.navi.ingester.dtos
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.databind.annotation.JsonNaming

/**
 * Device resource metrics; only battery is mandatory.
 * NOTE(review): presumably percentages/levels — units are not visible here.
 * JSON is snake_case; unknown fields are ignored.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class DeviceAttributes(
    val battery: Double,
    val cpu: Double? = null,
    val storage: Double? = null,
    val memory: Double? = null
)

View File

@@ -0,0 +1,20 @@
package com.navi.ingester.dtos
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.databind.annotation.JsonNaming

/**
 * Request payload for a session upload: shared [BaseAttributes] plus the
 * upload-specific event data. JSON is snake_case; unknown fields are ignored.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class SessionUploadRequest(
    val baseAttributes: BaseAttributes,
    val sessionUploadEventAttributes: SessionUploadEventAttributes
)

/**
 * Device resource metrics captured at the start and end of the uploaded
 * session, keyed by the originating event id.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy::class)
data class SessionUploadEventAttributes(
    val beginningDeviceAttributes: DeviceAttributes,
    val endDeviceAttributes: DeviceAttributes,
    val eventId: String
)

View File

@@ -0,0 +1,15 @@
package com.navi.ingester.enums

/**
 * Kinds of client events accepted by the ingester; deserialized into
 * EventAttributes.eventType by Jackson.
 */
enum class EventType {
    TOUCH_EVENT,
    SCROLL_EVENT,
    INFO_LOG,
    WARN_LOG,
    ERROR_LOG,
    SESSION_UPLOAD_EVENT,
    CRASH_ANALYTICS_EVENT,
    ANR_EVENT,
    START_RECORDING_EVENT,
    STOP_RECORDING_EVENT,
    SCREEN_TRANSITION_EVENT
}

View File

@@ -0,0 +1,5 @@
package com.navi.ingester.handler

/**
 * Marker interface for app-event handlers.
 * NOTE(review): no methods or implementations exist in this commit — presumably
 * a seam for per-event-type handling to be added later; confirm before relying on it.
 */
interface AppHandler {
}

View File

@@ -0,0 +1,15 @@
package com.navi.ingester.interceptor
import org.springframework.context.annotation.Configuration
import org.springframework.web.servlet.config.annotation.InterceptorRegistry
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer

/** Registers the [RateLimitInterceptor] with Spring MVC. */
@Configuration
class InterceptorConfig(
    val rateLimitInterceptor: RateLimitInterceptor
): WebMvcConfigurer {
    // No path patterns are supplied, so rate limiting applies to every endpoint.
    override fun addInterceptors(registry: InterceptorRegistry) {
        registry.addInterceptor(rateLimitInterceptor)
    }
}

View File

@@ -0,0 +1,74 @@
package com.navi.ingester.interceptor
import io.github.bucket4j.Bandwidth
import io.github.bucket4j.Bucket
import jakarta.servlet.http.HttpServletRequest
import jakarta.servlet.http.HttpServletResponse
import org.springframework.beans.factory.annotation.Value
import org.springframework.http.HttpStatus
import org.springframework.stereotype.Component
import org.springframework.web.servlet.HandlerInterceptor
import java.time.Duration
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.TimeUnit

/**
 * Per-client-IP token-bucket rate limiting (bucket4j) applied before controllers.
 *
 * Each distinct client IP gets a bucket of [rateLimitCapacity] tokens, refilled
 * with [rateLimitTokens] tokens every [rateLimitRefillTimeInSecs] seconds
 * (all from the rate.limit.* properties).
 *
 * NOTE(review): buckets are never evicted, so the map grows with the number of
 * distinct client IPs seen since startup — consider a bounded/expiring cache.
 */
@Component
class RateLimitInterceptor(
    private val buckets: ConcurrentHashMap<String, Bucket> = ConcurrentHashMap<String, Bucket>(),
    @Value("\${rate.limit.capacity}") val rateLimitCapacity: Long,
    @Value("\${rate.limit.tokens}") val rateLimitTokens: Long,
    @Value("\${rate.limit.refill.time.in.secs}") val rateLimitRefillTimeInSecs: Long,
    private val tokensToConsumeByDefault: Int = 1
) : HandlerInterceptor {

    /**
     * Consumes one token from the caller's bucket.
     * @return true to continue the handler chain, false after writing a 429
     *         with an X-Rate-Limit-Retry-After-Milliseconds header.
     */
    override fun preHandle(
        request: HttpServletRequest,
        response: HttpServletResponse,
        handler: Any
    ): Boolean {
        val clientIP = getClientIP(request)
        // FIX: the previous containsKey/get/put sequence was not atomic, so two
        // concurrent first requests from one IP could each create and consume
        // from separate buckets. computeIfAbsent is atomic on ConcurrentHashMap.
        val bucket = buckets.computeIfAbsent(clientIP) { getDefaultBucket() }
        val probe = bucket.tryConsumeAndReturnRemaining(tokensToConsumeByDefault.toLong())
        if (probe.isConsumed) {
            response.addHeader(
                "X-Rate-Limit-Remaining",
                probe.remainingTokens.toString()
            )
            return true
        }
        response.status = HttpStatus.TOO_MANY_REQUESTS.value() // 429
        response.addHeader(
            "X-Rate-Limit-Retry-After-Milliseconds",
            TimeUnit.NANOSECONDS.toMillis(probe.nanosToWaitForRefill).toString()
        )
        return false
    }

    /**
     * Resolves the client IP, preferring X-Forwarded-For when present.
     * FIX: behind multiple proxies the header is a comma-separated chain
     * ("client, proxy1, proxy2"); the first entry is the client. Using the raw
     * header would have keyed buckets on the entire chain string.
     */
    private fun getClientIP(request: HttpServletRequest): String {
        val forwardedFor = request.getHeader("X-FORWARDED-FOR")
        if (forwardedFor.isNullOrEmpty()) {
            return request.remoteAddr
        }
        return forwardedFor.split(',').first().trim()
    }

    /** Builds a bucket with the configured capacity and intervally refill. */
    private fun getDefaultBucket(): Bucket {
        return Bucket.builder()
            .addLimit(
                Bandwidth.builder()
                    .capacity(rateLimitCapacity)
                    .refillIntervally(rateLimitTokens, Duration.ofSeconds(rateLimitRefillTimeInSecs))
                    .build()
            )
            .build()
    }
}

View File

@@ -0,0 +1,12 @@
#
# Copyright @ 2022 by Navi Technologies Private Limited
# All rights reserved. Strictly confidential.
#
# Application
spring.config.activate.on-profile=docker
spring.jackson.mapper.default_view_inclusion=true
# Rate Limiting
rate.limit.capacity=${RATE_LIMIT_CAPACITY}
rate.limit.tokens=${RATE_LIMIT_TOKENS}
rate.limit.refill.time.in.secs=${RATE_LIMIT_REFILL_TIME_IN_SECS}

View File

@@ -0,0 +1,12 @@
#
# Copyright @ 2022 by Navi Technologies Private Limited
# All rights reserved. Strictly confidential.
#
# Application
spring.config.activate.on-profile=local
spring.jackson.mapper.default_view_inclusion=true
# Rate Limiting
rate.limit.capacity=2
rate.limit.tokens=2
rate.limit.refill.time.in.secs=60

View File

@@ -0,0 +1,10 @@
spring.application.name=alfred-ingester
server.port=${PORT:8084}
spring.threads.virtual.enabled=true
# Metrics related configuration
management.endpoint.metrics.enabled=true
management.endpoints.web.exposure.include=*
management.endpoint.prometheus.enabled=true
management.prometheus.metrics.export.enabled=true
management.server.port=4001

View File

@@ -0,0 +1,13 @@
package com.navi.ingester
import org.junit.jupiter.api.Test
import org.springframework.boot.test.context.SpringBootTest
@SpringBootTest
class AlfredIngesterApplicationTests {
    // Smoke test: passes when the full Spring application context starts
    // without errors (bean wiring, configuration binding, etc.).
    @Test
    fun contextLoads() {
    }
}

Submodule alfred-web-session-recorder deleted from 55057919b4

View File

@@ -0,0 +1 @@
* @apoorva-gupta_navi @lokesh-dugar_navi @lalit-garghate_navi

View File

@@ -0,0 +1,39 @@
name: Publish Package
on:
workflow_dispatch:
jobs:
publish_package:
runs-on: [default]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: '16.x'
registry-url: 'https://nexus.cmd.navi-tech.in/repository/navi-commons'
cache: 'npm'
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}
- name: Setup tsc
run: npm install -g typescript
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}
- name: Setup yarn
run: npm install -g yarn
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}
- name: yarn install
run: yarn install
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}
- uses: actions/setup-node@v3
with:
node-version: '16.x'
registry-url: 'https://nexus.cmd.navi-tech.in/repository/npm-packages/'
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}
- name: Publishing Package
run: |
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NAVI_COMMONS_NPM_AUTH_TOKEN }}

View File

@@ -0,0 +1,45 @@
name: Semgrep
on:
# Scan changed files in PRs, block on new issues only (existing issues ignored)
pull_request:
branches:
- master
- main
- develop
- portal
# Schedule this job to run at a certain time, using cron syntax
# Note that * is a special character in YAML so you have to quote this string
schedule:
- cron: '00 03 * * 0' # scheduled for 8.30 AM on every sunday
jobs:
central-semgrep:
name: Static code Analysis
uses: navi-infosec/central-semgrep-action/.github/workflows/central-semgrep.yml@using-token
with:
github-event-number: ${{github.event.number}}
github-event-name: ${{github.event_name}}
github-repository: ${{github.repository}}
github-pr_owner_name: ${{github.event.pull_request.user.login}}
secrets:
READ_SEMGREP_RULES_TOKEN: ${{secrets.READ_SEMGREP_RULES_TOKEN}}
EMAIL_FETCH_TOKEN: ${{secrets.EMAIL_FETCH_TOKEN}}
run-if-failed:
runs-on: [ self-hosted, Linux ]
needs: [central-semgrep]
if: always() && (needs.semgrep.result == 'failure')
steps:
- name: Create comment
if: ${{ ( github.event.number != '' ) }}
uses: navi-synced-actions/create-or-update-comment@v2
with:
issue-number: ${{ github.event.pull_request.number }}
body: |
**Vulnerabilities have been discovered in this PR. Please check the vulnerability Analysis section of Semgrep Workflow to understand the security vulnerability. Feel free to reach out to #sast-help for more information **
- name: Assign Reviewers
if: ${{ ( github.event.number != '' ) }}
uses: navi-infosec/security-oncall-action@v1.1

109
alfred-web-session-recorder/.gitignore vendored Normal file
View File

@@ -0,0 +1,109 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
.storage/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
.idea
.DS_Store

View File

@@ -0,0 +1,2 @@
@navi:registry=https://nexus.cmd.navi-tech.in/repository/npm-packages/
//https://nexus.cmd.navi-tech.in/repository/npm-packages/:__authToken=NpmToken.1a3d3462-fb82-364c-bc64-0051e24635b3

View File

@@ -0,0 +1 @@
dist

View File

@@ -0,0 +1,8 @@
{
"singleQuote": false,
"jsxSingleQuote": false,
"tabWidth": 2,
"printWidth": 100,
"trailingComma": "all",
"semi": true
}

View File

@@ -0,0 +1 @@
* @lokesh-dugar_navi

View File

@@ -0,0 +1,45 @@
<h1 align="center">Welcome to alfred-session-recorder 👋</h1>
> A package that records web sessions, events, and user agents and sends the serialized encrypted data to a specified URL.
## 🚀 Usage
Make sure you have node version > 16
Add tokens in .npmrc
```sh
//https://nexus.cmd.navi-tech.in/repository/npm-packages/:__authToken=<YOUR_NPM_AUTH_TOKEN>
```
Add alfred-session-recorder
```sh
yarn add @navi/alfred-session-recorder
```
Use the code snippet below to start sending recorded data to the backend.
```sh
const recorder = new SnapshotRecorder({
  apiUrl: 'https://qa-alfred-ingester.np.navi-sa.in', // This will change for prod
  projectName: 'your-project-id', // Put your project name
  deviceId: 'custom-device-id', // optional
  clientkey: '<CONTACT_ALFRED_TEAM>',
});
const cleanup = recorder.startSnapshotRecording();
```
```
recorder.stopRecording(); // Use this to stop recording; make sure to run the cleanup function accordingly
```
Note: apiUrl for Prod will be different, please reach out to the team for the prod link
## Watch Your Videos here
```
https://qa-alfred-ui.np.navi-sa.in/
```
---

View File

@@ -0,0 +1 @@
declare module 'rrweb';

View File

@@ -0,0 +1,125 @@
{
"name": "alfred-web-recording",
"version": "2.0.0-beta",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@rrweb/types": {
"version": "2.0.0-alpha.8",
"resolved": "https://registry.npmjs.org/@rrweb/types/-/types-2.0.0-alpha.8.tgz",
"integrity": "sha512-yAr6ZQrgmr7+qZU5DMGqYXnVsolC5epftmZtkOtgFD/bbvCWflNnl09M32hUjttlKCV1ohhmQGioXkCQ37IF7A==",
"requires": {
"rrweb-snapshot": "^2.0.0-alpha.8"
},
"dependencies": {
"rrweb-snapshot": {
"version": "2.0.0-alpha.8",
"resolved": "https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.8.tgz",
"integrity": "sha512-3Rb7c+mnDEADQ8N9qn9SDH5PzCyHlZ1cwZC932qRyt9O8kJWLM11JLYqqEyQCa2FZVQbzH2iAaCgnyM7A32p7A=="
}
}
},
"@tsconfig/svelte": {
"version": "1.0.13",
"resolved": "https://registry.npmjs.org/@tsconfig/svelte/-/svelte-1.0.13.tgz",
"integrity": "sha512-5lYJP45Xllo4yE/RUBccBT32eBlRDbqN8r1/MIvQbKxW3aFqaYPCNgm8D5V20X4ShHcwvYWNlKg3liDh1MlBoA=="
},
"@types/css-font-loading-module": {
"version": "0.0.7",
"resolved": "https://registry.npmjs.org/@types/css-font-loading-module/-/css-font-loading-module-0.0.7.tgz",
"integrity": "sha512-nl09VhutdjINdWyXxHWN/w9zlNCfr60JUqJbd24YXUuCwgeL0TpFSdElCwb6cxfB6ybE19Gjj4g0jsgkXxKv1Q=="
},
"@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
"integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==",
"dev": true
},
"@types/react": {
"version": "17.0.58",
"resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.58.tgz",
"integrity": "sha512-c1GzVY97P0fGxwGxhYq989j4XwlcHQoto6wQISOC2v6wm3h0PORRWJFHlkRjfGsiG3y1609WdQ+J+tKxvrEd6A==",
"dev": true,
"requires": {
"@types/prop-types": "*",
"@types/scheduler": "*",
"csstype": "^3.0.2"
}
},
"@types/scheduler": {
"version": "0.16.3",
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz",
"integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==",
"dev": true
},
"@xstate/fsm": {
"version": "1.6.5",
"resolved": "https://registry.npmjs.org/@xstate/fsm/-/fsm-1.6.5.tgz",
"integrity": "sha512-b5o1I6aLNeYlU/3CPlj/Z91ybk1gUsKT+5NAJI+2W4UjvS5KLG28K9v5UvNoFVjHV8PajVZ00RH3vnjyQO7ZAw=="
},
"base64-arraybuffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
"integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ=="
},
"csstype": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz",
"integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==",
"dev": true
},
"fflate": {
"version": "0.4.8",
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.4.8.tgz",
"integrity": "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="
},
"mitt": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.0.tgz",
"integrity": "sha512-7dX2/10ITVyqh4aOSVI9gdape+t9l2/8QxHrFmUXu4EEUpdlxl6RudZUPZoc+zuY2hk1j7XxVroIVIan/pD/SQ=="
},
"rrdom": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/rrdom/-/rrdom-0.1.7.tgz",
"integrity": "sha512-ZLd8f14z9pUy2Hk9y636cNv5Y2BMnNEY99wxzW9tD2BLDfe1xFxtLjB4q/xCBYo6HRe0wofzKzjm4JojmpBfFw==",
"requires": {
"rrweb-snapshot": "^2.0.0-alpha.4"
}
},
"rrweb": {
"version": "2.0.0-alpha.4",
"resolved": "https://registry.npmjs.org/rrweb/-/rrweb-2.0.0-alpha.4.tgz",
"integrity": "sha512-wEHUILbxDPcNwkM3m4qgPgXAiBJyqCbbOHyVoNEVBJzHszWEFYyTbrZqUdeb1EfmTRC2PsumCIkVcomJ/xcOzA==",
"requires": {
"@rrweb/types": "^2.0.0-alpha.4",
"@types/css-font-loading-module": "0.0.7",
"@xstate/fsm": "^1.4.0",
"base64-arraybuffer": "^1.0.1",
"fflate": "^0.4.4",
"mitt": "^3.0.0",
"rrdom": "^0.1.7",
"rrweb-snapshot": "^2.0.0-alpha.4"
}
},
"rrweb-player": {
"version": "1.0.0-alpha.4",
"resolved": "https://registry.npmjs.org/rrweb-player/-/rrweb-player-1.0.0-alpha.4.tgz",
"integrity": "sha512-Wlmn9GZ5Fdqa37vd3TzsYdLl/JWEvXNUrLCrYpnOwEgmY409HwVIvvA5aIo7k582LoKgdRCsB87N+f0oWAR0Kg==",
"requires": {
"@tsconfig/svelte": "^1.0.0",
"rrweb": "^2.0.0-alpha.4"
}
},
"rrweb-snapshot": {
"version": "2.0.0-alpha.4",
"resolved": "https://registry.npmjs.org/rrweb-snapshot/-/rrweb-snapshot-2.0.0-alpha.4.tgz",
"integrity": "sha512-KQ2OtPpXO5jLYqg1OnXS/Hf+EzqnZyP5A+XPqBCjYpj3XIje/Od4gdUwjbFo3cVuWq5Cw5Y1d3/xwgIS7/XpQQ=="
},
"typescript": {
"version": "4.9.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
"integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
"dev": true
}
}
}

View File

@@ -0,0 +1,41 @@
{
"name": "@navi/alfred-session-recorder",
"version": "3.0.2-test",
"description": "session-recorder",
"main": "dist/src/index.js",
"type": "commonjs",
"scripts": {
"watch": "tsc --w -noEmit",
"watch-build": "tsc --w",
"build": "tsc",
"prepublishOnly": "npm run build",
"verdaccio": "verdaccio --config ./verdaccio.yml"
},
"files": [
"dist/"
],
"types": "dist/src/index.d.ts",
"license": "MIT",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0"
},
"devDependencies": {
"@types/pako": "^2.0.0",
"@types/react": "^17.0.18",
"@types/uuid": "^9.0.1",
"prettier": "^3.2.4",
"typescript": "^4.8.3",
"verdaccio": "^5.26.3"
},
"dependencies": {
"@types/ua-parser-js": "^0.7.39",
"axios": "^1.7.9",
"dayjs": "^1.11.13",
"pako": "^2.1.0",
"rrweb": "^2.0.0-alpha.4",
"uuid": "^9.0.0"
},
"keywords": [
"alfred-session-recorder"
]
}

View File

@@ -0,0 +1,19 @@
// How often (ms) buffered rrweb events are flushed to the backend when the
// caller does not pass ingestEventInterval.
export const DEFAULT_INGEST_EVENT_INTERVAL = 2500;
// Maximum length of one recorded session before it is rotated.
export const MAX_VIDEO_LENGTH = 60 * 1000 * 5; // 5 minutes
// rrweb sampling configuration: record all mouse moves/interactions,
// throttle scroll (150 ms) and media (800 ms) events, and keep only the
// last input event per field.
export const RR_SAMPLING_CONFIG = {
  mousemove: true,
  mouseInteraction: true,
  scroll: 150,
  media: 800,
  input: "last",
};
// Clickstream event names emitted when ingest/upload steps fail.
export const CS_EVENTS = {
  INGEST_FAILURE: "INGEST_FAILURE",
  GLOBAL_AXIOS_RESP_ERROR: "GLOBAL_AXIOS_RESP_ERROR",
  GLOBAL_AXIOS_REQ_ERROR: "GLOBAL_AXIOS_REQ_ERROR",
  S3_UPLOAD_FAILURE: "S3_UPLOAD_FAILURE",
  PRE_SIGN_FETCH_FAILURE: "PRE_SIGN_FETCH_FAILURE",
};

View File

@@ -0,0 +1,201 @@
import { record, pack } from "rrweb";
import { v4 as uuidv4 } from "uuid";
import { AxiosError } from "axios";
import {
CS_EVENTS,
DEFAULT_INGEST_EVENT_INTERVAL,
MAX_VIDEO_LENGTH,
RR_SAMPLING_CONFIG,
} from "./constants";
import {
getAlfredPreSignUrl,
getBrowserInfo,
getErrorObjForClickStream,
getEventIngestApiUrl,
getPackageVersion,
} from "./utils";
import canUseDOM from "./utils/canUseDom";
import { postClickStreamEvent } from "./utils/analytics";
import ApiHelper from "./utils/apiHelper";
import { OptionType, EventsDTO, LogAnalyticsEvent, EventPayloadType } from "./types";
import { DateHelper } from "./utils/dateHelper";
const browserInfo = getBrowserInfo();
/**
 * Records browser sessions with rrweb and ships them to the Alfred backend.
 *
 * On a fixed interval the buffered DOM events are uploaded to S3 through a
 * pre-signed URL, and the session metadata envelope is sent to the ingest API
 * via navigator.sendBeacon. Sessions are rotated every MAX_VIDEO_LENGTH ms so
 * a single recording stays bounded.
 */
class SnapshotRecorder {
  private events: Record<string, Array<any>>; // rrweb events buffered per session id
  private sessionId: string;
  private options: any;
  private stopFn: any; // rrweb stop handle; null while not recording
  private sendEventsTimer: any; // interval flushing buffered events
  private sessionTimer: any; // interval rotating the session
  private isRecording: boolean;
  private startTimeMap: Record<string, number>; // session id -> start epoch ms

  constructor(options: OptionType) {
    this.events = {};
    this.startTimeMap = {};
    this.sessionId = "";
    this.sendEventsTimer = null;
    this.options = options;
    this.isRecording = false;
    this.sessionTimer = null;
    // Fix: guard BOTH global assignments with canUseDOM. Previously only
    // analyticsData was guarded and window.alfredClientKey was assigned
    // unconditionally, which threw in SSR / non-DOM environments.
    if (canUseDOM) {
      window.analyticsData = options.analyticsData;
      window.alfredClientKey = options.clientkey;
    }
  }

  // Starts a fresh session: new UUID, start timestamp, empty event buffer,
  // and notifies the consumer via onSessionIdChange.
  private startNewSession = () => {
    this.sessionId = uuidv4();
    this.startTimeMap[this.sessionId] = DateHelper.now();
    if (canUseDOM) {
      window.alfredSessionId = this.sessionId;
      this.options?.onSessionIdChange?.(this.sessionId);
    }
    this.events[this.sessionId] = [];
  };

  // Reports an internal failure event to clickstream analytics when the
  // caller configured analyticsData.uri/source; otherwise a no-op.
  private logAnalytics = (event: LogAnalyticsEvent): void => {
    const uri = this.options?.analyticsData?.uri;
    const source = this.options?.analyticsData?.source;
    const { event_name, attributes } = event;
    if (uri && source)
      postClickStreamEvent({
        uri,
        source,
        event_name,
        attributes: { session_id: this.sessionId, ...(attributes || {}) },
      });
  };

  // Base attributes attached to every ingested event: browser/device info,
  // session/project identity and caller-supplied metadata.
  private getEventBaseAttributes = () => {
    return {
      ...browserInfo,
      session_id: this.sessionId,
      client_timestamp: DateHelper.now(),
      project_name: this.options?.projectName,
      device_id: this.options?.deviceId,
      metadata: {
        ...this.options?.metaData,
        page_url: window?.location?.origin || "-",
        package_version: getPackageVersion(),
        ...(window?.alfredSessionMetaData || {}),
      },
      user_email: this.options?.userEmail,
      version: 2,
    };
  };

  // Fetches a pre-signed S3 URL for this session's next payload.
  // (Renamed from the typo'd "getPresidedUrl"; private, so no callers break.)
  private getPreSignedUrl = async () => {
    return ApiHelper.GET(getAlfredPreSignUrl(this.options?.apiUrl, this.sessionId));
  };

  // Drains the current session's buffer and wraps it in the ingest envelope
  // plus the raw rrweb payload destined for S3.
  private getEventPayload = (): EventPayloadType | undefined => {
    const domData = [...(this.events?.[this.sessionId] || [])];
    this.events[this.sessionId] = [];
    const eventData: EventsDTO = {
      base_attributes: { ...this.getEventBaseAttributes() },
      session_attribute: {
        event_id: uuidv4(),
        start_timestamp: this.startTimeMap?.[this.sessionId] || DateHelper.now(),
        end_timestamp: DateHelper.now(),
      },
    };
    return {
      eventData,
      rrwebData: { event_timestamp: DateHelper.now(), dom_events_data: domData },
    };
  };

  // Ingest event data (sendBeacon: fire-and-forget, survives page unload).
  private ingestEvents = (eventData: EventsDTO): void => {
    const WEB_SESSION_URL = getEventIngestApiUrl(this.options?.apiUrl);
    navigator?.sendBeacon?.(WEB_SESSION_URL, JSON.stringify(eventData));
  };

  // One flush tick: upload buffered DOM events to S3 via pre-sign and beacon
  // the session envelope. Upload failures are reported through analytics,
  // never thrown to the caller.
  private uploadSessionDataToS3 = () => {
    const { eventData, rrwebData } = this.getEventPayload() || {};
    const { dom_events_data } = rrwebData || {};
    if (!dom_events_data?.length) return;
    this.getPreSignedUrl()
      .then((resp) => {
        const preSignUrl = resp?.data?.data;
        ApiHelper.PUT(preSignUrl, rrwebData)
          .then((response) => {
            const isErr = response.status !== 200;
            if (isErr) throw new Error(`Error uploading to S3: ${response.status}`);
          })
          .catch((error: AxiosError) => {
            this.logAnalytics({
              event_name: CS_EVENTS.S3_UPLOAD_FAILURE,
              attributes: { ...getErrorObjForClickStream(error), session_id: this.sessionId },
            });
          });
      })
      .catch((err: AxiosError) => {
        this.logAnalytics({
          event_name: CS_EVENTS.PRE_SIGN_FETCH_FAILURE,
          attributes: { message: err, session_id: this.sessionId },
        });
      });
    if (eventData) this.ingestEvents(eventData);
  };

  // Rotates the session every MAX_VIDEO_LENGTH ms.
  private resetSessionTimer = () => {
    // Fix: clear any previously armed rotation timer before creating a new
    // one so repeated starts can never stack intervals.
    if (this.sessionTimer) clearInterval(this.sessionTimer);
    this.sessionTimer = setInterval(() => {
      this.stopRecording();
      this.startRecording();
    }, MAX_VIDEO_LENGTH);
  };

  private startRecording = () => {
    if (this.isRecording) return;
    this.startNewSession();
    this.isRecording = true;
    this.stopFn = null;
    this.sendEventsTimer = setInterval(() => {
      this.uploadSessionDataToS3();
    }, this?.options?.ingestEventInterval || DEFAULT_INGEST_EVENT_INTERVAL);
    this.stopFn = record({
      emit: (event: any) => {
        // sessionId is read at emit time, so events always land in the
        // currently active session's buffer.
        this.events[this.sessionId].push(event);
      },
      sampling: { ...RR_SAMPLING_CONFIG },
      packFn: pack,
      ...this.options?.sessionConfig,
    });
    this.resetSessionTimer();
  };

  // Flushes outstanding events, stops rrweb and clears all timers/state.
  public stopRecording = () => {
    this.uploadSessionDataToS3();
    // Fix: stopFn is null before the first start and after a stop; the
    // previous unconditional this.stopFn() threw a TypeError in those cases.
    this.stopFn?.();
    // Clean up
    if (this.sendEventsTimer) clearInterval(this.sendEventsTimer);
    if (this.sessionTimer) clearInterval(this.sessionTimer);
    this.sendEventsTimer = null;
    this.sessionTimer = null;
    this.stopFn = null;
    this.isRecording = false;
  };

  // Starts recording and returns a cleanup callback.
  public startSnapshotRecording = () => {
    this.startRecording();
    // Fix: return a working cleanup function. The previous implementation
    // returned a no-op, so the documented `const cleanup = startSnapshotRecording()`
    // usage leaked timers and never actually stopped the recorder.
    return () => this.stopRecording();
  };
}
export default SnapshotRecorder;

View File

@@ -0,0 +1,69 @@
import { DeviceDetailsType } from "../utils";
// Browser identity plus OS/device details attached to every ingested event.
export interface BrowserMetaInfo extends DeviceDetailsType {
  browser: string;
  screen_resolution: string; // "WIDTHxHEIGHT", e.g. "1920x1080"
}
// Constructor options for SnapshotRecorder.
export interface OptionType {
  apiUrl?: string; // base URL of the alfred-ingester backend
  projectName: string;
  deviceId?: string;
  userEmail?: string;
  metaData?: any; // merged into every event's metadata object
  sessionConfig?: any; // spread into rrweb's record() options
  ingestEventInterval?: number; // flush interval in ms (default 2500)
  clientkey: string; // sent as the X-Api-Key header on backend calls
  onSessionIdChange?: (sessionId: string) => void; // fired on session rotation
  analyticsData?: {
    uri: string;
    source: string;
  };
}
// base_attributes section of the ingest payload.
export interface BaseAttributesDTO extends BrowserMetaInfo {
  session_id: string;
  client_timestamp: number; // epoch ms
  device_id?: string;
  project_name: string;
  user_email: string | undefined;
  metadata: any;
  version: number; // payload schema version (recorder currently sends 2)
}
// Envelope beaconed to the web-sessions ingest endpoint.
export interface EventsDTO {
  base_attributes: BaseAttributesDTO;
  session_attribute: {
    event_id: string;
    start_timestamp: number;
    end_timestamp: number;
  };
}
// Result of draining the event buffer: ingest envelope + raw rrweb payload.
export interface EventPayloadType {
  eventData: EventsDTO;
  rrwebData: { dom_events_data: any[]; event_timestamp: number };
}
// Shape of the rrweb payload uploaded to S3.
export interface S3EventDTO {
  event_timestamp: string;
  dom_events_data: Array<any>;
}
// Globals published by SnapshotRecorder (alfredClientKey is read back by
// ApiHelper; consumers may populate alfredSessionMetaData).
declare global {
  interface Window {
    alfredSessionId: string;
    alfredSessionMetaData: Record<string, unknown>;
    alfredClientKey: string;
    analyticsData?: {
      uri: string;
      source: string;
    };
    onSessionIdChange?: (sessionId: string) => void;
  }
}
// Internal failure event routed to clickstream analytics.
export interface LogAnalyticsEvent {
  event_name: string;
  attributes?: Record<string, unknown>;
}

View File

@@ -0,0 +1,31 @@
import { getBrowserInfo, getPackageVersion } from "./index";
import { DateHelper } from "./dateHelper";
interface PostClickStreamEvent {
uri: string;
source: string;
event_name: string;
attributes: Record<string, unknown>;
}
/**
 * Fire-and-forget clickstream analytics: wraps one event in the clickstream
 * envelope (source, client timestamp, browser info, package version) and
 * ships it with navigator.sendBeacon. Serialization failures are logged,
 * never thrown.
 */
export const postClickStreamEvent = (payload: PostClickStreamEvent): void => {
  const { uri, source, event_name, attributes } = payload;
  const body = {
    source,
    client_ts: DateHelper.now(),
    events: [
      {
        event_name,
        timestamp: DateHelper.now(),
        attributes: {
          ...getBrowserInfo(),
          package_version: getPackageVersion(),
          ...attributes,
        },
      },
    ],
  };
  try {
    const blob = new Blob([JSON.stringify(body)], { type: "application/json" });
    navigator?.sendBeacon?.(uri, blob);
  } catch (err) {
    console.error("Failed to send analytics events", err);
  }
};

View File

@@ -0,0 +1,61 @@
// @ts-nocheck
// Shared axios wrapper: attaches the Alfred client key header to every call
// and retries failed responses a bounded number of times.
import axios, { AxiosError, AxiosResponse } from "axios";
const axiosInstance = axios.create({});
// Set up interceptors if needed (optional)
axiosInstance.interceptors.request.use(
  (config) => {
    // Clamp the per-request retry budget: values below 5 are kept as-is;
    // unset/out-of-range values fall back to 4 (`undefined < 5` is false).
    // @ts-ignore
    config.retry = config?.retry < 5 ? config.retry : 4;
    return config;
  },
  (error: AxiosError) => {
    return Promise.reject(error);
  },
);
axiosInstance.interceptors.response.use(
  (response) => {
    return response;
  },
  (error: AxiosError) => {
    const { config } = error;
    // Give up when the request can't be replayed or the budget is exhausted.
    if (!config || config.retry <= 1) return Promise.reject(error);
    config.retry -= 1;
    // NOTE(review): the retry fires after a 0 ms timeout, i.e. effectively
    // immediately — there is no real backoff between attempts; confirm
    // whether a delay/exponential backoff was intended here.
    const delayRetryRequest = new Promise<void>((resolve) => {
      setTimeout(() => {
        resolve();
      }, 0);
    });
    return delayRetryRequest.then(() => axiosInstance(config));
  },
);
// Create an ApiHelper object
// X-Api-Key comes from the global set by SnapshotRecorder's constructor.
const ApiHelper = {
  POST: async <T = any>(url: string, data?: any): Promise<AxiosResponse<T>> => {
    return axiosInstance.post<T>(url, data, {
      headers: { "X-Api-Key": window?.alfredClientKey },
    });
  },
  // NOTE(review): browsers treat "Origin" as a forbidden request header, so
  // this manually set value is likely ignored in browser contexts — verify
  // whether the backend actually depends on it.
  GET: async <T = any>(url: string): Promise<AxiosResponse<T>> => {
    return axiosInstance.get<T>(url, {
      headers: {
        "X-Api-Key": window?.alfredClientKey,
        "Origin": window?.location?.origin
      },
    });
  },
  PUT: async <T = any>(url: string, data?: any): Promise<AxiosResponse<T>> => {
    return axiosInstance.put<T>(url, data, {
      headers: { "X-Api-Key": window?.alfredClientKey },
    });
  },
};
export default ApiHelper;

View File

@@ -0,0 +1,7 @@
/**
 * True when running inside a real browser (a window with a document that can
 * create elements); false under SSR / Node.
 */
const canUseDOM: boolean =
  typeof window !== "undefined" && !!window.document && !!window.document.createElement;
export default canUseDOM;

View File

@@ -0,0 +1,5 @@
import dayjs from "dayjs";
/** Date/time helpers shared across the recorder. */
export const DateHelper = {
  /** Current wall-clock time as epoch milliseconds. */
  now: (): number => dayjs().valueOf(),
};

View File

@@ -0,0 +1,117 @@
import { BrowserMetaInfo } from "../types";
import UAParser from "ua-parser-js";
import canUseDOM from "./canUseDom";
import { v4 as uuidv4 } from "uuid";
import { AxiosError } from "axios";
import packageJson from "../../package.json";
const parser = new UAParser();
const parserResults = parser.getResult();
export interface DeviceDetailsType {
os: string;
os_version: string;
manufacturer: string;
model: string;
}
// Fallback metadata returned when UA parsing throws (see getBrowserInfo).
// Fix: "unknow" -> "unknown", matching the "unknown" sentinel used by
// getDeviceDetails so downstream analytics see one consistent fallback value.
const DEFAULT_BROWSER_INFO = {
  manufacturer: "",
  model: "",
  os: "",
  os_version: "",
  browser: "unknown",
  screen_resolution: "unknown",
};
// File-extension suffix appended to pre-sign upload names.
// Fix: this constant was previously named `JSON`, shadowing the global JSON
// object for the entire module — any later JSON.stringify/parse call here
// would have broken. It is unexported, so the rename is contained.
const JSON_EXT = ".json";
export type BrowserReturnType =
  | "Chrome"
  | "Mobile Safari"
  | "Firefox"
  | "Samsung Internet"
  | "Brave"
  | "Unknown";
/**
 * Returns the v2 web-sessions ingest endpoint for the given base URI.
 * A URI whose path already contains "ingest/web/sessions" is returned as-is.
 */
export const getEventIngestApiUrl = (uri: string): string => {
  if (!uri) return uri;
  const url = new URL(uri);
  if (url.pathname && url.pathname.includes("ingest/web/sessions")) return uri;
  return `${uri}/v2/ingest/web/sessions`;
};
/**
 * Builds the pre-sign endpoint URL for uploading one rrweb payload: the file
 * gets a random UUID name inside a directory named after the session.
 */
export const getAlfredPreSignUrl = (uri: string, sessionId: string): string => {
  const fileName = uuidv4();
  return `${uri}/ingest/web/session/pre-sign/${fileName}${JSON_EXT}?fileTypeExtension=${JSON_EXT}&directoryName=${sessionId}`;
};
/**
 * Flattens an AxiosError into the plain attribute shape attached to
 * clickstream failure events (message/status/statusText/url/code).
 */
export const getErrorObjForClickStream = (error: AxiosError) => {
  const response = error?.response;
  return {
    message: error.message,
    status: response?.status,
    statusText: response?.statusText,
    url: error?.config?.url,
    code: error.code,
  };
};
/**
 * OS and device fields from the module-level UA parse, with "unknown"
 * substituted for anything the parser could not determine.
 */
export const getDeviceDetails = (): DeviceDetailsType => {
  const { os, device } = parserResults;
  return {
    os: os.name || "unknown",
    os_version: os.version || "unknown",
    manufacturer: device.vendor || "unknown",
    model: device.model || "unknown",
  };
};
// Coarse browser detection from the user-agent string.
// NOTE(review): the values produced here are lowercase ("chrome", "safari",
// "firefox", "unknown", and "brave" below), but BrowserReturnType declares
// capitalized literals ("Chrome", "Mobile Safari", ...); the `as` casts mask
// this mismatch, and "Samsung Internet"/"Brave"/"Unknown" are never produced
// by this function — confirm which casing downstream analytics expect before
// aligning the type or the values.
export const getBrowser = (): BrowserReturnType => {
  const userAgent = navigator.userAgent;
  let browser = "unknown" as BrowserReturnType;
  // Chrome is tested before Safari; presumably because Chrome UAs also
  // contain "Safari" — TODO confirm against the UA strings in use.
  if (/chrome/i.test(userAgent)) {
    browser = "chrome" as BrowserReturnType;
  } else if (/safari/i.test(userAgent)) {
    browser = "safari" as BrowserReturnType;
  } else if (/firefox/i.test(userAgent)) {
    browser = "firefox" as BrowserReturnType;
  }
  return browser;
};
// Browser name with Brave detection layered on top; "unknown" outside a DOM.
export const getBrowserName = (): string => {
  if (!canUseDOM) {
    return "unknown";
  }
  let browser: string = getBrowser();
  // navigator.brave is only added in brave browser
  // eslint-disable-next-line
  // @ts-ignore
  if (navigator?.brave) {
    browser = "brave";
  }
  return browser;
};
/**
 * Browser, device and screen metadata attached to every event. Falls back to
 * DEFAULT_BROWSER_INFO if anything throws (e.g. no window/screen available).
 */
export const getBrowserInfo = (): BrowserMetaInfo => {
  try {
    const hasWindow = typeof window !== "undefined";
    const width = hasWindow ? window.screen.width : 0;
    const height = hasWindow ? window.screen.height : 0;
    return {
      browser: getBrowserName(),
      screen_resolution: `${width}x${height}`,
      ...getDeviceDetails(),
    };
  } catch (err) {
    return DEFAULT_BROWSER_INFO;
  }
};
/** Version string of this package, read from package.json at build time. */
export const getPackageVersion = (): string => packageJson.version;

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"resolveJsonModule": true,
"target": "ES2015" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
"declaration": true /* Generates corresponding '.d.ts' file. */,
"sourceMap": true /* Generates corresponding '.map' file. */,
"outDir": "./dist" /* Redirect output structure to the directory. */,
"rootDir": "." /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
"strict": true /* Enable all strict type-checking options. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
"inlineSources": true /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */,
"skipLibCheck": true /* Skip type checking of declaration files. */,
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */,
},
}

View File

@@ -0,0 +1,5 @@
storage: ./.storage
packages:
'**':
access: $anonymous
publish: $anonymous

File diff suppressed because it is too large Load Diff

45
alfred/.github/workflows/semgrep.yml vendored Normal file
View File

@@ -0,0 +1,45 @@
name: Semgrep
on:
# Scan changed files in PRs, block on new issues only (existing issues ignored)
pull_request:
branches:
- master
- main
- develop
- portal
# Schedule this job to run at a certain time, using cron syntax
# Note that * is a special character in YAML so you have to quote this string
schedule:
- cron: '00 03 * * 0' # scheduled for 8.30 AM on every sunday
jobs:
central-semgrep:
name: Static code Analysis
uses: navi-infosec/central-semgrep-action/.github/workflows/central-semgrep.yml@using-token
with:
github-event-number: ${{github.event.number}}
github-event-name: ${{github.event_name}}
github-repository: ${{github.repository}}
github-pr_owner_name: ${{github.event.pull_request.user.login}}
secrets:
READ_SEMGREP_RULES_TOKEN: ${{secrets.READ_SEMGREP_RULES_TOKEN}}
EMAIL_FETCH_TOKEN: ${{secrets.EMAIL_FETCH_TOKEN}}
run-if-failed:
runs-on: [ self-hosted, Linux ]
needs: [central-semgrep]
if: always() && (needs.semgrep.result == 'failure')
steps:
- name: Create comment
if: ${{ ( github.event.number != '' ) }}
uses: navi-synced-actions/create-or-update-comment@v2
with:
issue-number: ${{ github.event.pull_request.number }}
body: |
**Vulnerabilities have been discovered in this PR. Please check the vulnerability Analysis section of Semgrep Workflow to understand the security vulnerability. Feel free to reach out to #sast-help for more information **
- name: Assign Reviewers
if: ${{ ( github.event.number != '' ) }}
uses: navi-infosec/security-oncall-action@v1.1

26
alfred/.gitignore vendored Normal file
View File

@@ -0,0 +1,26 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
/.idea
.idea
/out
go.sum
*.env
*local-*.properties*
.DS_Store

0
alfred/Dockerfile Normal file
View File

View File

@@ -0,0 +1,27 @@
# Multi-stage build for the alfred-collector Go service.
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.23.8
# To run locally, use
#ARG GOLANG_TAG=registry.cmd.navi-tech.in/common/golang:1.22.12
# --- Stage 1: build the binary ---
FROM ${GOLANG_TAG} as builder
RUN mkdir -p /build/collector
# Disable SSH host-key prompts so private Go module fetches run non-interactively.
RUN mkdir -p /root/.ssh && echo "Host *\n StrictHostKeyChecking no" > ~/.ssh/config && chmod 0400 /root/.ssh
# NOTE(review): the deploy key arrives as a build ARG and is written into a
# builder-stage layer. It never reaches the final image, but it persists in
# build cache/history — consider BuildKit `--mount=type=ssh` or secret mounts.
ARG SSH_KEY
RUN echo "${SSH_KEY}" > /root/.ssh/id_rsa \
    && chmod 0600 /root/.ssh/id_rsa \
    && git config --global url."git@github.com:".insteadOf "https://github.com/" \
    && go env -w GOPRIVATE='github.com/navi-*'
WORKDIR /build/collector/
COPY . /build/collector/
RUN /bin/bash -c "make build-collector"
# --- Stage 2: runtime image ---
FROM ${GOLANG_TAG}
RUN mkdir -p /usr/local
WORKDIR /usr/local
RUN mkdir -p alfredTmp
# --from=0 refers to the first (builder) stage above.
COPY --from=0 /build/collector/alfred-collector /usr/local/
COPY --from=0 /build/collector/config/application-collector.properties /usr/local/config/
COPY --from=0 /build/collector/config/elasticapm-collector.properties /usr/local/config/
# Run as a dedicated non-root user.
RUN adduser --system --uid 4000 --disabled-password app-user && chown -R 4000:4000 /usr/local && chmod -R g+w /usr/local/
USER 4000
CMD /bin/bash -c "./alfred-collector"

30
alfred/Dockerfile.core Normal file
View File

@@ -0,0 +1,30 @@
# Multi-stage build for the alfred-core Go service.
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.23.8
# To run locally, use
# ARG GOLANG_TAG=registry.cmd.navi-tech.in/common/golang:1.22.12
# --- Stage 1: build the binary ---
FROM ${GOLANG_TAG} as builder
RUN mkdir -p /build/core
# Disable SSH host-key prompts so private Go module fetches run non-interactively.
RUN mkdir -p /root/.ssh && echo "Host *\n StrictHostKeyChecking no" > ~/.ssh/config && chmod 0400 /root/.ssh
# NOTE(review): SSH_KEY is baked into a builder-stage layer; it does not reach
# the final image but persists in build cache — prefer BuildKit secret mounts.
ARG SSH_KEY
RUN echo "${SSH_KEY}" > /root/.ssh/id_rsa \
    && chmod 0600 /root/.ssh/id_rsa \
    && git config --global url."git@github.com:".insteadOf "https://github.com/" \
    && go env -w GOPRIVATE='github.com/navi-*'
WORKDIR /build/core/
COPY . /build/core/
RUN /bin/bash -c "make build-core"
# --- Stage 2: runtime image, with ffmpeg and webp installed ---
FROM ${GOLANG_TAG}
RUN mkdir -p /usr/local
RUN apt-get -y update
RUN apt-get install -y ffmpeg
RUN apt-get -y install webp
WORKDIR /usr/local
RUN mkdir -p alfredTmp
COPY --from=0 /build/core/alfred-core /usr/local/
COPY --from=0 /build/core/config/application-core.properties /usr/local/config/
COPY --from=0 /build/core/config/elasticapm-core.properties /usr/local/config/
# Run as a dedicated non-root user.
RUN adduser --system --uid 4000 --disabled-password app-user && chown -R 4000:4000 /usr/local && chmod -R g+w /usr/local/
USER 4000
CMD /bin/bash -c "./alfred-core"

26
alfred/Dockerfile.ferret Normal file
View File

@@ -0,0 +1,26 @@
# Multi-stage build for the alfred-ferret Go service.
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.23.8
# To run locally, use
#ARG GOLANG_TAG=registry.cmd.navi-tech.in/common/golang:1.22.12
# --- Stage 1: build the binary ---
FROM ${GOLANG_TAG} as builder
RUN mkdir -p /build/ferret
# Disable SSH host-key prompts so private Go module fetches run non-interactively.
RUN mkdir -p /root/.ssh && echo "Host *\n StrictHostKeyChecking no" > ~/.ssh/config && chmod 0400 /root/.ssh
# NOTE(review): SSH_KEY is baked into a builder-stage layer; it does not reach
# the final image but persists in build cache — prefer BuildKit secret mounts.
ARG SSH_KEY
RUN echo "${SSH_KEY}" > /root/.ssh/id_rsa \
    && chmod 0600 /root/.ssh/id_rsa \
    && git config --global url."git@github.com:".insteadOf "https://github.com/" \
    && go env -w GOPRIVATE='github.com/navi-*'
WORKDIR /build/ferret/
COPY . /build/ferret/
RUN /bin/bash -c "make build-ferret"
# --- Stage 2: runtime image ---
FROM ${GOLANG_TAG}
RUN mkdir -p /usr/local
WORKDIR /usr/local
COPY --from=0 /build/ferret/alfred-ferret /usr/local/
COPY --from=0 /build/ferret/config/application-ferret.properties /usr/local/config/
COPY --from=0 /build/ferret/config/elasticapm-ferret.properties /usr/local/config/
# Run as a dedicated non-root user.
RUN adduser --system --uid 4000 --disabled-password app-user && chown -R 4000:4000 /usr/local && chmod -R g+w /usr/local/
USER 4000
CMD /bin/bash -c "./alfred-ferret"

View File

@@ -0,0 +1,26 @@
ARG GOLANG_TAG=193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/golang:1.23.8
# To run locally, use
# ARG GOLANG_TAG=registry.cmd.navi-tech.in/common/golang:1.22.12

# --- Build stage: compiles alfred-ingester, pulling private Go modules over SSH ---
FROM ${GOLANG_TAG} AS builder
RUN mkdir -p /build/ingester
# printf (unlike echo) expands \n portably across shells; 0700 (not 0400) is the
# correct mode for a directory that must be traversed.
RUN mkdir -p /root/.ssh && printf 'Host *\n  StrictHostKeyChecking no\n' > /root/.ssh/config && chmod 0700 /root/.ssh
# NOTE(review): SSH_KEY exists only in builder-stage layers; the runtime image
# below starts FROM a fresh base, so the key is not shipped.
ARG SSH_KEY
RUN echo "${SSH_KEY}" > /root/.ssh/id_rsa \
 && chmod 0600 /root/.ssh/id_rsa \
 && git config --global url."git@github.com:".insteadOf "https://github.com/" \
 && go env -w GOPRIVATE='github.com/navi-*'
WORKDIR /build/ingester/
COPY . /build/ingester/
RUN /bin/bash -c "make build-ingester"

# --- Runtime stage ---
FROM ${GOLANG_TAG}
RUN mkdir -p /usr/local
WORKDIR /usr/local
# Reference the build stage by name instead of positional index 0.
COPY --from=builder /build/ingester/alfred-ingester /usr/local/
COPY --from=builder /build/ingester/config/application-ingester.properties /usr/local/config/
COPY --from=builder /build/ingester/config/elasticapm-ingester.properties /usr/local/config/
RUN adduser --system --uid 4000 --disabled-password app-user && chown -R 4000:4000 /usr/local && chmod -R g+w /usr/local/
USER 4000
CMD /bin/bash -c "./alfred-ingester"

47
alfred/Makefile Normal file
View File

@@ -0,0 +1,47 @@
# Build targets: compile each alfred service as a static, stripped binary.
.PHONY: build-ingester
build-ingester:
	go mod tidy && CGO_ENABLED=0 go build -ldflags="-s -w" -o alfred-ingester cmd/ingester/main.go

.PHONY: build-collector
build-collector:
	go mod tidy && CGO_ENABLED=0 go build -ldflags="-s -w" -o alfred-collector cmd/collector/main.go

.PHONY: build-core
build-core:
	go mod tidy && CGO_ENABLED=0 go build -ldflags="-s -w" -o alfred-core cmd/core/main.go

.PHONY: build-ferret
build-ferret:
	go mod tidy && CGO_ENABLED=0 go build -ldflags="-s -w" -o alfred-ferret cmd/ferret/main.go

# Local docker image targets.
.PHONY: build-ingester-docker-dev
build-ingester-docker-dev: build-ingester
	docker build -t alfred-ingester . -f Dockerfile.ingester

.PHONY: build-collector-docker-dev
build-collector-docker-dev: build-collector
	docker build -t alfred-collector . -f Dockerfile.collector

# Bug fix: the core and ferret image targets were missing the -t flag, so
# "alfred-core"/"alfred-ferret" was parsed as the build context path.
.PHONY: build-core-docker-dev
build-core-docker-dev: build-core
	docker build -t alfred-core . -f Dockerfile.core

.PHONY: build-ferret-docker-dev
build-ferret-docker-dev: build-ferret
	docker build -t alfred-ferret . -f Dockerfile.ferret

# Run targets: build the image first, then run it.
# Bug fix: these previously depended on undefined targets (run-ingester etc.),
# which makes make fail with "No rule to make target".
.PHONY: run-ingester-docker-dev
run-ingester-docker-dev: build-ingester-docker-dev
	docker run alfred-ingester

.PHONY: run-collector-docker-dev
run-collector-docker-dev: build-collector-docker-dev
	docker run alfred-collector

.PHONY: run-core-docker-dev
run-core-docker-dev: build-core-docker-dev
	docker run alfred-core

.PHONY: run-ferret-docker-dev
run-ferret-docker-dev: build-ferret-docker-dev
	docker run alfred-ferret

0
alfred/api/.gitignore vendored Normal file
View File

View File

@@ -0,0 +1,34 @@
package request

// SessionFilters holds the search criteria used to look up mobile app
// session recordings. Zero values mean the corresponding filter is not
// applied.
// NOTE(review): Labels, FragmentNames and ScreenTags appear to be delimited
// lists packed into a single string - confirm the separator with callers.
type SessionFilters struct {
	CustomerId        string
	DeviceId          string
	SessionId         string
	PhoneNumber       string
	StartTimestamp    int64 // lower time bound; presumably epoch millis - confirm units
	EndTimestamp      int64 // upper time bound; presumably epoch millis - confirm units
	Labels            string
	AppName           string
	ScreenName        string
	FragmentNames     string
	Vertical          string
	AppVersion        string
	ScreenTags        string
	CodePushVersion   string
	AgentEmailId      string
	SnapshotPerSecond string
	SortBy            string
	AppOs             string
}

// WebSessionFilters holds the search criteria used to look up web session
// recordings. Zero values mean the corresponding filter is not applied.
type WebSessionFilters struct {
	StartTimestamp int64
	EndTimestamp   int64
	AgentId        string
	TicketId       string
	SessionId      string
	DeviceId       []string // multiple device ids may be matched at once
	ProjectName    string
	EmailId        string
	SortBy         string
}

View File

@@ -0,0 +1,69 @@
package response

import "alfred/model/ingester"

// BaseAttributesDTO mirrors a session's device and recording base attributes
// as returned to API clients. Most fields are omitted from JSON when empty;
// StartTimestamp/EndTimestamp/HasErrors are always serialised so zero values
// survive round-trips.
type BaseAttributesDTO struct {
	AppVersionCode     string            `json:"app_version_code,omitempty"`
	AppVersionName     string            `json:"app_version_name,omitempty"`
	DeviceId           string            `json:"device_id,omitempty"`
	DeviceModel        string            `json:"device_model,omitempty"`
	DeviceManufacturer string            `json:"device_manufacturer,omitempty"`
	ScreenResolution   string            `json:"screen_resolution,omitempty"`
	AppOS              string            `json:"app_os,omitempty"`
	OsVersion          string            `json:"os_version,omitempty"`
	Latitude           float32           `json:"latitude,omitempty"`
	Longitude          float32           `json:"longitude,omitempty"`
	NetworkType        string            `json:"network_type,omitempty"`
	CustomerId         string            `json:"customer_id,omitempty"`
	UpTime             int64             `json:"up_time,omitempty"`
	CarrierName        string            `json:"carrier_name,omitempty"`
	Metadata           map[string]string `json:"metadata,omitempty"` // free-form client metadata; schema not defined here
	SessionId          string            `json:"session_id,omitempty"`
	ParentSessionId    string            `json:"parent_session_id,omitempty"`
	TraceId            string            `json:"trace_id,omitempty"`
	SessionTimeStamp   int64             `json:"session_time_stamp,omitempty"`
	StartTimestamp     int64             `json:"start_timestamp"`
	EndTimestamp       int64             `json:"end_timestamp"`
	SnapshotPerSecond  int64             `json:"snapshot_per_second,omitempty"`
	HasErrors          bool              `json:"has_errors"`
	ImageType          string            `json:"image_type,omitempty"`
}

// VideoMetadata carries optional video information for a session recording.
type VideoMetadata struct {
	Duration *int64 `json:"duration,omitempty"` // pointer so an unknown duration is omitted rather than encoded as 0
}

// SearchSessionResponseData is a single row in a session search result.
type SearchSessionResponseData struct {
	DeviceAttributes  []ingester.DeviceAttributes `json:"device_attributes"`
	BaseAttributesDTO BaseAttributesDTO           `json:"base_attributes"`
	Labels            []string                    `json:"labels"`
	Metadata          VideoMetadata               `json:"metadata"`
	TouchCounts       int64                       `json:"touch_counts"`
	CreatedAt         int64                       `json:"created_at"`
}

// FilterData is one label/value option offered by a filter or dropdown.
type FilterData struct {
	Label string `json:"label"`
	Value string `json:"value"`
}

// SelectionConfig describes how a filter's options may be selected by the UI.
type SelectionConfig string

const (
	MULTI_SELECT  SelectionConfig = "MULTI_SELECT"
	SINGLE_SELECT SelectionConfig = "SINGLE_SELECT"
	RANGE_PICKER  SelectionConfig = "RANGE_PICKER"
)

// FilterResponseData describes one search filter with its available options.
type FilterResponseData struct {
	FilterName      string          `json:"filter_name"`
	FilterKey       string          `json:"filter_key"`
	FilterData      []FilterData    `json:"filter_data"`
	SelectionConfig SelectionConfig `json:"selection_config"`
}

// DropdownResponseData describes one dropdown with its available options.
type DropdownResponseData struct {
	DropdownKey     string          `json:"dropdown_key"`
	FilterData      []FilterData    `json:"data"`
	SelectionConfig SelectionConfig `json:"selection_config"`
}

View File

@@ -0,0 +1,42 @@
package response

// SessionResponseData summarises a generated session recording returned to the
// portal: the playback link plus basic device/session attributes.
// NOTE(review): the "appVersionname" JSON tag (lowercase second n) looks like a
// typo, but existing API consumers may depend on it - confirm before renaming.
type SessionResponseData struct {
	Link               string   `json:"link"`
	SessionId          string   `json:"sessionId,omitempty"`
	DeviceId           string   `json:"deviceId,omitempty"`
	CustomerId         string   `json:"customerId,omitempty"`
	PhoneNumber        string   `json:"phoneNumber,omitempty"`
	Model              string   `json:"model,omitempty"`
	DeviceCarrierName  string   `json:"deviceCarrierName,omitempty"`
	AppVersionCode     string   `json:"appVersionCode,omitempty"`
	AppVersionName     string   `json:"appVersionname,omitempty"`
	DeviceOs           string   `json:"deviceOs,omitempty"`
	RecordStartingTime int64    `json:"recordStartingTime,omitempty"`
	Labels             []string `json:"labels,omitempty"`
}

// SessionErrorData identifies a session in an error response.
type SessionErrorData struct {
	SessionId   string `json:"sessionId,omitempty"`
	DeviceId    string `json:"deviceId,omitempty"`
	CustomerId  string `json:"customerId,omitempty"`
	PhoneNumber string `json:"phoneNumber,omitempty"`
}

// VideoGenerationStatus reports incremental progress of a session video
// generation job along with the session's identifying attributes.
// NOTE(review): "TotalFragments" and "LatestUrl" JSON tags break the camelCase
// convention of the other fields - confirm consumers before normalising.
type VideoGenerationStatus struct {
	Link                      string   `json:"link"`
	SessionId                 string   `json:"sessionId,omitempty"`
	DeviceId                  string   `json:"deviceId,omitempty"`
	CustomerId                string   `json:"customerId,omitempty"`
	PhoneNumber               string   `json:"phoneNumber,omitempty"`
	Model                     string   `json:"model,omitempty"`
	DeviceCarrierName         string   `json:"deviceCarrierName,omitempty"`
	AppVersionCode            string   `json:"appVersionCode,omitempty"`
	AppVersionName            string   `json:"appVersionname,omitempty"`
	DeviceOs                  string   `json:"deviceOs,omitempty"`
	RecordStartingTime        int64    `json:"recordStartingTime,omitempty"`
	Labels                    []string `json:"labels,omitempty"`
	FragmentsCompletedTillNow int      `json:"fragmentsCompletedTillNow"`
	TotalFragments            int      `json:"TotalFragments"`
	LatestUrl                 string   `json:"LatestUrl,omitempty"`
	VideoGeneratedTillNow     int      `json:"videoGeneratedTillNow"`
}

View File

@@ -0,0 +1,11 @@
package response

import (
	"alfred/model/ingester"
)

// WebSessionResponseData is a single row in a web session search result.
type WebSessionResponseData struct {
	BaseAttributesDTO ingester.WebBaseAttributes    `json:"base_attributes"`
	SessionAttributes ingester.WebSessionAttributes `json:"session_attribute,omitempty"`
	DurationInMillis  int64                         `json:"duration,omitempty"` // session duration in milliseconds
}

35
alfred/application.yml Normal file
View File

@@ -0,0 +1,35 @@
APP_VERSION: "0.0.1"
APP_NAME: "alfred"
ENVIRONMENT: "DEVELOPMENT"
LOG_LEVEL: "DEBUG"
APP_PORT: 9999
TIMEZONE: "Asia/Kolkata"
DEFAULT_LOCALE: "en"
# Prometheus Config
PROMETHEUS_APP_NAME: "alfred"
PROMETHEUS_HOST: "localhost"
PROMETHEUS_PORT: 4001
PROMETHEUS_ENABLED: true
PROMETHEUS_TIMEOUT: 10
PROMETHEUS_FLUSH_INTERVAL_IN_MS: 200
PROMETHEUS_HISTOGRAM_BUCKETS: 50.0,75.0,90.0,95.0,99.0
# Sentry Configs
SENTRY_ENABLED: "false"
SENTRY_DSN: "dummy"
## Redis
REDIS_HOST: "localhost"
REDIS_PORT: 6379
REDIS_USERNAME: "root"
REDIS_PASSWORD: "password"
REDIS_MAX_ACTIVE_CONNECTIONS: 50
REDIS_MAX_IDLE_CONNECTIONS: 50
REDIS_READ_TIMEOUT_MS: 500
REDIS_CONNECTION_TIMEOUT_MS: 1000
REDIS_DB_ID: 0
# Translations
TRANSLATIONS_PATH: "./i18n"

35
alfred/ci.sample.yml Normal file
View File

@@ -0,0 +1,35 @@
APP_VERSION: "0.0.1"
APP_NAME: "alfred"
ENVIRONMENT: "DEVELOPMENT"
LOG_LEVEL: "DEBUG"
APP_PORT: 9999
TIMEZONE: "Asia/Kolkata"
DEFAULT_LOCALE: "en"
# Prometheus Config
PROMETHEUS_APP_NAME: "alfred"
PROMETHEUS_HOST: "localhost"
PROMETHEUS_PORT: 4001
PROMETHEUS_ENABLED: true
PROMETHEUS_TIMEOUT: 10
PROMETHEUS_FLUSH_INTERVAL_IN_MS: 200
PROMETHEUS_HISTOGRAM_BUCKETS: 50.0,75.0,90.0,95.0,99.0
# Sentry Configs
SENTRY_ENABLED: "false"
SENTRY_DSN: "dummy"
## Redis
REDIS_HOST: "localhost"
REDIS_PORT: 6379
REDIS_USERNAME: "root"
REDIS_PASSWORD: "password"
REDIS_MAX_ACTIVE_CONNECTIONS: 50
REDIS_MAX_IDLE_CONNECTIONS: 50
REDIS_READ_TIMEOUT_MS: 500
REDIS_CONNECTION_TIMEOUT_MS: 1000
REDIS_DB_ID: 0
# Translations
TRANSLATIONS_PATH: "./i18n"

View File

@@ -0,0 +1,21 @@
package dependency

import (
	"alfred/config"
	kafka "alfred/pkg/kafka/produce"
	"alfred/pkg/s3"
	"alfred/repository"
	"alfred/repositoryAccessLayer"
)

// CollectorDependencies is currently an empty placeholder for collector-wide
// dependencies.
type CollectorDependencies struct {
}

// InitCollectorDependencies wires the collector at startup: Kafka producer,
// Elasticsearch-backed repositories, the repository access layer and the S3
// client, then starts the Kafka consumer goroutines via InitConsumer.
func InitCollectorDependencies() {
	kafkaProducer := kafka.NewKProducer(config.GetCollectorConfig().BaseConfig.Env, config.GetCollectorConfig().KafkaConfig.BaseConfig)
	esConfig := config.GetCollectorConfig().ElasticSearchConfig.BaseConfig
	repositories := repository.InitRepositories(esConfig)
	repositoryAccessLayer := repositoryAccessLayer.InitRepositoryAccessLayer(repositories)
	s3Client := s3.NewS3Client()
	InitConsumer(repositoryAccessLayer, s3Client, kafkaProducer)
}

View File

@@ -0,0 +1,71 @@
package dependency
import (
"alfred/cmd/collector/listener"
"alfred/config"
kafka "alfred/pkg/kafka/produce"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"context"
)
// InitConsumer wires up all collector Kafka listeners and starts each consumer
// loop on its own goroutine so the caller is not blocked. Each Consume call
// blocks until a rebalance or error; the loop then re-joins the group.
//
// Bug fix: the previous version logged "consumer stopped" AFTER an infinite
// for-loop (unreachable code, flagged by go vet) and discarded the error
// returned by Consume. Errors are now surfaced on every iteration.
func InitConsumer(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, kafkaProducer kafka.KProducer) {
	sessionUploadListener := listener.NewSessionUploadListener(repositories, kafkaProducer)
	eventIngestListener := listener.NewEventIngestListener(repositories, kafkaProducer)
	webSessionListener := listener.NewWebSessionUploadListener(repositories, s3Client)
	errorEventsIngestListener := listener.NewErrorEventsIngestListener(repositories, kafkaProducer)
	go func() {
		for {
			if err := sessionUploadListener.ConsumerGroup.Consume(context.Background(),
				[]string{config.GetCollectorConfig().KafkaConfig.SessionUploadTopic},
				sessionUploadListener); err != nil {
				log.Error("session upload consumer error: " + err.Error())
			}
		}
	}()
	// todo: in phase 2 - metric upload listener for the mobile metric
	// ingestion topic.
	go func() {
		for {
			if err := eventIngestListener.ConsumerGroup.Consume(context.Background(),
				[]string{config.GetCollectorConfig().KafkaConfig.EventIngestionTopic},
				eventIngestListener); err != nil {
				log.Error("event ingestion consumer error: " + err.Error())
			}
		}
	}()
	go func() {
		for {
			if err := webSessionListener.ConsumerGroup.Consume(context.Background(),
				[]string{config.GetCollectorConfig().KafkaConfig.WebSessionUploadTopic},
				webSessionListener); err != nil {
				log.Error("web session upload consumer error: " + err.Error())
			}
		}
	}()
	go func() {
		for {
			if err := errorEventsIngestListener.ConsumerGroup.Consume(context.Background(),
				[]string{config.GetCollectorConfig().KafkaConfig.ErrorEventsUploadTopic},
				errorEventsIngestListener); err != nil {
				log.Error("error-events consumer error: " + err.Error())
			}
		}
	}()
	// NOTE: the error-events *update* listener (ErrorEventsUpdateTopic) is
	// intentionally disabled for now; see NewErrorEventsUpdateListener.
}

View File

@@ -0,0 +1,44 @@
package app
import (
"alfred/cmd/collector/app/dependency"
"alfred/config"
"alfred/internal/metrics"
"alfred/pkg/log"
"alfred/utils"
"fmt"
"github.com/gin-gonic/gin"
"go.elastic.co/apm/module/apmgin/v2"
"go.uber.org/zap"
"net/http"
"strconv"
)
// Server wraps the gin engine serving the collector's health endpoint and
// admin/metrics routes.
type Server struct {
	gin *gin.Engine
}

// NewServer initialises all collector dependencies (Kafka, Elasticsearch, S3
// and the consumer goroutines) and returns a Server bound to the given engine.
func NewServer(gin *gin.Engine) *Server {
	dependency.InitCollectorDependencies()
	return &Server{
		gin: gin,
	}
}

// Handler installs the Elastic APM middleware, exposes the Prometheus admin
// endpoint on the configured metric port and registers the health check.
func (s *Server) Handler() {
	s.gin.Use(apmgin.Middleware(s.gin))
	metrics.AdminHandler(config.GetCollectorConfig().BaseConfig.MetricPort)
	s.healthCheckHandler()
}

// healthCheckHandler registers the liveness probe route.
func (s *Server) healthCheckHandler() {
	s.gin.GET(utils.PING, func(c *gin.Context) {
		c.String(http.StatusOK, utils.PONG)
	})
}

// Start installs the handlers and serves HTTP on the configured port.
// Bug fix: the error returned by gin.Run was previously discarded, so e.g. a
// port conflict terminated the server silently.
func (s *Server) Start() {
	s.Handler()
	log.Info("starting alfred collector server", zap.String("port", strconv.Itoa(config.GetCollectorConfig().BaseConfig.Port)))
	if err := s.gin.Run(fmt.Sprintf(":%v", config.GetCollectorConfig().BaseConfig.Port)); err != nil {
		log.Error("alfred collector server stopped: " + err.Error())
	}
}

View File

@@ -0,0 +1,22 @@
package helper
import (
"alfred/utils"
"github.com/Shopify/sarama"
)
// IdentifyClientThroughHeader resolves the originating client application from
// the Kafka record headers; when no client-name header is present it falls
// back to the Navi user app.
func IdentifyClientThroughHeader(headers []*sarama.RecordHeader) string {
	for _, h := range headers {
		if string(h.Key) == utils.CLIENT_NAME {
			return string(h.Value)
		}
	}
	return utils.NAVI_USER_APP
}

View File

@@ -0,0 +1,78 @@
package helper
import (
"alfred/config"
"alfred/internal/clients"
"alfred/model/ferret"
"alfred/model/ingester"
"alfred/utils"
)
// BuildESUploadErrorEvent builds an error-events payload describing a failed
// Elasticsearch upload for the given session event, so that the failure itself
// can be ingested into the error-events index and alerted on.
func BuildESUploadErrorEvent(alfredEvent ingester.BaseAttributes, zipName string, client string, err error, index, eventId string, statusCode int) ferret.ErrorEventsAttributes {
	// Positional struct literal: first the shared error attributes, then the
	// single-element event list for this failure.
	return ferret.ErrorEventsAttributes{
		ferret.ErrorAttribute{
			SessionId:      alfredEvent.SessionId,
			ClientName:     ferret.ClientName(client),
			DeviceId:       alfredEvent.DeviceId,
			CustomerId:     alfredEvent.CustomerId,
			PhoneNumber:    alfredEvent.PhoneNumber,
			AppVersionCode: alfredEvent.AppVersionCode,
			AppVersionName: alfredEvent.AppVersionName,
		},
		[]ferret.ErrorEvent{{
			ErrorTimestamp: utils.GetCurrentTimeInMillis(),
			ZipNames:       []string{zipName},
			ErrorType:      ferret.ES_UPLOAD_FAILURE,
			// The target ES index doubles as the "request URL" of the failure.
			RequestURL:      ferret.RequestUrl(index),
			RequestMethod:   ferret.POST,
			ClientTs:        alfredEvent.ClientTs,
			ErrorName:       ferret.ES_UPLOAD,
			ErrorStatusCode: statusCode,
			ErrorMessage:    err.Error(),
			IsActive:        true,
			SessionId:       alfredEvent.SessionId,
			EventIdList:     []string{eventId},
		}},
	}
}
// BuildErrorEventsSlackMessage flattens an error event into the key/value map
// consumed by the Slack alert template configured for error events.
func BuildErrorEventsSlackMessage(errorEvent ferret.ErrorEventAttribute) clients.SlackRequest {
	attr := errorEvent.ErrorAttribute
	ev := errorEvent.ErrorEvent
	payload := map[string]interface{}{
		"session_id":               attr.SessionId,
		"client_name":              attr.ClientName,
		"device_id":                attr.DeviceId,
		"customer_id":              attr.CustomerId,
		"phone_number":             attr.PhoneNumber,
		"error_timestamp":          ev.ErrorTimestamp,
		"zip_names":                ev.ZipNames,
		"error_type":               ev.ErrorType,
		"request_url":              ev.RequestURL,
		"request_method":           ev.RequestMethod,
		"client_ts":                ev.ClientTs,
		"error_name":               ev.ErrorName,
		"error_status_code":        ev.ErrorStatusCode,
		"error_message":            ev.ErrorMessage,
		"is_active":                ev.IsActive,
		"network_strength_in_kbps": ev.NetworkStrengthInKbps,
		"event_session_id":         ev.SessionId,
		"event_id_list":            ev.EventIdList,
	}
	return clients.SlackRequest{
		Data:       payload,
		TemplateId: config.GetCollectorConfig().ErrorEventsSlackTemplateId,
	}
}
// ErrorEventsValidator reports whether an error event should be ingested: its
// timestamp must fall inside the configured past/future window and its message
// must not be on the configured ignore list.
func ErrorEventsValidator(event ferret.ErrorEventAttribute) bool {
	cfg := config.GetCollectorConfig()
	if !utils.ValidatePresentTime(event.ErrorEvent.ErrorTimestamp, cfg.PastTimestampValidationDiffInHours, cfg.FutureTimestampValidationDiffInHours) {
		return false
	}
	return !utils.Contains(cfg.IngestErrorEventsFilter, event.ErrorEvent.ErrorMessage)
}

View File

@@ -0,0 +1,118 @@
package listener
import (
"alfred/cmd/collector/helper"
"alfred/config"
"alfred/internal/metrics"
"alfred/model/ferret"
"alfred/pkg/kafka"
kafka2 "alfred/pkg/kafka/produce"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"os"
"time"
)
// ErrorEventsIngestListener is an (empty) marker interface for the
// error-events consumer; callers use the concrete implementation below.
type ErrorEventsIngestListener interface {
}

// ErrorEventsIngestListenerImpl consumes client error events from Kafka and
// indexes them into Elasticsearch, pushing a Slack alert when indexing fails.
type ErrorEventsIngestListenerImpl struct {
	ConsumerGroup          sarama.ConsumerGroup
	errorEventsAccessLayer repositoryAccessLayer.ErrorEventsAccessLayer
	kafkaProducer          kafka2.KProducer // used to publish Slack alert messages
}

// NewErrorEventsIngestListener builds the sarama consumer group from collector
// config. The process exits if the consumer cannot be created, since the
// collector cannot function without it.
func NewErrorEventsIngestListener(repositories *repositoryAccessLayer.RepositoryAccessLayer, kafkaProducer kafka2.KProducer) *ErrorEventsIngestListenerImpl {
	errorEventsIngestListener, err := kafka.SaramaKafkaConsumer(
		config.GetCollectorConfig().BaseConfig.Env,
		config.GetCollectorConfig().KafkaConfig.BaseConfig,
		config.GetCollectorConfig().KafkaConfig.ErrorEventsUploadTopicGroupId,
	)
	if err != nil {
		log.Error("error-events ingest listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	return &ErrorEventsIngestListenerImpl{
		ConsumerGroup:          errorEventsIngestListener,
		errorEventsAccessLayer: repositories.ErrorEventsAccessLayer,
		kafkaProducer:          kafkaProducer,
	}
}
// Setup is the sarama hook invoked when a consumer group session begins.
func (eil *ErrorEventsIngestListenerImpl) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("setup error-events ingest kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// Cleanup is the sarama hook invoked when a consumer group session ends.
func (eil *ErrorEventsIngestListenerImpl) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("cleanup error-events ingest kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// ConsumeClaim drains one partition claim: each message is processed
// synchronously and then marked consumed (at-most-once from the broker's view,
// since the mark happens regardless of processing outcome).
func (eil *ErrorEventsIngestListenerImpl) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	for message := range consumerGroupClaim.Messages() {
		eil.processMessage(message)
		consumerGroupSession.MarkMessage(message, utils.EMPTY)
	}
	return nil
}
// processMessage decodes one error-events batch from Kafka, validates each
// event and asynchronously indexes the valid ones into Elasticsearch. Events
// that still fail after retries are forwarded to the Slack alert topic.
func (eil *ErrorEventsIngestListenerImpl) processMessage(message *sarama.ConsumerMessage) {
	// Never let a single bad message crash the consumer group; log the panic
	// instead of swallowing it silently.
	defer func() {
		if r := recover(); r != nil {
			log.Error("panic while processing error-events message", zap.Any("recover", r))
		}
	}()
	var alfredErrorEvents ferret.ErrorEventsAttributes
	if err := json.Unmarshal(message.Value, &alfredErrorEvents); err != nil {
		metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
		log.Error("json unmarshalling failed while ingesting error-events data to elasticsearch", zap.Error(err))
		// Bug fix: bail out here - previously the zero-value payload fell
		// through and was additionally counted as a success below.
		return
	}
	metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
	clientName := string(alfredErrorEvents.ErrorAttribute.ClientName)
	index := config.GetCollectorConfig().ElasticSearchConfig.ErrorEventsUploadIndexClientMap[clientName]
	for _, errorEvent := range alfredErrorEvents.ErrorEvents {
		errorEvent.IsActive = true
		// event is a fresh variable each iteration, so capturing it in the
		// goroutine below is safe.
		event := ferret.ErrorEventAttribute{
			ErrorAttribute: alfredErrorEvents.ErrorAttribute,
			ErrorEvent:     errorEvent,
			CreatedAt:      utils.GetCurrentTimeInMillis(),
		}
		if !helper.ErrorEventsValidator(event) {
			continue
		}
		go func() {
			retryFunc := func() (interface{}, error) {
				esStatusCode, err := eil.uploadErrorEventToElasticSearch(event, index)
				return esStatusCode, err
			}
			_, err := utils.RetryFunctionWithResponseAndError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
			if err != nil {
				log.Error("error ingesting error-events data to elasticsearch", zap.Error(err))
				// Best-effort Slack alert so the dropped event is visible.
				errorEventsSlackMessage := helper.BuildErrorEventsSlackMessage(event)
				if err := eil.kafkaProducer.SendMessage(errorEventsSlackMessage, config.GetCollectorConfig().KafkaConfig.ErrorEventsSlackPushTopic, event.ErrorAttribute.DeviceId, clientName); err != nil {
					log.Error("error publishing error-events data to kafka", zap.Error(err))
				}
			}
		}()
	}
}
// uploadErrorEventToElasticSearch serialises a single error event and indexes
// it into the given Elasticsearch index, returning the ES status code.
func (eil *ErrorEventsIngestListenerImpl) uploadErrorEventToElasticSearch(errorEvent ferret.ErrorEventAttribute, index string) (int, error) {
	errorEventString, err := json.Marshal(errorEvent)
	if err != nil {
		log.Error("error marshalling error event", zap.Error(err))
		// Bug fix: previously the upload proceeded with an empty payload
		// after a marshal failure; abort instead.
		return 0, err
	}
	esStatusCode, err := eil.errorEventsAccessLayer.UploadErrorEvents(string(errorEventString), index)
	if err != nil {
		log.Error("error ingesting error event to elasticsearch", zap.Error(err))
		return esStatusCode, err
	}
	return esStatusCode, nil
}

View File

@@ -0,0 +1,154 @@
package listener
import (
"alfred/cmd/collector/helper"
"alfred/config"
"alfred/internal/metrics"
"alfred/model/es"
"alfred/model/ingester"
"alfred/pkg/kafka"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"os"
"time"
)
// ErrorEventsUpdateListener consumes session-upload messages and marks the
// error events resolved by a successful upload as inactive in Elasticsearch.
// NOTE(review): this listener is currently not started (its goroutine is
// commented out in InitConsumer).
type ErrorEventsUpdateListener struct {
	ConsumerGroup          sarama.ConsumerGroup
	errorEventsAccessLayer repositoryAccessLayer.ErrorEventsAccessLayer
	sessionsAccessLayer    repositoryAccessLayer.SessionsAccessLayer
}

// NewErrorEventsUpdateListener builds the sarama consumer group from collector
// config; the process exits if the consumer cannot be created.
func NewErrorEventsUpdateListener(repositories *repositoryAccessLayer.RepositoryAccessLayer) *ErrorEventsUpdateListener {
	errorEventsUpdateListener, err := kafka.SaramaKafkaConsumer(
		config.GetCollectorConfig().BaseConfig.Env,
		config.GetCollectorConfig().KafkaConfig.BaseConfig,
		config.GetCollectorConfig().KafkaConfig.ErrorEventsUpdateTopicGroupId,
	)
	if err != nil {
		log.Error("error events update listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	return &ErrorEventsUpdateListener{
		ConsumerGroup:          errorEventsUpdateListener,
		errorEventsAccessLayer: repositories.ErrorEventsAccessLayer,
		sessionsAccessLayer:    repositories.SessionsAccessLayer,
	}
}
// Setup is the sarama hook invoked when a consumer group session begins.
func (eul *ErrorEventsUpdateListener) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("setup error events update kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// Cleanup is the sarama hook invoked when a consumer group session ends.
func (eul *ErrorEventsUpdateListener) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("cleanup error events update kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// ConsumeClaim drains one partition claim: each message is processed
// synchronously and then marked consumed regardless of processing outcome.
func (eul *ErrorEventsUpdateListener) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	for message := range consumerGroupClaim.Messages() {
		eul.processMessage(message)
		consumerGroupSession.MarkMessage(message, "")
	}
	return nil
}
// processMessage handles one session-upload message: it looks up the session's
// outstanding error events and marks the ones covered by this upload inactive.
func (eul *ErrorEventsUpdateListener) processMessage(message *sarama.ConsumerMessage) {
	// Swallow panics so one bad message cannot kill the consumer group.
	// NOTE(review): the recovered value is discarded - consider logging it.
	defer func() {
		recover()
	}()
	client := helper.IdentifyClientThroughHeader(message.Headers)
	var alfredSessionRecordingEvent ingester.SessionUploadRequest
	err := json.Unmarshal(message.Value, &alfredSessionRecordingEvent)
	if err != nil {
		metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
		log.Error("error unmarshalling session upload event in error event update listener", zap.Error(err))
		return
	}
	metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
	// NOTE(review): fetchErrorEventsForSession already retries each ES call
	// internally; wrapping it in another retry multiplies the attempt count.
	retryFunc := func() error {
		err := eul.fetchErrorEventsForSession(alfredSessionRecordingEvent, client)
		return err
	}
	err = utils.RetryFunctionWithError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
	if err != nil {
		log.Error("error handling and removing error events for session", zap.String("session_id", alfredSessionRecordingEvent.BaseAttributes.SessionId), zap.String("zipName", alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId), zap.Error(err))
	}
}
// fetchErrorEventsForSession queries the session's active error events that
// match the configured error-name filter, then narrows to the ones attached to
// this upload's zip (EventId). When matches exist it delegates to
// updateErrorEventsForSession to mark them inactive.
func (eul *ErrorEventsUpdateListener) fetchErrorEventsForSession(alfredSessionRecordingEvent ingester.SessionUploadRequest, client string) error {
	sessionUploadIndex := config.GetCollectorConfig().ElasticSearchConfig.AppSessionUploadIndexClientMap[client]
	errorEventsUploadIndex := config.GetCollectorConfig().ElasticSearchConfig.ErrorEventsUploadIndexClientMap[client]
	sessionErrorEventsFilter := config.GetCollectorConfig().SessionErrorEventsFilter
	keyValueMap := make(map[string][]string)
	keyValueMap["error_attributes.session_id"] = []string{alfredSessionRecordingEvent.BaseAttributes.SessionId}
	keyValueMap["error_event.error_name"] = sessionErrorEventsFilter
	var sessionErrorEvents []es.ErrorEventsResponse
	var err error
	// First query: all filtered error events for the session.
	retryFunc := func() error {
		sessionErrorEvents, err = eul.errorEventsAccessLayer.FetchSessionErrorEventsWithKeyValue(keyValueMap, errorEventsUploadIndex)
		return err
	}
	err = utils.RetryFunctionWithError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
	if err != nil {
		log.Error("error fetching session error events from session id", zap.String("session_id", alfredSessionRecordingEvent.BaseAttributes.SessionId), zap.String("zipName", alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId), zap.Error(err))
		return err
	}
	if len(sessionErrorEvents) == 0 {
		return nil
	}
	// Second query: restrict to events recorded against this upload's zip.
	keyValueMap["error_event.zip_name"] = []string{alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId}
	var sessionErrorEventsWithZipName []es.ErrorEventsResponse
	retryFunc = func() error {
		sessionErrorEventsWithZipName, err = eul.errorEventsAccessLayer.FetchSessionErrorEventsWithKeyValue(keyValueMap, errorEventsUploadIndex)
		return err
	}
	err = utils.RetryFunctionWithError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
	if err != nil {
		log.Error("error fetching session error events from session id with zip", zap.String("session_id", alfredSessionRecordingEvent.BaseAttributes.SessionId), zap.String("zipName", alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId), zap.Error(err))
		return err
	}
	if len(sessionErrorEventsWithZipName) == 0 {
		return nil
	}
	// Collect the doc id / index pairs to update in bulk.
	var errorEventsdocIdList []string
	var errorEventsindexList []string
	for _, sessionErrorEvent := range sessionErrorEventsWithZipName {
		errorEventsdocIdList = append(errorEventsdocIdList, sessionErrorEvent.DocId)
		errorEventsindexList = append(errorEventsindexList, sessionErrorEvent.Index)
	}
	return eul.updateErrorEventsForSession(errorEventsdocIdList, errorEventsindexList, sessionErrorEvents, sessionErrorEventsWithZipName, alfredSessionRecordingEvent, sessionUploadIndex)
}
// updateErrorEventsForSession marks the matched error-event documents inactive
// in bulk. When every outstanding error event of the session was covered by
// this zip, it also updates the session document (presumably clearing its
// error flag - confirm in SessionsAccessLayer).
func (eul *ErrorEventsUpdateListener) updateErrorEventsForSession(errorEventsdocIdList []string, errorEventsindexList []string, sessionErrorEvents []es.ErrorEventsResponse, sessionErrorEventsWithZipName []es.ErrorEventsResponse, alfredSessionRecordingEvent ingester.SessionUploadRequest, sessionUploadIndex string) error {
	var err error
	retryFunc := func() error {
		err = eul.errorEventsAccessLayer.UpdateErrorEventsInActiveBulk(errorEventsdocIdList, errorEventsindexList)
		return err
	}
	err = utils.RetryFunctionWithError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
	if err != nil {
		log.Error("error removing error events from session id", zap.String("session_id", alfredSessionRecordingEvent.BaseAttributes.SessionId), zap.String("zipName", alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId), zap.Error(err))
		return err
	}
	// Equal lengths mean every outstanding error event belonged to this zip.
	// The index pattern is wildcarded because session documents can live in
	// suffixed indices.
	if len(sessionErrorEvents) == len(sessionErrorEventsWithZipName) {
		retryFunc = func() error {
			err = eul.sessionsAccessLayer.UpdateSessionErrorEventsWithSessionId([]string{alfredSessionRecordingEvent.BaseAttributes.SessionId}, []string{sessionUploadIndex + "*"}, false)
			return err
		}
		err = utils.RetryFunctionWithError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
		if err != nil {
			log.Error("error updating session for no error events", zap.String("session_id", alfredSessionRecordingEvent.BaseAttributes.SessionId), zap.String("zipName", alfredSessionRecordingEvent.SessionUploadEventAttributes.EventId), zap.Error(err))
			return err
		}
	}
	return nil
}

View File

@@ -0,0 +1,174 @@
package listener
import (
"alfred/cmd/collector/helper"
"alfred/config"
"alfred/internal/metrics"
"alfred/model/ingester"
"alfred/pkg/cache"
"alfred/pkg/kafka"
kafka2 "alfred/pkg/kafka/produce"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
"os"
"time"
)
// EventIngestListener consumes app analytics events from Kafka and indexes
// them into Elasticsearch through a size-bounded goroutine group.
type EventIngestListener struct {
	ConsumerGroup           sarama.ConsumerGroup
	eventsAccessLayer       repositoryAccessLayer.EventsAccessLayer
	appFragmentAccessLayer  repositoryAccessLayer.AppFragmentsAccessLayer
	cacheClientForFragments cache.ConfigClientInterface
	kafkaProducer           kafka2.KProducer
	goroutineGroup          *errgroup.Group // bounds the number of concurrent ES uploads
}

// appEvent is the Elasticsearch document shape: one event flattened together
// with its session base attributes and the ingestion timestamp.
type appEvent struct {
	BaseAttributes    ingester.BaseAttributes  `json:"base_attributes,omitempty"`
	MetricsAttributes ingester.EventAttributes `json:"events,omitempty"`
	CreatedAt         int64                    `json:"created_at,omitempty"`
}
// NewEventIngestListener builds the sarama consumer group for the app-event
// ingestion topic and a limit-bounded errgroup for concurrent Elasticsearch
// uploads. The process exits if the consumer cannot be created.
func NewEventIngestListener(repositories *repositoryAccessLayer.RepositoryAccessLayer, kafkaProducer kafka2.KProducer) *EventIngestListener {
	eventUploadListener, err := kafka.SaramaKafkaConsumer(
		config.GetCollectorConfig().BaseConfig.Env,
		config.GetCollectorConfig().KafkaConfig.BaseConfig,
		config.GetCollectorConfig().KafkaConfig.EventIngestionTopicGroupId)
	if err != nil {
		log.Error("event ingest listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	group := new(errgroup.Group)
	// SetLimit caps concurrent Go() calls; further Go() calls block until a
	// slot frees up, which back-pressures ConsumeClaim.
	group.SetLimit(config.GetCollectorConfig().EventListenerGoroutineGroupLimit)
	return &EventIngestListener{
		ConsumerGroup:           eventUploadListener,
		eventsAccessLayer:       repositories.EventsAccessLayer,
		appFragmentAccessLayer:  repositories.AppFragmentsAccessLayer,
		cacheClientForFragments: cache.NewCacheConfig(),
		kafkaProducer:           kafkaProducer,
		goroutineGroup:          group,
	}
}
// Setup is the sarama hook invoked when a consumer group session begins.
func (ks *EventIngestListener) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("setup app event ingest kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// Cleanup is the sarama hook invoked when a consumer group session ends.
func (ks *EventIngestListener) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	log.Info("cleanup app event ingest kafka listener", zap.String("memberId", consumerGroupSession.MemberID()))
	return nil
}

// ConsumeClaim drains one partition claim: each message is processed and then
// marked consumed regardless of processing outcome.
func (ks *EventIngestListener) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	for message := range consumerGroupClaim.Messages() {
		ks.processEvent(message)
		consumerGroupSession.MarkMessage(message, "")
	}
	return nil
}
// processEvent decodes a batch of app events from Kafka and fans each event
// out to Elasticsearch via the bounded goroutine group, skipping ignored
// event types.
func (ks *EventIngestListener) processEvent(message *sarama.ConsumerMessage) {
	// Never let one bad message crash the consumer group; log the panic
	// instead of swallowing it silently.
	defer func() {
		if r := recover(); r != nil {
			log.Error("panic while processing app event", zap.Any("recover", r))
		}
	}()
	client := helper.IdentifyClientThroughHeader(message.Headers)
	index := config.GetCollectorConfig().ElasticSearchConfig.AppEventIngestionIndexClientMap[client]
	var appEvents ingester.AppEvent
	if err := json.Unmarshal(message.Value, &appEvents); err != nil {
		metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
		log.Error("json unmarshalling failed for app event", zap.Error(err))
		// Bug fix: bail out here - previously the zero-value payload fell
		// through and was additionally counted as a success below.
		return
	}
	metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
	for _, event := range appEvents.Events {
		appEvents.BaseAttributes.SessionId = event.SessionId
		appEvent := appEvent{
			appEvents.BaseAttributes,
			event,
			utils.GetCurrentTimeInMillis(),
		}
		if utils.Contains(config.GetCollectorConfig().IgnoredEventTypes, string(appEvent.MetricsAttributes.EventType)) {
			continue
		}
		// Bug fix: shadow the loop variable so the closure below captures this
		// iteration's event (required for pre-Go 1.22 loop semantics).
		event := event
		ks.goroutineGroup.Go(func() error {
			retryFunc := func() (interface{}, error) {
				esStatusCode, err := ks.uploadEventToElasticSearch(appEvent, index)
				return esStatusCode, err
			}
			_, err := utils.RetryFunctionWithResponseAndError(retryFunc, config.GetCollectorConfig().MaxRetry, config.GetCollectorConfig().InitialDelayInSeconds*time.Second)
			if err != nil {
				// Upload failed after all retries; the event is dropped after
				// logging. Publishing to the error-events topic is disabled.
				log.Error("error ingesting event ingestion data to elasticsearch",
					zap.String("session id", event.SessionId), zap.Error(err))
			}
			return nil
		})
	}
	// NOTE: fragment ingestion for the Navi user app
	// (uploadFragmentsToElasticSearch) is intentionally disabled for now.
}
// uploadEventToElasticSearch serialises a single app event and indexes it
// into the given index, returning the ElasticSearch status code and error.
func (ks *EventIngestListener) uploadEventToElasticSearch(appEvent appEvent, index string) (int, error) {
	payload, err := json.Marshal(appEvent)
	if err != nil {
		log.Error("error in serializing app events", zap.Error(err))
		return 0, err
	}
	esStatusCode, err := ks.eventsAccessLayer.CreateEventIngester(string(payload), index)
	if err != nil {
		log.Error("error ingesting app events data to elasticsearch", zap.Error(err))
	}
	return esStatusCode, err
}
// uploadFragmentsToElasticSearch indexes every fragment name that is not
// already cached, then caches it with a TTL so repeated events do not
// re-ingest the same fragment. A failure on one fragment no longer aborts
// the remaining fragments (the original returned early).
func (ks *EventIngestListener) uploadFragmentsToElasticSearch(fragmentList []string, screenName, vertical string, index string) {
	for _, fragmentName := range fragmentList {
		if _, found := ks.cacheClientForFragments.Get(fragmentName); found {
			continue // already ingested recently
		}
		fragmentModel := &ingester.FragmentModel{
			FragmentAttributes: ingester.FragmentAttributes{
				FragmentName: fragmentName,
				ScreenName:   screenName,
				Vertical:     vertical,
			},
		}
		fragment, err := json.Marshal(fragmentModel)
		if err != nil {
			log.Error("error in serializing app fragments", zap.Error(err))
			continue
		}
		if err := ks.appFragmentAccessLayer.CreateFragment(*fragmentModel, string(fragment), index); err != nil {
			log.Error("error ingesting app fragments data to elasticsearch", zap.Error(err))
			continue
		}
		// Cache only after a successful ingest so failed fragments retry later.
		ks.cacheClientForFragments.PutWithTtl(fragmentName, screenName, config.GetCollectorConfig().CacheTimeForFragmentsIngestion)
	}
}

View File

@@ -0,0 +1,160 @@
package listener
import (
"alfred/mocks"
"alfred/pkg/log"
"errors"
"github.com/Shopify/sarama"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"testing"
)
// Verifies that Setup logs the member id and returns no error.
func TestEventIngestListener_Setup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &EventIngestListener{}
	err := listener.Setup(sessionMock)
	assert.Nil(t, err, "err should be nil")
	sessionMock.AssertExpectations(t)
}
// Verifies that Cleanup logs the member id and returns no error.
func TestEventIngestListener_Cleanup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &EventIngestListener{}
	err := listener.Cleanup(sessionMock)
	assert.Nil(t, err, "err should be nil")
	sessionMock.AssertExpectations(t)
}
// Exercises ConsumeClaim with a message that is not valid JSON: the message
// must still be marked as consumed and ConsumeClaim must return nil.
func TestEventIngestListener_ConsumeClaimWithoutEvents(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	cacheClientMock := &mocks.MockCacheClient{}
	// A closed, pre-filled channel makes ConsumeClaim's range loop terminate.
	messages := make(chan *sarama.ConsumerMessage, 1)
	messages <- &sarama.ConsumerMessage{
		Value: []byte("Test Message 1"),
	}
	close(messages)
	ks := &EventIngestListener{consumerGroupMock, elasticSearchClientMock, cacheClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messages)).Once()
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}
// TestEventIngestListener_ConsumeClaimWithEvents_Success drives ConsumeClaim
// with one well-formed app-event payload and expects the event to be ingested
// via CreateEventIngester and the message to be marked as consumed.
func TestEventIngestListener_ConsumeClaimWithEvents_Success(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	cacheClientMock := &mocks.MockCacheClient{}
	messagesChan := make(chan *sarama.ConsumerMessage, 1)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 1)
	// Valid AppEvent payload containing a single SCROLL_EVENT entry.
	mockMessage1 := &sarama.ConsumerMessage{
		Key: []byte("Key1"),
		Value: []byte("{\n \"base_attributes\": {\n \"app_version_code\": \"83\",\n \"app_version_name\": \"2.3.10\",\n \"client_ts\": 1695293792528,\n \"device_id\": \"c9f33448e4a538af\",\n \"device_model\": \"sdk_gphone_arm64\",\n \"device_manufacturer\": \"Google\",\n \"app_os\": \"Android\",\n \"os_version\": \"30\",\n \"customer_id\": \"92226097-2c0b-4b19-940d-b3432953c9dc\",\n \"carrier_name\": \"Android\",\n \"metadata\": {\n \"agent_email_id\": \"girish.s@navi.com\",\n \"code_push_version\": \"2.3.10\",\n \"phone_number\": \"8757641020\"\n },\n \"session_time_stamp\": 1695293792528,\n \"event_timestamp\": 1695293827257,\n \"session_id\": \"29d67a40-3141-4660-a33e-4e7e27e333b7ALFRED_SESSION_ID\"\n },\n \"events\": [\n {\n \"session_id\": \"29d67a40-3141-4660-a33e-4e7e27e333b7ALFRED_SESSION_ID\",\n \"screen_name\": \"Cosmos\",\n \"module_name\": \"Cosmos\",\n \"event_name\": \"SCROLL_EVENT\",\n \"event_timestamp\": 1695293800233,\n \"attributes\": {\n \"END_X\": \"182.98828\",\n \"END_Y\": \"2100.9155\",\n \"START_X\": \"182.98828\",\n \"START_Y\": \"2100.9155\"\n },\n \"event_type\": \"SCROLL_EVENT\"\n }\n ]\n}"),
		Topic: "Topic1",
		Partition: 0,
		Offset: 100,
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	// Relay into a second channel and close it so the range loop inside
	// ConsumeClaim terminates.
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	ks := &EventIngestListener{consumerGroupMock, elasticSearchClientMock, cacheClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	elasticSearchClientMock.On("CreateEventIngester", mock.Anything, mock.Anything).Return(nil)
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}
// TestEventIngestListener_ConsumeClaimWithEvents_Failure drives ConsumeClaim
// with a valid payload while CreateEventIngester fails; ConsumeClaim must
// still mark the message and return nil (errors are logged, not propagated).
func TestEventIngestListener_ConsumeClaimWithEvents_Failure(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	cacheClientMock := &mocks.MockCacheClient{}
	expectedError := errors.New("mocked error")
	messagesChan := make(chan *sarama.ConsumerMessage, 1)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 1)
	// Valid AppEvent payload containing a single SCROLL_EVENT entry.
	mockMessage1 := &sarama.ConsumerMessage{
		Key: []byte("Key1"),
		Value: []byte("{\n \"base_attributes\": {\n \"app_version_code\": \"83\",\n \"app_version_name\": \"2.3.10\",\n \"client_ts\": 1695293792528,\n \"device_id\": \"c9f33448e4a538af\",\n \"device_model\": \"sdk_gphone_arm64\",\n \"device_manufacturer\": \"Google\",\n \"app_os\": \"Android\",\n \"os_version\": \"30\",\n \"customer_id\": \"92226097-2c0b-4b19-940d-b3432953c9dc\",\n \"carrier_name\": \"Android\",\n \"metadata\": {\n \"agent_email_id\": \"girish.s@navi.com\",\n \"code_push_version\": \"2.3.10\",\n \"phone_number\": \"8757641020\"\n },\n \"session_time_stamp\": 1695293792528,\n \"event_timestamp\": 1695293827257,\n \"session_id\": \"29d67a40-3141-4660-a33e-4e7e27e333b7ALFRED_SESSION_ID\"\n },\n \"events\": [\n {\n \"session_id\": \"29d67a40-3141-4660-a33e-4e7e27e333b7ALFRED_SESSION_ID\",\n \"screen_name\": \"Cosmos\",\n \"module_name\": \"Cosmos\",\n \"event_name\": \"SCROLL_EVENT\",\n \"event_timestamp\": 1695293800233,\n \"attributes\": {\n \"END_X\": \"182.98828\",\n \"END_Y\": \"2100.9155\",\n \"START_X\": \"182.98828\",\n \"START_Y\": \"2100.9155\"\n },\n \"event_type\": \"SCROLL_EVENT\"\n }\n ]\n}"),
		Topic: "Topic1",
		Partition: 0,
		Offset: 100,
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	// Relay into a second channel and close it so the range loop inside
	// ConsumeClaim terminates.
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	ks := &EventIngestListener{consumerGroupMock, elasticSearchClientMock, cacheClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	elasticSearchClientMock.On("CreateEventIngester", mock.Anything, mock.Anything).Return(expectedError)
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}

View File

@@ -0,0 +1,87 @@
package listener
import (
"alfred/config"
"alfred/internal/metrics"
"alfred/model/ingester"
"alfred/pkg/kafka"
"alfred/pkg/log"
"encoding/json"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"os"
)
// MetricUploadListener consumes app performance-metric payloads from Kafka
// and republishes each metric entry via the app metric publisher.
type MetricUploadListener struct {
	consumerGroup      sarama.ConsumerGroup        // Kafka consumer group for the metric-ingestion topic
	appMetricPublisher metrics.AppMetricsPublisher // sink that publishes metrics (to Prometheus, per the call site)
}
// performanceMetrics pairs a batch's shared base attributes with a single
// metric entry so each entry can be published independently.
type performanceMetrics struct {
	BaseAttributes    ingester.BaseAttributes    `json:"base_attributes,omitempty"`
	MetricsAttributes ingester.MetricsAttributes `json:"metrics_attributes,omitempty"`
}
// NewMetricUploadListener builds a MetricUploadListener backed by a Sarama
// consumer group for the metric-ingestion topic. The process exits if the
// consumer cannot be created, since the listener is mandatory.
func NewMetricUploadListener() *MetricUploadListener {
	cfg := config.GetCollectorConfig() // read the config once instead of repeated lookups
	sessionUploadListener, err := kafka.SaramaKafkaConsumer(
		cfg.BaseConfig.Env,
		cfg.KafkaConfig.BaseConfig,
		cfg.KafkaConfig.MetricIngestionTopicGroupId)
	if err != nil {
		log.Error("metrics upload listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	return &MetricUploadListener{
		consumerGroup:      sessionUploadListener,
		appMetricPublisher: metrics.NewAppMetricPublisher(),
	}
}
// Setup is invoked by Sarama before claims are assigned; it only records the
// consumer-group member id for observability.
func (ks *MetricUploadListener) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("setup metric upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// Cleanup is invoked by Sarama after claims are released; it only records the
// consumer-group member id for observability.
func (ks *MetricUploadListener) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("cleanup metric upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// ConsumeClaim drains the claim's message channel, processing each message
// and marking it consumed so the group's offsets advance.
func (ks *MetricUploadListener) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	messages := consumerGroupClaim.Messages()
	for message := range messages {
		ks.processMessage(message)
		consumerGroupSession.MarkMessage(message, "")
	}
	return nil
}
// processMessage decodes a performance-metrics payload and publishes each
// metric entry to the app metric publisher. A malformed payload is counted
// as a failure and skipped.
func (ks *MetricUploadListener) processMessage(message *sarama.ConsumerMessage) {
	defer func() {
		// Log instead of silently swallowing the panic.
		if r := recover(); r != nil {
			log.Error("panic recovered in metric upload listener", zap.Any("recover", r))
		}
	}()
	var appPerformanceMetrics ingester.AppMetrics
	if err := json.Unmarshal(message.Value, &appPerformanceMetrics); err != nil {
		metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
		log.Error("json unmarshalling failed for app performance metrics event", zap.Error(err))
		// Without a decodable payload there is nothing to publish; the
		// original fell through and also counted this as a success.
		return
	}
	metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
	for i := range appPerformanceMetrics.MetricsAttributes {
		ks.uploadMetricsToPrometheus(performanceMetrics{
			appPerformanceMetrics.BaseAttributes,
			appPerformanceMetrics.MetricsAttributes[i],
		})
	}
}
// uploadMetricsToPrometheus forwards a single metric entry, together with the
// batch's base attributes, to the app metric publisher.
func (ks *MetricUploadListener) uploadMetricsToPrometheus(performanceMetrics performanceMetrics) {
	ks.appMetricPublisher.PublishMetrics(
		performanceMetrics.MetricsAttributes.Attributes,
		performanceMetrics.MetricsAttributes.EventType,
		performanceMetrics.BaseAttributes,
	)
}

View File

@@ -0,0 +1,118 @@
package listener
import (
"alfred/mocks"
"alfred/pkg/log"
"github.com/Shopify/sarama"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"testing"
)
// Verifies that Setup logs the member id and returns no error.
func TestMetricUploadListener_Setup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &MetricUploadListener{}
	err := listener.Setup(sessionMock)
	assert.Nil(t, err, "err should be nil")
	sessionMock.AssertExpectations(t)
}
// Verifies that Cleanup logs the member id and returns no error.
func TestMetricUploadListener_Cleanup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &MetricUploadListener{}
	err := listener.Cleanup(sessionMock)
	assert.Nil(t, err, "err should be nil")
	sessionMock.AssertExpectations(t)
}
// Exercises ConsumeClaim with a message that is not valid JSON: nothing is
// published, the message is still marked consumed, and nil is returned.
func TestMetricUploadListener_ConsumeClaimWithoutMetrics(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	appMetricPublisherMock := &mocks.MockAppMetricsPublisher{}
	// A closed, pre-filled channel makes ConsumeClaim's range loop terminate.
	messages := make(chan *sarama.ConsumerMessage, 1)
	messages <- &sarama.ConsumerMessage{
		Value: []byte("Test Message 1"),
	}
	close(messages)
	ks := &MetricUploadListener{consumerGroupMock, elasticSearchClientMock, appMetricPublisherMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messages)).Once()
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
	appMetricPublisherMock.AssertExpectations(t)
}
// TestMetricUploadListener_ConsumeClaimWithMetrics drives ConsumeClaim with a
// valid metrics payload and expects exactly one PublishMetrics call plus the
// message being marked as consumed.
func TestMetricUploadListener_ConsumeClaimWithMetrics(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	appMetricPublisherMock := &mocks.MockAppMetricsPublisher{}
	messagesChan := make(chan *sarama.ConsumerMessage, 1)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 1)
	// Valid AppMetrics payload containing a single API_METRICS entry.
	mockMessage1 := &sarama.ConsumerMessage{
		Key: []byte("Key1"),
		Value: []byte("{\n \"base_attributes\": {\n \"app_version_code\": \"294\",\n \"app_version_name\": \"3.3.1-debug\",\n \"device_id\": \"a60f2186f3bfb31f\",\n \"device_model\": \"LE2101\",\n \"device_manufacturer\": \"OnePlus\",\n \"app_os\": \"Android\",\n \"os_version\": \"31\",\n \"latitude\": 20.34,\n \"longitude\": 18.23,\n \"customer_id\": \"b2dbbab6-5b82-4a4f-8afb-57490d3ce1bb\",\n \"carrier_name\": \"JIO\"\n },\n \"metrics_attributes\": [\n {\n \"event_id\": \"qwe\",\n \"event_name\": \"41dsfa\",\n \"event_timestamp\": 1676371697152,\n \"session_id\": \"41dsfa\",\n \"attributes\": {\n \"bytes_received\": 200,\n \"bytes_sent\": 200,\n \"duration_in_ms\": 3,\n \"end_time\": 1676371697155,\n \"error_message\": \"qwe\",\n \"error_type\": \"qwe\",\n \"method\": \"qwe\",\n \"response_code\": 200,\n \"start_time\": 1676371697152,\n \"url\": \"qwe\"\n },\n \"event_type\": \"API_METRICS\"\n }\n ]\n}"),
		Topic: "Topic1",
		Partition: 0,
		Offset: 100,
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	// Relay into a second channel and close it so the range loop inside
	// ConsumeClaim terminates.
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	ks := &MetricUploadListener{consumerGroupMock, elasticSearchClientMock, appMetricPublisherMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	appMetricPublisherMock.On("PublishMetrics", mock.Anything, mock.Anything, mock.Anything).Once()
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	appMetricPublisherMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}

View File

@@ -0,0 +1,141 @@
package listener
import (
"alfred/cmd/collector/helper"
"alfred/config"
"alfred/internal/metrics"
"alfred/model/ingester"
"alfred/pkg/kafka"
kafka2 "alfred/pkg/kafka/produce"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
"os"
"time"
)
// SessionUploadListener consumes session-upload payloads from Kafka and
// ingests them into ElasticSearch on a bounded pool of goroutines.
type SessionUploadListener struct {
	ConsumerGroup          sarama.ConsumerGroup                         // Kafka consumer group for the session-upload topic
	sessionsAccessLayer    repositoryAccessLayer.SessionsAccessLayer    // ElasticSearch access for session documents
	errorEventsAccessLayer repositoryAccessLayer.ErrorEventsAccessLayer // access layer for error events (usage not visible here)
	KafkaProducer          kafka2.KProducer                             // producer used by the (currently commented-out) error publishing
	goroutineGroup         *errgroup.Group                              // bounds concurrent ingestion goroutines
}
// NewSessionUploadListener builds a SessionUploadListener backed by a Sarama
// consumer group for the session-upload topic. The process exits if the
// consumer cannot be created, since the listener is mandatory.
func NewSessionUploadListener(repositories *repositoryAccessLayer.RepositoryAccessLayer, kafkaProducer kafka2.KProducer) *SessionUploadListener {
	cfg := config.GetCollectorConfig() // read the config once instead of repeated lookups
	sessionUploadListener, err := kafka.SaramaKafkaConsumer(
		cfg.BaseConfig.Env,
		cfg.KafkaConfig.BaseConfig,
		cfg.KafkaConfig.SessionUploadTopicGroupId,
	)
	if err != nil {
		log.Error("session upload listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	// Bound the number of concurrent per-message ingestion goroutines.
	group := new(errgroup.Group)
	group.SetLimit(cfg.SessionUploadListenerGoroutineGroupLimit)
	return &SessionUploadListener{
		ConsumerGroup:          sessionUploadListener,
		sessionsAccessLayer:    repositories.SessionsAccessLayer,
		errorEventsAccessLayer: repositories.ErrorEventsAccessLayer,
		KafkaProducer:          kafkaProducer,
		goroutineGroup:         group,
	}
}
// Setup is invoked by Sarama before claims are assigned; it only records the
// consumer-group member id for observability.
func (ks *SessionUploadListener) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("setup session upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// Cleanup is invoked by Sarama after claims are released; it only records the
// consumer-group member id for observability.
func (ks *SessionUploadListener) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("cleanup session upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// ConsumeClaim drains the claim's message channel, processing each message
// and marking it consumed so the group's offsets advance.
func (ks *SessionUploadListener) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	messages := consumerGroupClaim.Messages()
	for message := range messages {
		ks.processMessage(message)
		consumerGroupSession.MarkMessage(message, "")
	}
	return nil
}
// processMessage ingests one session-upload payload into ElasticSearch on a
// bounded worker goroutine, retrying transient failures.
func (ks *SessionUploadListener) processMessage(message *sarama.ConsumerMessage) {
	// One goroutine per message, bounded by errgroup limit.
	ks.goroutineGroup.Go(func() error {
		// The panic guard must live inside the goroutine: a deferred recover
		// in processMessage runs on the caller's goroutine and cannot catch
		// panics raised here (the original had exactly that defect).
		defer func() {
			if r := recover(); r != nil {
				log.Error("panic recovered in session upload listener", zap.Any("recover", r))
			}
		}()
		cfg := config.GetCollectorConfig()
		client := helper.IdentifyClientThroughHeader(message.Headers)
		sessionUploadIndex := cfg.ElasticSearchConfig.AppSessionUploadIndexClientMap[client]
		var uploadReq ingester.SessionUploadRequest
		if err := json.Unmarshal(message.Value, &uploadReq); err != nil {
			metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
			log.Error("json unmarshalling failed while ingesting session upload data to elasticsearch", zap.Error(err))
			return nil
		}
		uploadReq.CreatedAt = utils.GetCurrentTimeInMillis()
		metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
		// Retry ElasticSearch ingestion with the configured backoff.
		retryFn := func() (interface{}, error) {
			return ks.sessionsAccessLayer.UploadSession(uploadReq, sessionUploadIndex)
		}
		_, err := utils.RetryFunctionWithResponseAndError(retryFn, cfg.MaxRetry, cfg.InitialDelayInSeconds*time.Second)
		if err != nil {
			log.Error("error ingesting session upload data to elasticsearch", zap.Error(err))
			// NOTE: dead-letter publishing to the error-events kafka topic is
			// currently disabled; see repository history for the original code.
			return nil
		}
		// NOTE: publishing to the error-events-update topic on 201 Created is
		// currently disabled as well.
		return nil
	})
}

View File

@@ -0,0 +1,153 @@
package listener
import (
"alfred/mocks"
"alfred/pkg/log"
"errors"
"github.com/Shopify/sarama"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"testing"
)
// TestSessionUploadListener_Setup verifies that Setup logs the member id and
// returns no error. (Renamed from TestSessionUploadListenerr_Setup: typo.)
func TestSessionUploadListener_Setup(t *testing.T) {
	log.InitLogger()
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	testMemberID := "testMemberID"
	ks := &SessionUploadListener{}
	consumerGroupSessionMock.On("MemberID").Return(testMemberID)
	err := ks.Setup(consumerGroupSessionMock)
	assert.Nil(t, err, "err should be nil")
	consumerGroupSessionMock.AssertExpectations(t)
}
// Verifies that Cleanup logs the member id and returns no error.
func TestSessionUploadListener_Cleanup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &SessionUploadListener{}
	err := listener.Cleanup(sessionMock)
	assert.Nil(t, err, "err should be nil")
	sessionMock.AssertExpectations(t)
}
// Exercises ConsumeClaim with a message that is not valid JSON: nothing is
// uploaded, the message is still marked consumed, and nil is returned.
func TestSessionUploadListener_ConsumeClaimWithoutEvents(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	// A closed, pre-filled channel makes ConsumeClaim's range loop terminate.
	messages := make(chan *sarama.ConsumerMessage, 2)
	messages <- &sarama.ConsumerMessage{
		Value: []byte("Test Message 1"),
	}
	close(messages)
	ks := &SessionUploadListener{consumerGroupMock, elasticSearchClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messages)).Once()
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}
// TestSessionUploadListener_ConsumeClaimWithEvents_Success drives ConsumeClaim
// with a session-upload payload and expects UploadSession to succeed and the
// message to be marked as consumed.
func TestSessionUploadListener_ConsumeClaimWithEvents_Success(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	messagesChan := make(chan *sarama.ConsumerMessage, 2)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 2)
	// Double-encoded session-upload payload (a JSON string containing JSON).
	mockMessage1 := &sarama.ConsumerMessage{
		Key: []byte("Key1"),
		Value: []byte("\"{\\n \\\"base_attributes\\\" : {\\n \\\"app_version_code\\\" : \\\"332\\\",\\n \\\"app_version_name\\\" : \\\"3.4.11-debug\\\",\\n \\\"client_ts\\\" : 1690881905125,\\n \\\"device_id\\\" : \\\"d514d237b3ff5d98\\\",\\n \\\"device_model\\\" : \\\"sdk_gphone_arm64\\\",\\n \\\"device_manufacturer\\\" : \\\"Google\\\",\\n \\\"app_os\\\" : \\\"Android\\\",\\n \\\"os_version\\\" : \\\"30\\\",\\n \\\"latitude\\\" : 37.421997,\\n \\\"longitude\\\" : -122.084,\\n \\\"customer_id\\\" : \\\"d514d237b3ff5d98\\\",\\n \\\"carrier_name\\\" : \\\"Android\\\",\\n \\\"session_time_stamp\\\" : 1690529859742,\\n \\\"event_timestamp\\\" : 1690881966898,\\n \\\"session_id\\\" : \\\"0912105e-a4e3-46e6-9109-817c14c1c544ALFRED_SESSION_ID\\\"\\n },\\n \\\"session_upload_event_attributes\\\" : {\\n \\\"beginning_device_attributes\\\" : {\\n \\\"battery\\\" : 100,\\n \\\"storage\\\" : 797028350\\n },\\n \\\"end_device_attributes\\\" : {\\n \\\"battery\\\" : 100,\\n \\\"storage\\\" : 797016060,\\n \\\"memory\\\" : 48.128532\\n },\\n \\\"event_id\\\" : \\\"e238f0ac-c83e-4ee2-bd42-8a4e205b4d28ALFRED_EVENT_ID\\\"\\n }\\n }\""),
		Topic: "Topic1",
		Partition: 0,
		Offset: 100,
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	// Relay into a second channel and close it so the range loop inside
	// ConsumeClaim terminates.
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	ks := &SessionUploadListener{consumerGroupMock, elasticSearchClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	elasticSearchClientMock.On("UploadSession", mock.Anything, mock.Anything, mock.Anything).Return(nil)
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}
// TestSessionUploadListener_ConsumeClaimWithEvents_Failure drives ConsumeClaim
// while UploadSession fails; the message must still be marked consumed and
// ConsumeClaim must return nil (errors are logged, not propagated).
func TestSessionUploadListener_ConsumeClaimWithEvents_Failure(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	expectedError := errors.New("mocked error")
	messagesChan := make(chan *sarama.ConsumerMessage, 2)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 2)
	// Double-encoded session-upload payload (a JSON string containing JSON).
	mockMessage1 := &sarama.ConsumerMessage{
		Key: []byte("Key1"),
		Value: []byte("\"{\\n \\\"base_attributes\\\" : {\\n \\\"app_version_code\\\" : \\\"332\\\",\\n \\\"app_version_name\\\" : \\\"3.4.11-debug\\\",\\n \\\"client_ts\\\" : 1690881905125,\\n \\\"device_id\\\" : \\\"d514d237b3ff5d98\\\",\\n \\\"device_model\\\" : \\\"sdk_gphone_arm64\\\",\\n \\\"device_manufacturer\\\" : \\\"Google\\\",\\n \\\"app_os\\\" : \\\"Android\\\",\\n \\\"os_version\\\" : \\\"30\\\",\\n \\\"latitude\\\" : 37.421997,\\n \\\"longitude\\\" : -122.084,\\n \\\"customer_id\\\" : \\\"d514d237b3ff5d98\\\",\\n \\\"carrier_name\\\" : \\\"Android\\\",\\n \\\"session_time_stamp\\\" : 1690529859742,\\n \\\"event_timestamp\\\" : 1690881966898,\\n \\\"session_id\\\" : \\\"0912105e-a4e3-46e6-9109-817c14c1c544ALFRED_SESSION_ID\\\"\\n },\\n \\\"session_upload_event_attributes\\\" : {\\n \\\"beginning_device_attributes\\\" : {\\n \\\"battery\\\" : 100,\\n \\\"storage\\\" : 797028350\\n },\\n \\\"end_device_attributes\\\" : {\\n \\\"battery\\\" : 100,\\n \\\"storage\\\" : 797016060,\\n \\\"memory\\\" : 48.128532\\n },\\n \\\"event_id\\\" : \\\"e238f0ac-c83e-4ee2-bd42-8a4e205b4d28ALFRED_EVENT_ID\\\"\\n }\\n }\""),
		Topic: "Topic1",
		Partition: 0,
		Offset: 100,
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	// Relay into a second channel and close it so the range loop inside
	// ConsumeClaim terminates.
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	ks := &SessionUploadListener{consumerGroupMock, elasticSearchClientMock}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	elasticSearchClientMock.On("UploadSession", mock.Anything, mock.Anything, mock.Anything).Return(expectedError)
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
}

View File

@@ -0,0 +1,127 @@
package listener
import (
"alfred/config"
"alfred/internal/metrics"
"alfred/model/ingester"
"alfred/pkg/kafka"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"fmt"
"github.com/Shopify/sarama"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
"os"
"path/filepath"
)
// WebSessionUploadListener consumes web session payloads from Kafka, offloads
// large session data to S3 (see ingestWebSessionData), and indexes the
// remainder into ElasticSearch on a bounded pool of goroutines.
type WebSessionUploadListener struct {
	ConsumerGroup          sarama.ConsumerGroup                         // Kafka consumer group for the web-session-upload topic
	webSessionsAccessLayer repositoryAccessLayer.WebSessionsAccessLayer // ElasticSearch access for web session documents
	s3Client               s3.S3Client                                  // destination for gzipped session data blobs
	goroutineGroup         *errgroup.Group                              // bounds concurrent ingestion goroutines
}
// NewWebSessionUploadListener builds a WebSessionUploadListener backed by a
// Sarama consumer group for the web-session-upload topic. The process exits
// if the consumer cannot be created, since the listener is mandatory.
func NewWebSessionUploadListener(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client) *WebSessionUploadListener {
	cfg := config.GetCollectorConfig() // read the config once instead of repeated lookups
	sessionUploadListener, err := kafka.SaramaKafkaConsumer(
		cfg.BaseConfig.Env,
		cfg.KafkaConfig.BaseConfig,
		cfg.KafkaConfig.WebSessionUploadTopicGroupId,
	)
	if err != nil {
		log.Error("web session upload listener initialisation failed", zap.Error(err))
		os.Exit(1)
	}
	// Bound the number of concurrent per-message ingestion goroutines.
	group := new(errgroup.Group)
	group.SetLimit(cfg.WebSessionUploadListenerGoroutineGroupLimit)
	return &WebSessionUploadListener{
		ConsumerGroup:          sessionUploadListener,
		webSessionsAccessLayer: repositories.WebSessionsAccessLayer,
		s3Client:               s3Client,
		goroutineGroup:         group,
	}
}
// Setup is invoked by Sarama before claims are assigned; it only records the
// consumer-group member id for observability.
func (ks *WebSessionUploadListener) Setup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("setup web session upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// Cleanup is invoked by Sarama after claims are released; it only records the
// consumer-group member id for observability.
func (ks *WebSessionUploadListener) Cleanup(consumerGroupSession sarama.ConsumerGroupSession) error {
	memberID := consumerGroupSession.MemberID()
	log.Info("cleanup web session upload kafka listener", zap.String("memberId", memberID))
	return nil
}
// ConsumeClaim drains the claim's message channel, processing each message
// and marking it consumed so the group's offsets advance.
func (ks *WebSessionUploadListener) ConsumeClaim(consumerGroupSession sarama.ConsumerGroupSession, consumerGroupClaim sarama.ConsumerGroupClaim) error {
	messages := consumerGroupClaim.Messages()
	for message := range messages {
		ks.processMessage(message)
		consumerGroupSession.MarkMessage(message, utils.EMPTY)
	}
	return nil
}
// processMessage decodes a web-session payload and hands it to
// ingestWebSessionData on a bounded worker goroutine.
func (ks *WebSessionUploadListener) processMessage(message *sarama.ConsumerMessage) {
	ks.goroutineGroup.Go(func() error {
		// The panic guard must live inside the goroutine: a deferred recover
		// in processMessage runs on the caller's goroutine and cannot catch
		// panics raised here (the original had exactly that defect, and it
		// also swallowed the panic silently).
		defer func() {
			if r := recover(); r != nil {
				log.Error("panic recovered in web session upload listener", zap.Any("recover", r))
			}
		}()
		var eventUploadRequest ingester.WebSessionUploadRequest
		if err := json.Unmarshal(message.Value, &eventUploadRequest); err != nil {
			metrics.KafkaEventConsumptionEventFailureCounter.WithLabelValues(message.Topic).Inc()
			log.Error("json unmarshalling failed while ingesting web event upload data to elasticsearch", zap.Error(err))
			return nil
		}
		ks.ingestWebSessionData(eventUploadRequest)
		metrics.KafkaEventConsumptionEventSuccessCounter.WithLabelValues(message.Topic).Inc()
		return nil
	})
}
// ingestWebSessionData persists one web session upload: if the request carries
// raw session data, the data is written to a local gzip file, uploaded to the
// client's S3 bucket, and stripped from the document; the session metadata is
// then indexed into the client's Elasticsearch index.
func (ks *WebSessionUploadListener) ingestWebSessionData(eventUploadRequest ingester.WebSessionUploadRequest) {
	eventUploadRequest.CreatedAt = utils.GetCurrentTimeInMillis()
	clientName := eventUploadRequest.BaseAttributes.ProjectName
	// Per-client ES index; presumably empty for unconfigured clients — TODO confirm behavior for unknown project names.
	webSessionUploadIndex := config.GetCollectorConfig().ElasticSearchConfig.WebSessionUploadIndexClientMap[clientName]
	if (eventUploadRequest.SessionAttributes.Data != nil) && (len(eventUploadRequest.SessionAttributes.Data) > 0) {
		// The event id doubles as the temp-file and S3 object name.
		uuidFileName := eventUploadRequest.SessionAttributes.EventId
		filePath := filepath.Join(utils.TempDestinationFolder, uuidFileName)
		data, err := json.Marshal(eventUploadRequest.SessionAttributes.Data)
		if err != nil {
			log.Error("error in marshaling web sessions", zap.Error(err))
			return
		}
		err = s3.CreateFileFromByte(filePath, utils.GZExtension.String(), data)
		if err != nil {
			log.Error("error in creating file for web sessions", zap.Error(err))
			return
		}
		// Clean up the local gzip file regardless of the upload outcome.
		defer ks.deleteWebSessionFile(filePath + utils.GZExtension.String())
		webSessionUploadBucket := config.GetCollectorConfig().S3Config.WebSessionBucketClientMap[clientName]
		_, err = ks.s3Client.UploadFile(webSessionUploadBucket,
			utils.TempDestinationFolder, uuidFileName+utils.GZExtension.String(), uuidFileName+utils.GZExtension.String())
		if err != nil {
			log.Error("error in s3 upload of web sessions", zap.Error(err))
			return
		}
		// Raw payload now lives in S3; do not duplicate it in Elasticsearch.
		eventUploadRequest.SessionAttributes.Data = nil
	}
	err := ks.webSessionsAccessLayer.UploadWebSession(eventUploadRequest, webSessionUploadIndex)
	if err != nil {
		log.Error("error ingesting web sessions", zap.Error(err))
		return
	}
}
// deleteWebSessionFile best-effort removes the temporary gzip file created for
// an S3 upload; failures are logged but never propagated.
func (ks *WebSessionUploadListener) deleteWebSessionFile(fileName string) {
	if removeErr := os.Remove(fileName); removeErr != nil {
		log.Error(fmt.Sprintf("not able to delete the file %s", fileName), zap.Error(removeErr))
	}
}

View File

@@ -0,0 +1,77 @@
package listener
import (
	"testing"

	"alfred/mocks"
	"alfred/pkg/log"

	"github.com/Shopify/sarama"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"golang.org/x/sync/errgroup"
)
// TestWebSessionUploadListener_Setup verifies Setup logs the member id and
// returns nil.
func TestWebSessionUploadListener_Setup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &WebSessionUploadListener{}
	assert.Nil(t, listener.Setup(sessionMock), "err should be nil")
	sessionMock.AssertExpectations(t)
}
// TestWebSessionUploadListener_Cleanup verifies Cleanup logs the member id and
// returns nil.
func TestWebSessionUploadListener_Cleanup(t *testing.T) {
	log.InitLogger()
	sessionMock := &mocks.ConsumerGroupSession{}
	sessionMock.On("MemberID").Return("testMemberID")
	listener := &WebSessionUploadListener{}
	assert.Nil(t, listener.Cleanup(sessionMock), "err should be nil")
	sessionMock.AssertExpectations(t)
}
// TestWebSessionUploadListener_ConsumeClaimWithoutSessions pushes one opaque
// (non-JSON) message through ConsumeClaim and verifies it is marked consumed
// without touching the ES or S3 mocks.
func TestWebSessionUploadListener_ConsumeClaimWithoutSessions(t *testing.T) {
	log.InitLogger()
	elasticSearchClientMock := &mocks.MockElasticSearchClient{}
	s3ClientMock := &mocks.MockS3Client{}
	consumerGroupSessionMock := &mocks.ConsumerGroupSession{}
	consumerGroupClaimMock := &mocks.ConsumerGroupClaim{}
	consumerGroupMock := &mocks.MockConsumerGroup{}
	messagesChan := make(chan *sarama.ConsumerMessage, 1)
	messagesBufferedChan := make(chan *sarama.ConsumerMessage, 1)
	mockMessage1 := &sarama.ConsumerMessage{
		Value: []byte("Test Message 1"),
	}
	messagesChan <- mockMessage1
	close(messagesChan)
	go func() {
		for msg := range messagesChan {
			messagesBufferedChan <- msg
		}
		close(messagesBufferedChan)
	}()
	// Use named fields: the previous positional literal breaks whenever a field
	// is added to WebSessionUploadListener (it omitted goroutineGroup), and the
	// group must be non-nil for processMessage to actually run.
	ks := &WebSessionUploadListener{
		ConsumerGroup:          consumerGroupMock,
		webSessionsAccessLayer: elasticSearchClientMock,
		s3Client:               s3ClientMock,
		goroutineGroup:         new(errgroup.Group),
	}
	consumerGroupSessionMock.On("MarkMessage", mock.Anything, mock.Anything).Once()
	consumerGroupClaimMock.On("Messages").Return((<-chan *sarama.ConsumerMessage)(messagesBufferedChan)).Once()
	err := ks.ConsumeClaim(consumerGroupSessionMock, consumerGroupClaimMock)
	assert.Nil(t, err)
	// Wait for the async processing goroutine before asserting on the mocks.
	assert.Nil(t, ks.goroutineGroup.Wait())
	consumerGroupSessionMock.AssertExpectations(t)
	consumerGroupClaimMock.AssertExpectations(t)
	consumerGroupMock.AssertExpectations(t)
	elasticSearchClientMock.AssertExpectations(t)
	s3ClientMock.AssertExpectations(t)
}

View File

@@ -0,0 +1,39 @@
package main
import (
"alfred/cmd/collector/app"
"alfred/config"
"alfred/pkg/log"
"os"
"time"
ginzap "github.com/gin-contrib/zap"
"github.com/gin-gonic/gin"
"github.com/spf13/cobra"
_ "go.uber.org/automaxprocs"
"go.uber.org/zap"
)
// main boots the alfred-collector service: it initialises logging and config,
// then runs a cobra command that wires a zap-instrumented gin engine into the
// application server.
func main() {
	log.InitLogger("alfred-collector")
	config.LoadCollectorConfig()
	collectorCmd := &cobra.Command{
		Use:   "alfred-collector",
		Short: "alfred collector consumes events from kafka",
		Long:  "alfred collector receive all app events and ingest them into kafka",
		RunE: func(cmd *cobra.Command, args []string) error {
			engine := gin.New()
			// Structured request logging plus panic recovery via zap.
			engine.Use(
				ginzap.Ginzap(log.GetLogger(), time.RFC3339, true),
				ginzap.RecoveryWithZap(log.GetLogger(), true),
			)
			app.NewServer(engine).Start()
			return nil
		},
	}
	if err := collectorCmd.Execute(); err != nil {
		log.Error("alfred collector main command execution failed", zap.Error(err))
		os.Exit(1)
	}
}

View File

@@ -0,0 +1,65 @@
package external
import (
"alfred/internal/clients"
"alfred/pkg/log"
"alfred/utils"
"errors"
"go.uber.org/zap"
)
// CustomerService resolves customer identities (reference ids, device ids)
// through the customer-service and customer-federation HTTP clients.
type CustomerService struct {
	// customerFederationClient maps customer ids to device ids.
	customerFederationClient *clients.CustomerFederationClient
	// customerServiceClient resolves external ids / phone numbers to customer ids.
	customerServiceClient *clients.CustomerServiceClient
}

// NewCustomerService builds both downstream clients over the shared HTTP client.
func NewCustomerService(httpClient *clients.HttpClient) *CustomerService {
	return &CustomerService{
		customerFederationClient: &clients.CustomerFederationClient{HttpClient: httpClient.HttpClient},
		customerServiceClient:    &clients.CustomerServiceClient{HttpClient: httpClient.HttpClient},
	}
}
// GetCustomerRefId resolves an external customer id to the internal reference
// id. It returns utils.EMPTY when the id fails validation or the lookup
// errors; lookup failures are logged but intentionally not propagated.
func (s *CustomerService) GetCustomerRefId(externalId string) string {
	// Guard clause instead of nesting the happy path; empty returns use
	// utils.EMPTY for consistency with the rest of the service.
	if !utils.ValidateId(externalId, utils.EMPTY) {
		return utils.EMPTY
	}
	referenceId, err := s.customerServiceClient.GetCustomerRefId(externalId)
	if err != nil {
		log.Error("error getting customer reference id from external id", zap.Error(err))
		return utils.EMPTY
	}
	return referenceId
}
// GetDeviceIds resolves the device ids for a customer. Resolution order:
// an explicitly supplied deviceIdList wins; otherwise the customer id is used;
// otherwise the phone number is first mapped to a customer id. When none of
// the three inputs are set, (nil, nil) is returned.
func (s *CustomerService) GetDeviceIds(phoneNumber string, customerId string, deviceIdList []string) ([]string, error) {
	var deviceIds []string
	if len(deviceIdList) != 0 {
		deviceIds = deviceIdList
	} else if customerId != utils.EMPTY {
		deviceIdsLocal, err := s.customerFederationClient.GetDeviceIdFromCustomerId(customerId)
		if err != nil {
			log.Error("device Id not found for customer id", zap.String("customerId", customerId), zap.Error(err))
			return nil, err
		}
		if deviceIdsLocal == nil {
			// Log the actual failure instead of a nil err.
			notFoundErr := errors.New("deviceIds not found for given customerId")
			log.Error("device Id not found for customer id", zap.String("customerId", customerId), zap.Error(notFoundErr))
			return nil, notFoundErr
		}
		deviceIds = deviceIdsLocal
	} else if phoneNumber != utils.EMPTY {
		customerId, err := s.customerServiceClient.GetReferenceIdByPhoneNumber(phoneNumber)
		if err != nil {
			log.Error("customer Id not found for phone number", zap.String("customerId", customerId), zap.Error(err))
			return nil, err
		}
		deviceIdsLocal, err := s.customerFederationClient.GetDeviceIdFromCustomerId(customerId)
		if err != nil {
			log.Error("device Id not found for customer id", zap.String("customerId", customerId), zap.Error(err))
			return nil, err
		}
		if deviceIdsLocal == nil {
			// Previously this path returned (nil, nil), silently handing the
			// caller an empty result; surface an explicit error like the
			// customer-id branch does.
			notFoundErr := errors.New("deviceIds not found for given customerId")
			log.Error("device Id not found for customer id", zap.String("customerId", customerId), zap.Error(notFoundErr))
			return nil, notFoundErr
		}
		deviceIds = deviceIdsLocal
	}
	return deviceIds, nil
}

View File

@@ -0,0 +1,47 @@
package external
import (
"alfred/internal/clients"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/utils"
"go.uber.org/zap"
"strings"
)
// DataScienceService coordinates screenshot masking between S3 (pre-signed
// URLs, archive download) and the data-science masking HTTP service.
type DataScienceService struct {
	// dataScienceServiceClient calls the external masking API.
	dataScienceServiceClient *clients.DataScienceClient
	// s3Client generates pre-signed URLs and downloads masked archives.
	s3Client s3.S3Client
}

// NewDataScienceService builds the service over the shared HTTP and S3 clients.
func NewDataScienceService(httpClient *clients.HttpClient, s3Client s3.S3Client) *DataScienceService {
	return &DataScienceService{
		dataScienceServiceClient: &clients.DataScienceClient{HttpClient: httpClient.HttpClient},
		s3Client:                 s3Client,
	}
}
// MaskImages asks the data-science service to mask the screenshots contained
// in the uploaded zip. It hands the service a pre-signed download URL for the
// original archive and a pre-signed upload URL for the masked result.
func (ds *DataScienceService) MaskImages(screen, sessionUploadBucket, toBeMaskedFileName, maskedFileName string) (bool, error) {
	zipExt := utils.ZipExtension.String()
	// Pre-signed GET for the archive that needs masking.
	downloadUrl, err := ds.s3Client.PresignedDownloadUrl(sessionUploadBucket, toBeMaskedFileName+zipExt, toBeMaskedFileName+zipExt)
	if err != nil {
		log.Error("Error occurred while generating presigned url for download in data science service", zap.Error(err), zap.String("toBeMaskedFileName", toBeMaskedFileName))
		return false, err
	}
	// Pre-signed PUT for the masked output archive.
	uploadUrl, err := ds.s3Client.PreSignedUploadUrl(sessionUploadBucket, maskedFileName+zipExt, zipExt, utils.ZipContentType)
	if err != nil {
		log.Error("Error occurred while generating presigned url for upload in data science service", zap.Error(err), zap.String("maskedFileName", maskedFileName))
		return false, err
	}
	return ds.dataScienceServiceClient.MaskImages(screen, downloadUrl, uploadUrl)
}
// ReplaceOriginalImagesWithDsMaskedImages downloads the masked archive from S3
// and unzips it into the folder holding the original images, so the masked
// frames replace the originals in place.
func (ds *DataScienceService) ReplaceOriginalImagesWithDsMaskedImages(sessionUploadBucket, maskedFileName, pathToUnzippedFiles string) (bool, error) {
	zipExt := utils.ZipExtension.String()
	relativeUnzipPath := strings.TrimPrefix(pathToUnzippedFiles, utils.TempDestinationFolder+utils.FORWARD_SLASH)
	if _, err := ds.s3Client.DownloadAndUnzipFile(sessionUploadBucket, utils.TempDestinationFolder, maskedFileName+zipExt, maskedFileName+zipExt, relativeUnzipPath); err != nil {
		log.Error("Error occurred while downloading and unzipping the masked file in ds mask strategy", zap.String("maskedFileName", maskedFileName), zap.Error(err))
		return false, err
	}
	return true, nil
}

View File

@@ -0,0 +1,12 @@
package factory
import (
"alfred/cmd/core/app/service/interfaces"
"alfred/internal/clients"
"alfred/repositoryAccessLayer"
)
// AppClientFactory creates client-specific AppClient implementations. A
// factory must be Initialize'd with shared dependencies before CreateAppClient
// is called.
type AppClientFactory interface {
	// Initialize injects the repository layer and HTTP client the factory needs.
	Initialize(repositories *repositoryAccessLayer.RepositoryAccessLayer, httpClient *clients.HttpClient)
	// CreateAppClient returns the AppClient for the given client name, or an
	// error if this factory does not serve that client.
	CreateAppClient(client string) (interfaces.AppClient, error)
}

View File

@@ -0,0 +1,32 @@
package factory
import (
"alfred/cmd/core/app/service"
"alfred/cmd/core/app/service/interfaces"
"alfred/internal/clients"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
)
// CosmosAndroidAppSessionFactory builds the AppClient used for Cosmos Android
// app sessions.
type CosmosAndroidAppSessionFactory struct {
	Repositories *repositoryAccessLayer.RepositoryAccessLayer
	HttpClient   *clients.HttpClient
}

// CreateAppClient returns the Cosmos app client for utils.COSMOS and rejects
// every other client name.
func (f *CosmosAndroidAppSessionFactory) CreateAppClient(client string) (interfaces.AppClient, error) {
	if client != utils.COSMOS {
		return nil, errors.New("invalid client name for CosmosAndroidAppSessionFactory")
	}
	return f.CreateNewCosmosAndroidAppClient(), nil
}

// Initialize stores the shared dependencies used when constructing clients.
func (f *CosmosAndroidAppSessionFactory) Initialize(repositories *repositoryAccessLayer.RepositoryAccessLayer,
	httpClient *clients.HttpClient) {
	f.Repositories = repositories
	f.HttpClient = httpClient
}

// CreateNewCosmosAndroidAppClient constructs the concrete Cosmos session service.
func (f *CosmosAndroidAppSessionFactory) CreateNewCosmosAndroidAppClient() *service.AppSessionCosmos {
	return service.NewAppSessionCosmos(f.Repositories)
}

View File

@@ -0,0 +1,31 @@
package factory
import (
"alfred/cmd/core/app/service"
"alfred/cmd/core/app/service/interfaces"
"alfred/internal/clients"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
)
// NaviAndroidAppSessionFactory builds the AppClient used for Navi Android app
// sessions.
type NaviAndroidAppSessionFactory struct {
	Repositories *repositoryAccessLayer.RepositoryAccessLayer
	HttpClient   *clients.HttpClient
}

// CreateAppClient returns the Navi Android app client for utils.NAVI_USER_APP
// and rejects every other client name.
func (f *NaviAndroidAppSessionFactory) CreateAppClient(client string) (interfaces.AppClient, error) {
	if client != utils.NAVI_USER_APP {
		return nil, errors.New("invalid client name for NaviAndroidAppSessionFactory")
	}
	return f.CreateNewNaviAndroidAppClient(), nil
}

// Initialize stores the shared dependencies used when constructing clients.
func (f *NaviAndroidAppSessionFactory) Initialize(repositories *repositoryAccessLayer.RepositoryAccessLayer, httpClient *clients.HttpClient) {
	f.Repositories = repositories
	f.HttpClient = httpClient
}

// CreateNewNaviAndroidAppClient constructs the concrete Navi Android session service.
func (f *NaviAndroidAppSessionFactory) CreateNewNaviAndroidAppClient() *service.AppSessionNaviApp {
	return service.NewAppSessionNaviApp(f.Repositories, f.HttpClient)
}

View File

@@ -0,0 +1,32 @@
package factory
import (
"alfred/cmd/core/app/service"
"alfred/cmd/core/app/service/interfaces"
"alfred/internal/clients"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
)
// NaviIosAppSessionFactory builds the AppClient used for Navi iOS app sessions.
type NaviIosAppSessionFactory struct {
	Repositories *repositoryAccessLayer.RepositoryAccessLayer
	HttpClient   *clients.HttpClient
}

// CreateAppClient returns the Navi iOS app client for utils.NAVI_USER_APP_IOS
// and rejects every other client name.
func (f *NaviIosAppSessionFactory) CreateAppClient(client string) (interfaces.AppClient, error) {
	if client != utils.NAVI_USER_APP_IOS {
		return nil, errors.New("invalid client name for NaviIosAppSessionFactory")
	}
	return f.CreateNewNaviIosAppClient(), nil
}

// Initialize stores the shared dependencies used when constructing clients.
func (f *NaviIosAppSessionFactory) Initialize(repositories *repositoryAccessLayer.RepositoryAccessLayer, httpClient *clients.HttpClient) {
	f.Repositories = repositories
	f.HttpClient = httpClient
}

// CreateNewNaviIosAppClient constructs the concrete Navi iOS session service.
func (f *NaviIosAppSessionFactory) CreateNewNaviIosAppClient() *service.AppSessionNaviAppIos {
	return service.NewAppSessionNaviAppIos(f.Repositories, f.HttpClient)
}

View File

@@ -0,0 +1,45 @@
package handler
import (
"alfred/cmd/core/app/factory"
"alfred/cmd/core/app/service/interfaces"
"alfred/internal/clients"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
)
// AppClientManager resolves a client name to its AppClient implementation.
type AppClientManager interface {
	GetAppClientByName(clientName string) (interfaces.AppClient, error)
}

// AppClientManagerImpl maps client names to their session factories and holds
// the shared dependencies injected into each factory at lookup time.
type AppClientManagerImpl struct {
	// AppClientFactories is keyed by client name (e.g. utils.NAVI_USER_APP).
	AppClientFactories map[string]factory.AppClientFactory
	HttpClient         *clients.HttpClient
	Repositories       *repositoryAccessLayer.RepositoryAccessLayer
}
// NewAppClientManagerImpl registers a session factory for every supported
// client and retains the dependencies each factory will need.
func NewAppClientManagerImpl(httpClient *clients.HttpClient,
	repositories *repositoryAccessLayer.RepositoryAccessLayer) *AppClientManagerImpl {
	factories := map[string]factory.AppClientFactory{
		utils.NAVI_USER_APP_IOS: &factory.NaviIosAppSessionFactory{},
		utils.NAVI_USER_APP:     &factory.NaviAndroidAppSessionFactory{},
		utils.COSMOS:            &factory.CosmosAndroidAppSessionFactory{},
	}
	return &AppClientManagerImpl{
		AppClientFactories: factories,
		HttpClient:         httpClient,
		Repositories:       repositories,
	}
}
// GetAppClientByName looks up the factory registered for clientName,
// initialises it with the shared dependencies, and returns its AppClient.
func (acm *AppClientManagerImpl) GetAppClientByName(clientName string) (interfaces.AppClient, error) {
	appClientFactory, ok := acm.AppClientFactories[clientName]
	if !ok {
		log.Error("invalid client name: " + clientName)
		return nil, errors.New("invalid client name: " + clientName)
	}
	appClientFactory.Initialize(acm.Repositories, acm.HttpClient)
	return appClientFactory.CreateAppClient(clientName)
}

View File

@@ -0,0 +1,330 @@
package handler
import (
"alfred/api/request"
"alfred/api/response"
"alfred/cmd/core/app/helper"
"alfred/config"
"alfred/internal/clients"
"alfred/model/common"
"alfred/model/es"
"alfred/model/ingester"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/u2takey/go-utils/slice"
"go.uber.org/zap"
"math"
"net/http"
"strings"
"time"
)
// AppSessionHandler exposes the app-session search/detail/events HTTP APIs,
// combining the ES access layers, S3 and the per-client app handlers.
type AppSessionHandler struct {
	// eventsAccessLayer queries session events from Elasticsearch.
	eventsAccessLayer repositoryAccessLayer.EventsAccessLayer
	// sessionAccessLayer queries session documents from Elasticsearch.
	sessionAccessLayer repositoryAccessLayer.SessionsAccessLayer
	// s3Client accesses session artifacts in S3.
	s3Client s3.S3Client
	// appClientManager resolves a client name to its AppClient.
	appClientManager AppClientManager
}

// NewAppSessionHandler wires the handler to the shared repositories, S3 client
// and a client manager built over the given HTTP client.
func NewAppSessionHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, httpClient *clients.HttpClient) *AppSessionHandler {
	return &AppSessionHandler{
		eventsAccessLayer:  repositories.EventsAccessLayer,
		sessionAccessLayer: repositories.SessionsAccessLayer,
		s3Client:           s3Client,
		appClientManager:   NewAppClientManagerImpl(httpClient, repositories),
	}
}
// FetchAppSessions handles the app-session search API: it validates the API
// key and query parameters, resolves the client-specific handler and ES
// indices, fetches matching sessions, enriches each with labels and touch
// counts, and writes a paginated multi-status response.
func (s *AppSessionHandler) FetchAppSessions(c *gin.Context) {
	customerId := c.Query("customer_id")
	deviceId := c.Query("device_id")
	phoneNumber := c.Query("phone_number")
	sessionId := c.Query("session_id")
	labelFilters := c.Query("labels")
	appName := c.Query("app_version_name")
	screenName := c.Query("screen_name")
	fragmentNames := c.Query("fragment_name")
	vertical := c.Query("vertical")
	appVersion := c.Query("app_version_code")
	screenTag := c.Query("screen_tag")
	codePushVersion := c.Query("code_push_version")
	agentEmailId := c.Query("agent_email_id")
	snapshotPerSecond := c.Query("snapshot_per_second")
	sortBy := helper.AppSortingMapper(c.Query("sort_by"))
	clientName, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		log.Error("invalid api key", zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	appOs := config.GetCoreConfig().ClientAppOsMap[clientName]
	clientHandler, err := s.appClientManager.GetAppClientByName(clientName)
	if err != nil {
		// Must return here: continuing with a nil handler previously caused a
		// nil-pointer panic at FetchSessionDetails below.
		log.Error("no client handler available for ", zap.String("client", clientName), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	sessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.SessionUploadIndexClientMap[clientName]
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[clientName]
	startTimestamp, endTimestamp, err := utils.ValidateTimestamps(c.Query("start_time"), c.Query("end_time"))
	if err != nil {
		log.Error("error in query parameters", zap.String("customerId", customerId),
			zap.String("deviceId", deviceId), zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	// Clamp overly wide time windows to the configured maximum, anchored at now.
	maxNumHours := config.GetCoreConfig().DefaultSessionTime
	if endTimestamp-((maxNumHours * time.Hour).Milliseconds()) > startTimestamp {
		endTimestamp = utils.GetCurrentTimeInMillis()
		startTimestamp = endTimestamp - (maxNumHours * time.Hour).Milliseconds()
	}
	pageSize, pageNumber, sortDirection, err := utils.ValidatePage(c.Query("page_size"), c.Query("page_number"), c.Query("sort_direction"))
	if err != nil {
		log.Error("error in query parameters", zap.String("customerId", customerId),
			zap.String("deviceId", deviceId), zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	page := es.Page{
		PageSize:      pageSize,
		PageNumber:    pageNumber,
		SortDirection: es.SortDirection(sortDirection),
	}
	//fetch session
	sessionRequest := request.SessionFilters{
		DeviceId:          deviceId,
		CustomerId:        customerId,
		PhoneNumber:       phoneNumber,
		SessionId:         sessionId,
		StartTimestamp:    startTimestamp,
		EndTimestamp:      endTimestamp,
		Labels:            labelFilters,
		AppName:           appName,
		ScreenName:        screenName,
		FragmentNames:     fragmentNames,
		Vertical:          vertical,
		AppVersion:        appVersion,
		ScreenTags:        screenTag,
		CodePushVersion:   codePushVersion,
		AgentEmailId:      agentEmailId,
		SnapshotPerSecond: snapshotPerSecond,
		SortBy:            sortBy,
		AppOs:             appOs,
	}
	sessions, err := clientHandler.FetchSessionDetails(sessionRequest, &page, sessionUploadIndex, eventIngestionIndex)
	if err != nil {
		log.Error("could not find any session for given inputs",
			zap.String("customerId", customerId), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
			DeviceId: deviceId, CustomerId: customerId, PhoneNumber: phoneNumber}),
		)
		return
	}
	log.Info(fmt.Sprintf("sessions found"), zap.String("customerId", customerId), zap.String("phoneNumber", phoneNumber), zap.String("deviceId", deviceId))
	sessionResponse, err := clientHandler.CreateBucketsForSessionForResponse(sessions)
	if err != nil {
		// Check this error before using sessionResponse; previously it was only
		// inspected after the success response had already been written.
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
			DeviceId: deviceId, CustomerId: customerId, PhoneNumber: phoneNumber}),
		)
		return
	}
	if customerId != utils.EMPTY {
		sessionResponse = s.filterSessionsForCustomerId(sessionResponse, customerId)
	}
	var genericResponse []common.Response
	for _, session := range sessionResponse {
		labels, touchCounts := s.fetchLabelsForSession(session.BaseAttributesDTO.SessionId, labelFilters, eventIngestionIndex, false)
		var touchCountResponse int64
		if touchCounts != nil {
			touchCountResponse = *touchCounts
		}
		genericResponse = utils.AddDataToResponse(response.SearchSessionResponseData{
			DeviceAttributes:  session.DeviceAttributes,
			BaseAttributesDTO: session.BaseAttributesDTO,
			Labels:            labels,
			Metadata:          session.Metadata,
			TouchCounts:       touchCountResponse,
			CreatedAt:         session.CreatedAt,
		}, http.StatusOK, genericResponse)
	}
	//map will change the order of insertion so sorting is needed
	if page.SortDirection == es.ASC {
		utils.SortAscSession(&genericResponse)
	} else {
		utils.SortSession(&genericResponse)
	}
	if genericResponse == nil {
		// Return here: previously a multi-status success body was written right
		// after this error response, producing two responses on one request.
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(errors.New(utils.NO_SESSION_FOUND), http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusMultiStatus, utils.SuccessPaginatedResponse(genericResponse, common.Page{
		PageSize:      len(genericResponse),
		TotalPages:    int64(math.Ceil(float64(page.TotalSize) / float64(page.PageSize))),
		PageNumber:    pageNumber,
		TotalElements: page.TotalSize,
	}, http.StatusMultiStatus))
}
// FetchAppSessionDetails returns the full detail for one session id, including
// labels and touch counts, scoped to the caller's client.
func (s *AppSessionHandler) FetchAppSessionDetails(c *gin.Context) {
	sessionId := c.Query("session_id")
	clientName, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		log.Error("invalid api key", zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	clientHandler, err := s.appClientManager.GetAppClientByName(clientName)
	if err != nil {
		// Must return here: a nil handler previously caused a nil-pointer panic
		// at CreateBucketsForSessionForResponse below.
		log.Error("no client handler available for ", zap.String("client", clientName), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	sessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.SessionUploadIndexClientMap[clientName]
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[clientName]
	page := es.Page{
		PageSize:      utils.SessionUpperLimit,
		PageNumber:    0,
		SortDirection: es.SortDirection(common.ASC),
	}
	sessions, err := s.sessionAccessLayer.FetchSessionsWithSessionIds([]string{sessionId}, &page, sessionUploadIndex)
	if err != nil {
		log.Error("could not find any session for given inputs", zap.String("sessionId", sessionId), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
			SessionId: sessionId}),
		)
		return
	}
	log.Info(fmt.Sprintf("sessions found"), zap.String("sessionId", sessionId))
	sessionResponse, err := clientHandler.CreateBucketsForSessionForResponse(sessions)
	if err != nil {
		// Check before use; previously only inspected after responding.
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
			SessionId: sessionId}),
		)
		return
	}
	var genericResponse []common.Response
	for _, session := range sessionResponse {
		labels, touchCounts := s.fetchLabelsForSession(session.BaseAttributesDTO.SessionId, utils.EMPTY, eventIngestionIndex, true)
		var touchCountResponse int64
		if touchCounts != nil {
			touchCountResponse = *touchCounts
		}
		genericResponse = utils.AddDataToResponse(response.SearchSessionResponseData{
			DeviceAttributes:  session.DeviceAttributes,
			BaseAttributesDTO: session.BaseAttributesDTO,
			Labels:            labels,
			Metadata:          session.Metadata,
			TouchCounts:       touchCountResponse,
		}, http.StatusOK, genericResponse)
	}
	if genericResponse == nil {
		// Return to avoid writing a second response after this error.
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(errors.New(utils.NO_SESSION_FOUND), http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusMultiStatus, utils.SuccessResponse(genericResponse, http.StatusOK))
}
// fetchLabelsForSession returns the distinct event labels present in a
// session's events (restricted to validLabels) plus, optionally, its touch
// count. On lookup failure both results are nil.
func (s *AppSessionHandler) fetchLabelsForSession(sessionId, labelFilters string, eventIngestionIndex string, fetchTouchCounts bool) ([]string, *int64) {
	validLabels := strings.Split(labelFilters, utils.COMMA)
	if labelFilters == "" {
		// Default label set when the caller supplied no filter.
		validLabels = []string{ingester.ERROR_LOG, ingester.CRASH_ANALYTICS_EVENT, ingester.ANR_EVENT}
	}
	events, touchCounts, err := s.eventsAccessLayer.FetchEventsFromSessionId(sessionId, &es.Page{}, eventIngestionIndex, fetchTouchCounts)
	if err != nil {
		log.Error("No data for sessionId", zap.String("sessionId", sessionId), zap.Error(err))
		return nil, nil
	}
	var labels []string
	for i := range events {
		eventName := events[i].Source.EventAttributes.EventName
		if helper.IsValidLabel(eventName, validLabels) && !slice.ContainsString(labels, eventName, nil) {
			labels = append(labels, eventName)
		}
	}
	return labels, touchCounts
}
// filterSessionsForCustomerId keeps only the sessions belonging to customerId;
// if none match, the full input is returned unchanged as a fallback.
func (s *AppSessionHandler) filterSessionsForCustomerId(sessions []response.SearchSessionResponseData, customerId string) []response.SearchSessionResponseData {
	var matched []response.SearchSessionResponseData
	for i := range sessions {
		if sessions[i].BaseAttributesDTO.CustomerId == customerId {
			matched = append(matched, sessions[i])
		}
	}
	if len(matched) == 0 {
		return sessions
	}
	return matched
}
// FetchEvents returns up to 30 events (descending) for the session id supplied
// in the "session-id" header, scoped to the caller's client index.
func (s *AppSessionHandler) FetchEvents(c *gin.Context) {
	sessionId := c.GetHeader("session-id")
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[client]
	page := es.Page{
		PageSize:      30,
		PageNumber:    0,
		SortDirection: "desc",
	}
	if sessionId == "" {
		missingErr := errors.New("sessionId is required in headers")
		log.Error("Bad Request", zap.String("sessionId", sessionId), zap.Error(missingErr))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(missingErr, http.StatusBadRequest, nil))
		return
	}
	events, _, err := s.eventsAccessLayer.FetchEventsFromSessionId(sessionId, &page, eventIngestionIndex, true)
	if err != nil {
		log.Error("Bad Request", zap.String("sessionId", sessionId), zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(events, http.StatusOK))
}

View File

@@ -0,0 +1,161 @@
package handler
import (
"alfred/api/response"
"alfred/cmd/core/app/helper"
"alfred/config"
"alfred/internal/clients"
"alfred/model/core/cruise"
"alfred/pkg/log"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"fmt"
"github.com/gin-gonic/gin"
"go.uber.org/zap"
"net/http"
"strings"
)
// CruiseControlHandler serves cruise-control configuration CRUD over the ES
// access layer, and invalidates the ingester cache after writes.
type CruiseControlHandler struct {
	// cruiseControlAccessLayer reads/writes config documents in Elasticsearch.
	cruiseControlAccessLayer repositoryAccessLayer.CruiseControlAccessLayer
	// alfredIngestorClient invalidates cached configs in the ingester service.
	alfredIngestorClient *clients.AlfredIngestorClient
}

// NewCruiseControlHandler wires the handler to the repository layer and the
// ingester client.
func NewCruiseControlHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer, ingestorClient *clients.AlfredIngestorClient) *CruiseControlHandler {
	return &CruiseControlHandler{
		cruiseControlAccessLayer: repositories.CruiseControlAccessLayer,
		alfredIngestorClient:     ingestorClient,
	}
}
// FetchCruiseControlConfig returns the cruise-control config documents for the
// requested app version and the caller's client/OS.
func (cc *CruiseControlHandler) FetchCruiseControlConfig(c *gin.Context) {
	appVersionName := c.Query("appVersionName")
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	appOs := strings.ToLower(config.GetCoreConfig().ClientAppOsMap[client])
	cruiseControlIndex := config.GetCoreConfig().ElasticSearchConfig.CruiseControlIndexClientMap[client]
	// Named ccConfig (not "response") so the local variable does not shadow the
	// imported alfred/api/response package used elsewhere in this file.
	ccConfig, err := cc.cruiseControlAccessLayer.FetchCruiseControlConfig(appVersionName, appOs, cruiseControlIndex)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(ccConfig.Hits.Hits, http.StatusOK))
}
// CreateCruiseControlConfig upserts a cruise-control config for the caller's
// client/OS, preserving the original config_time on update, then invalidates
// the ingester-side cache for the affected app version.
func (cc *CruiseControlHandler) CreateCruiseControlConfig(c *gin.Context) {
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	var cruiseControlRequest cruise.ControlConfig
	if err := c.ShouldBindJSON(&cruiseControlRequest); err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	// Security: X-Session-Token is a credential and must never be logged, so it
	// was removed from this audit line.
	log.Info("create cruise config", zap.String("body", fmt.Sprintf("%v", cruiseControlRequest)),
		zap.String("email", c.GetHeader("X-User-Email")))
	appOs := strings.ToLower(config.GetCoreConfig().ClientAppOsMap[client])
	//to be removed when fixed from ui
	cruiseControlRequest.Type = cruise.OsType(appOs)
	cruiseControlIndex := config.GetCoreConfig().ElasticSearchConfig.CruiseControlIndexClientMap[client]
	updateCheck, err := cc.cruiseControlAccessLayer.FetchCruiseControlConfig(cruiseControlRequest.OsConfig.AppVersion, appOs, cruiseControlIndex)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	if len(updateCheck.Hits.Hits) == 0 {
		// First config for this app version: stamp it now.
		cruiseControlRequest.ConfigTime = utils.GetCurrentTimeInMillis()
	} else if configTime, ok := updateCheck.Hits.Hits[0].Source["config_time"].(float64); ok {
		// Update: carry over the original config time.
		cruiseControlRequest.ConfigTime = int64(configTime)
	} else {
		// Defensive: stored document lacks a numeric config_time; fall back to
		// now instead of panicking on an unchecked type assertion.
		cruiseControlRequest.ConfigTime = utils.GetCurrentTimeInMillis()
	}
	err = cc.cruiseControlAccessLayer.CreateCruiseControlConfig(&cruiseControlRequest, cruiseControlIndex)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	_, err = cc.alfredIngestorClient.InvalidateCache(cruiseControlRequest.OsConfig.AppVersion, client)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(nil, http.StatusOK))
}
// FetchAllCruiseControlConfigAppVersions lists every app version that has a
// cruise-control config for the caller's client/OS.
func (cc *CruiseControlHandler) FetchAllCruiseControlConfigAppVersions(c *gin.Context) {
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	coreConfig := config.GetCoreConfig()
	appOs := strings.ToLower(coreConfig.ClientAppOsMap[client])
	cruiseControlIndex := coreConfig.ElasticSearchConfig.CruiseControlIndexClientMap[client]
	appVersions, err := cc.cruiseControlAccessLayer.FetchAllCruiseControlAppVersions(cruiseControlIndex, appOs)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(helper.MapToFilterData(appVersions), http.StatusOK))
}
// FetchDropdowns returns the statically configured dropdowns plus a dynamic
// single-select "app_versions" dropdown built from the versions present in ES.
func (cc *CruiseControlHandler) FetchDropdowns(c *gin.Context) {
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	dropdowns := cc.getDropdownResponse()
	coreConfig := config.GetCoreConfig()
	appOs := strings.ToLower(coreConfig.ClientAppOsMap[client])
	cruiseControlIndex := coreConfig.ElasticSearchConfig.CruiseControlIndexClientMap[client]
	appVersions, err := cc.cruiseControlAccessLayer.FetchAllCruiseControlAppVersions(cruiseControlIndex, appOs)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	dropdowns = append(dropdowns, response.DropdownResponseData{
		DropdownKey:     "app_versions",
		FilterData:      helper.MapToFilterData(appVersions),
		SelectionConfig: response.SINGLE_SELECT,
	})
	c.JSON(http.StatusOK, utils.SuccessResponse(dropdowns, http.StatusOK))
}
// getDropdownResponse parses the configured cruise dropdown JSON. A malformed
// config deliberately yields an empty list rather than failing the request.
func (cc *CruiseControlHandler) getDropdownResponse() []response.DropdownResponseData {
	var dropdowns []response.DropdownResponseData
	if err := json.Unmarshal([]byte(config.GetCoreConfig().CruiseDropdowns), &dropdowns); err != nil {
		return []response.DropdownResponseData{}
	}
	return dropdowns
}

View File

@@ -0,0 +1,44 @@
package handler
import (
"alfred/cmd/core/app/helper"
"alfred/config"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
"github.com/gin-gonic/gin"
"net/http"
)
// ErrorEventsHandler is the (currently empty) contract for error-event HTTP
// handlers; methods are added here as the endpoints are formalised.
type ErrorEventsHandler interface {
}

// ErrorEventsHandlerImpl serves error-event lookups backed by the error-events
// access layer.
type ErrorEventsHandlerImpl struct {
	// errorEventsAccessLayer queries error events from Elasticsearch.
	errorEventsAccessLayer repositoryAccessLayer.ErrorEventsAccessLayer
}

// NewErrorEventsHandler wires the handler to the shared repository layer.
func NewErrorEventsHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer) *ErrorEventsHandlerImpl {
	return &ErrorEventsHandlerImpl{
		errorEventsAccessLayer: repositories.ErrorEventsAccessLayer,
	}
}
// FetchErrorEvents returns the error events recorded for the session id given
// in the query string, reading from the client-specific error-events index.
//
// Responds 400 on invalid API key headers or when no session is found, and
// 500 for any other repository failure.
func (s *ErrorEventsHandlerImpl) FetchErrorEvents(c *gin.Context) {
	sessionId := c.Query(utils.SESSION_ID)
	clientName, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	errorEventsIndex := config.GetCoreConfig().ElasticSearchConfig.ErrorEventsUploadIndexClientMap[clientName]
	errorEventsResponse, err := s.errorEventsAccessLayer.FetchErrorEvents(sessionId, errorEventsIndex)
	if err != nil {
		// Bug fix: the original compared `err == errors.New(utils.NO_SESSION_FOUND)`,
		// which compares pointer identity of two distinct allocations and is
		// always false, so the 400 branch was dead code. Compare the message
		// text instead. (A shared sentinel error in utils, checked with
		// errors.Is, would be the cleaner long-term fix.)
		noSessionFound := errors.New(utils.NO_SESSION_FOUND)
		if err.Error() == noSessionFound.Error() {
			c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
			return
		}
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(errorEventsResponse, http.StatusOK))
}

View File

@@ -0,0 +1,59 @@
package handler
import (
	"errors"
	"net/http"

	"alfred/cmd/core/app/helper"
	"alfred/cmd/core/app/service"
	"alfred/config"
	"alfred/pkg/log"
	"alfred/repositoryAccessLayer"
	"alfred/utils"

	"github.com/gin-gonic/gin"
	"go.uber.org/zap"
)
// FilterConfig aggregates the per-client filter services (Navi Android,
// Cosmos, Navi iOS and web) and serves filter-configuration endpoints.
type FilterConfig struct {
	filterNaviService       *service.FilterNavi
	filterCosmosService     *service.FilterCosmos
	filterNaviAppIosService *service.FilterNaviAppIos
	filterWebService        service.FilterWeb
}
// NewFilterConfigHandler constructs a FilterConfig with one filter service
// per supported client, sharing the given repository bundle.
func NewFilterConfigHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer) *FilterConfig {
	fc := &FilterConfig{
		filterWebService:        service.NewFilterWeb(),
		filterNaviService:       service.NewFilterNavi(repositories),
		filterCosmosService:     service.NewFilterCosmos(repositories),
		filterNaviAppIosService: service.NewFilterNaviAppIos(repositories),
	}
	return fc
}
// FetchFilterConfig returns the filter configuration for the calling client,
// dispatching to the client-specific filter service.
//
// Responds 400 on invalid API key headers or an unsupported client, 200 with
// the client's filter response otherwise.
func (fc *FilterConfig) FetchFilterConfig(c *gin.Context) {
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[client]
	switch client {
	case utils.NAVI_USER_APP:
		c.JSON(http.StatusOK, utils.SuccessResponse(fc.filterNaviService.CreateFilterResponse(eventIngestionIndex), http.StatusOK))
	case utils.COSMOS:
		c.JSON(http.StatusOK, utils.SuccessResponse(fc.filterCosmosService.CreateFilterResponse(eventIngestionIndex), http.StatusOK))
	case utils.NAVI_USER_APP_IOS:
		c.JSON(http.StatusOK, utils.SuccessResponse(fc.filterNaviAppIosService.CreateFilterResponse(eventIngestionIndex), http.StatusOK))
	default:
		// Fix: the original switch had no default, so a valid-but-unmapped
		// client received no response body at all. Reply explicitly instead.
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(errors.New("unsupported client for filter config"), http.StatusBadRequest, nil))
	}
}
// FetchWebFilters validates the caller's API key and returns the static web
// filter configuration. Responds 400 when validation fails.
func (fc *FilterConfig) FetchWebFilters(c *gin.Context) {
	if _, err := helper.ValidateAPIKeyHeaders(c); err != nil {
		log.Error(utils.INVALID_WEB_CLIENT, zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	webFilters := fc.filterWebService.CreateFilterResponse()
	c.JSON(http.StatusOK, utils.SuccessResponse(webFilters, http.StatusOK))
}

View File

@@ -0,0 +1,913 @@
package handler
import (
"alfred/api/response"
"alfred/cmd/core/app/external"
"alfred/cmd/core/app/helper"
"alfred/config"
"alfred/internal/clients"
"alfred/internal/metrics"
"alfred/mapper"
"alfred/model/common"
"alfred/model/core"
"alfred/model/es"
"alfred/model/ingester"
"alfred/pkg/ffmpeg"
"alfred/pkg/limiter"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"context"
"encoding/json"
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"go.uber.org/zap"
"math"
"net/http"
"path/filepath"
"sort"
"strings"
"time"
)
// MediaHandler serves session-replay media endpoints: it turns uploaded
// snapshot zips into videos, uploads them to S3, and tracks per-fragment
// video generation status in Elasticsearch.
type MediaHandler struct {
	sessionsAccessLayer              repositoryAccessLayer.SessionsAccessLayer
	eventsAccessLayer                repositoryAccessLayer.EventsAccessLayer
	videoGenerationStatusAccessLayer repositoryAccessLayer.VideoGenerationStatusAccessLayer
	s3Client                         s3.S3Client
	customerService                  *external.CustomerService
	zipProcessor                     *ZipProcessor
}
// NewMediaHandler wires a MediaHandler with its repository access layers, the
// S3 client, and the customer-service / zip-processing collaborators.
func NewMediaHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, httpClient *clients.HttpClient) *MediaHandler {
	handler := &MediaHandler{
		s3Client:                         s3Client,
		sessionsAccessLayer:              repositories.SessionsAccessLayer,
		eventsAccessLayer:                repositories.EventsAccessLayer,
		videoGenerationStatusAccessLayer: repositories.VideoGenerationStatusAccessLayer,
		customerService:                  external.NewCustomerService(httpClient),
		zipProcessor:                     NewZipProcessor(repositories, s3Client, httpClient),
	}
	return handler
}
// RequestVideo kicks off (or resumes) session-replay video generation for the
// session id supplied in the query string.
//
// Flow, per session bucket returned by helper.CreateBucketsForSession:
//   - First-time requests (no video-generation-status document in ES yet):
//     register a status entity, download and unzip the first fragment zip
//     from S3, run it through the zip processor, re-upload the processed zip,
//     generate the first video fragment, and respond with a presigned
//     download URL. Remaining fragments are processed asynchronously.
//   - Repeat requests: respond with the presigned URL and progress counters
//     derived from the stored status document.
//
// Responds 400 on invalid API key headers, 500 when the session cannot be
// fetched, 200 on the repeat-request path, and 207 (multi-status) otherwise.
// NOTE(review): several error branches inside the loop do a bare `return`
// without writing an HTTP response, leaving the client hanging — confirm
// whether that is intentional.
func (m *MediaHandler) RequestVideo(c *gin.Context) {
	sessionId := c.Query(utils.SESSION_ID)
	requesterEmailId := c.GetHeader(utils.USER_EMAIL_HEADER)
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	// Per-client configuration: ES indices and S3 buckets are looked up from
	// maps keyed by the validated client name.
	sessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.SessionUploadIndexClientMap[client]
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[client]
	sessionUploadBucket := config.GetCoreConfig().S3Config.SessionUploadBucketClientMap[client]
	videoUploadBucket := config.GetCoreConfig().S3Config.VideoUploadBucketClientMap[client]
	videoGenerationStatusIndexClientMap := config.GetCoreConfig().ElasticSearchConfig.VideoGenerationStatusIndexClientMap
	videoGenerationStatusIndex := videoGenerationStatusIndexClientMap[client]
	// NOTE(review): this logs the raw session token header value — confirm
	// that logging this credential is acceptable.
	log.Info("Video generation request received", zap.String(utils.SESSION_ID, sessionId), zap.String("email", requesterEmailId),
		zap.String("X-Session-Token", c.GetHeader("sessionToken")))
	page := es.Page{
		PageSize:      utils.EsUpperLimit,
		PageNumber:    0,
		SortDirection: es.DESC,
	}
	sessionResponse, err := m.sessionsAccessLayer.FetchSessionsWithSessionIds([]string{sessionId}, &page, sessionUploadIndex)
	if err != nil {
		log.Error("could not find any session for given inputs", zap.String(utils.SESSION_ID, sessionId), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{SessionId: sessionId}))
		return
	}
	imageType := utils.DOT + sessionResponse[0].Source.BaseAttributes.ImageType
	fileTypeExtension := sessionResponse[0].Source.BaseAttributes.FileTypeExtension
	// Bucket the session's uploaded event zips, device ids and raw ES hits per
	// session id; the helper also derives the snapshots-per-second rate.
	eventBucketsPerSession, deviceIdForSession, esResponsePerSession, snapshotPerSecond := helper.CreateBucketsForSession(sessionResponse)
	var genericResponse []common.Response
	countForSuccessResponse := 0
	for sessionId, events := range *eventBucketsPerSession {
		if len(events) != 0 {
			deviceId, _ := (*deviceIdForSession)[sessionId]
			esResponse, _ := (*esResponsePerSession)[sessionId]
			indexName := esResponse[0].Index
			appVersionCode := esResponse[0].Source.AppVersionCode
			videoGenerationStatus, err := m.videoGenerationStatusAccessLayer.FetchVideoGenerationStatus(sessionId, videoGenerationStatusIndex)
			if err != nil {
				log.Error("Error occurred while fetching video generation status", zap.String("sessionId", sessionId), zap.Error(err))
				return
			}
			result, err := mapper.MapESApiResponseToESResponse(videoGenerationStatus)
			if err != nil {
				log.Error("Error occurred while mapping videoGenerationStatus response", zap.String("sessionId", sessionId), zap.Error(err))
				return
			}
			// Check if the video request is being done for the first time
			if len(result.Hits.Hits) == 0 {
				// do asyncDSMasking
				//go func() {
				//	err := m.asyncDSMasking(sessionId, events, sessionUploadBucket, eventIngestionIndex, videoGenerationStatusIndex, client, esResponse)
				//	if err != nil {
				//		log.Error("Error occurred while applying asyncDSMasking", zap.String("sessionId", sessionId), zap.Error(err))
				//	}
				//}()
				// Register video generation entity for first time
				m.createVideoGenerationStatusEntity(sessionId, videoGenerationStatusIndex, requesterEmailId, events, 0, 0, es.VideoGenerationStatusResponse{})
				// Download and unzip files of the first zip of the session
				generatedUuid, _ := uuid.NewUUID()
				uuidSessionId := sessionId + utils.HYPHEN + generatedUuid.String()
				// NOTE(review): the error from this download is silently
				// overwritten by the ProcessZip call below before being
				// checked — confirm a download failure should not abort here.
				pathToUnzippedFiles, err := m.s3Client.DownloadAndUnzipFileWithExtension(sessionUploadBucket,
					utils.TempDestinationFolder, events[0]+fileTypeExtension, events[0]+fileTypeExtension, uuidSessionId, fileTypeExtension)
				// Process the zips
				processedFilesPath, isProcessed, err := m.zipProcessor.ProcessZip(pathToUnzippedFiles, sessionId, eventIngestionIndex, indexName, client, appVersionCode, events[0], imageType, false)
				if err != nil {
					log.Error("Error occurred while processing zips", zap.String("sessionId", sessionId), zap.Error(err))
				}
				// In case the zip has been processed, generate new name of zip and upload it to s3 bucket
				processedFileName := events[0]
				toBeDeletedZips := []string{events[0]}
				if isProcessed {
					processedFileName = events[0] + utils.HYPHEN + utils.PROCESS_FILE_NAME_SUFFIX
					zipFilePath := filepath.Join(utils.TempDestinationFolder, processedFileName+fileTypeExtension)
					// Re-archive the processed images using the same archive
					// format the session was uploaded with.
					switch fileTypeExtension {
					case utils.ZipExtension.String():
						err := helper.CreateZipFile(zipFilePath, processedFilesPath, imageType, []string{})
						if err != nil {
							log.Error("Error occurred while creating the zip file", zap.String("sessionId", sessionId), zap.String("fileName", processedFileName), zap.Error(err))
							return
						}
					case utils.ZipXzExtension.String():
						err = helper.CreateZipXzFile(zipFilePath, processedFilesPath, imageType, []string{})
						if err != nil {
							// NOTE(review): missing .Inc() — this line fetches the
							// labelled counter but never increments it.
							metrics.MediaGenerationFailureCounter.WithLabelValues(fileTypeExtension)
							log.Error("Error occurred while creating the zip.xz file", zap.String("sessionId", sessionId), zap.String("fileName", processedFileName), zap.Error(err))
							return
						}
					default:
						log.Error("Unsupported file extension", zap.String("fileTypeExtension", fileTypeExtension))
						return
					}
					uploadResponse, err := m.s3Client.UploadFile(sessionUploadBucket, utils.TempDestinationFolder, processedFileName+fileTypeExtension, processedFileName+fileTypeExtension)
					if err != nil {
						log.Error("failed to upload processed to s3", zap.String("sessionId", sessionId),
							zap.String("deviceId", deviceId), zap.String("fileName", processedFileName), zap.Error(err))
						return
					}
					log.Info(fmt.Sprintf("Processed file uploaded to s3 with response: %v", uploadResponse), zap.String("sessionId", sessionId),
						zap.String("deviceId", deviceId), zap.String("fileName", processedFileName))
					toBeDeletedZips = append(toBeDeletedZips, processedFileName)
				}
				// Generate video using the zips processed till now
				err = m.generateVideoOfProcessedImagesAndUpload(processedFilesPath, uuidSessionId, sessionId, events[0], deviceId, videoUploadBucket, imageType, 0, snapshotPerSecond, toBeDeletedZips, fileTypeExtension)
				if err != nil {
					log.Error("Error occurred while generating video of processed zip", zap.String("sessionId", sessionId), zap.String("eventId", events[0]), zap.Error(err))
					return
				}
				// Generate pre-signed URL of the video
				downloadURL, err := m.s3Client.PresignedDownloadUrl(videoUploadBucket,
					sessionId+utils.VideoExtension.String(), sessionId+utils.VideoExtension.String())
				if err != nil {
					log.Error("generating Presigned download url failed", zap.String("sessionId", sessionId),
						zap.String("deviceId", deviceId), zap.Error(err))
					return
				}
				// After uploading new processed zip to s3 , mark the fragment video generation status as COMPLETED
				retryFunc := func() (interface{}, error) {
					esStatusCode, err := m.videoGenerationStatusAccessLayer.UpdateFragmentVideoGenerationStatus(sessionId, videoGenerationStatusIndex, events[0], utils.COMPLETED, processedFileName, 0)
					return esStatusCode, err
				}
				_, err = utils.RetryFunctionWithResponseAndError(retryFunc, config.NewCoreElasticSearchConfig().ElasticSearchUpdateMaxRetry, config.NewCoreElasticSearchConfig().ElasticSearchUpdateRetryBackOffInSeconds*time.Second)
				if err != nil {
					log.Error("Error while updating video generation status", zap.String("sessionId", sessionId), zap.String("eventId", events[0]))
				}
				// Generate other event labels of session to be returned in the response
				labels, _ := m.fetchLabelsForSession(sessionId, utils.EMPTY, eventIngestionIndex)
				log.Info("video generation successful", zap.String("eventId", sessionId),
					zap.String("deviceId", deviceId))
				genericResponse = utils.AddDataToResponse(response.VideoGenerationStatus{
					Link:                      downloadURL,
					DeviceId:                  deviceId,
					SessionId:                 sessionId,
					Model:                     esResponse[0].Source.DeviceModel,
					DeviceCarrierName:         esResponse[0].Source.CarrierName,
					AppVersionCode:            esResponse[0].Source.AppVersionCode,
					AppVersionName:            esResponse[0].Source.AppVersionName,
					DeviceOs:                  esResponse[0].Source.AppOS,
					RecordStartingTime:        esResponse[0].Source.ClientTs,
					Labels:                    labels,
					FragmentsCompletedTillNow: 1,
					VideoGeneratedTillNow:     1,
					TotalFragments:            len(events),
				}, http.StatusOK, genericResponse)
				countForSuccessResponse++
				// Once the video is generated, assign the process to the Next Fragment zips to a newer thread
				if len(events) > 1 {
					go func() {
						err := m.generateNextFragment(sessionId, 1, events, sessionUploadBucket, videoGenerationStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType, fileTypeExtension)
						if err != nil {
							log.Error("Error generating video of next fragment", zap.String("sessionId", sessionId), zap.String("fragmentIndex", "1"))
						}
					}()
				}
			} else {
				// Repeat request: a status document already exists, so reply with
				// the presigned URL and progress counters from the stored state.
				// Todo handle for live sessions
				var videoGenerationStatusResponses []es.VideoGenerationStatusResponse
				jsonHits, err := json.Marshal(result.Hits.Hits)
				if err != nil {
					log.Error("Error while marshalling ", zap.Error(err))
					return
				}
				if err = json.Unmarshal(jsonHits, &videoGenerationStatusResponses); err != nil {
					log.Error("Error while unmarshalling ", zap.Error(err))
					return
				}
				labels, _ := m.fetchLabelsForSession(sessionId, utils.EMPTY, eventIngestionIndex)
				/*indexName := esResponse[0].Index
				appVersionCode := esResponse[0].Source.AppVersionCode*/
				downloadURL, err := m.s3Client.PresignedDownloadUrl(videoUploadBucket,
					sessionId+utils.VideoExtension.String(), sessionId+utils.VideoExtension.String())
				if err != nil {
					log.Error("generating Presigned download url failed", zap.String("sessionId", sessionId), zap.Error(err))
					return
				}
				genericResponse = utils.AddDataToResponse(response.VideoGenerationStatus{
					Link:                      downloadURL,
					DeviceId:                  deviceId,
					SessionId:                 sessionId,
					Model:                     esResponse[0].Source.DeviceModel,
					DeviceCarrierName:         esResponse[0].Source.CarrierName,
					AppVersionCode:            esResponse[0].Source.AppVersionCode,
					AppVersionName:            esResponse[0].Source.AppVersionName,
					DeviceOs:                  esResponse[0].Source.AppOS,
					RecordStartingTime:        esResponse[0].Source.ClientTs,
					Labels:                    labels,
					FragmentsCompletedTillNow: videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow + 1,
					TotalFragments:            len(sessionResponse),
					VideoGeneratedTillNow:     videoGenerationStatusResponses[0].Source.VideoGeneratedTillNow + 1,
				}, http.StatusOK, genericResponse)
				/*extraFragments := len(sessionResponse) - videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow -1
				if extraFragments > 0 {
					m.createVideoGenerationStatusEntity(sessionId, videoGenerationStatusIndex, requesterEmailId, events, videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow+1, videoGenerationStatusResponses[0].Source.VideoGeneratedTillNow, videoGenerationStatusResponses[0])
					go func() {
						err := m.generateNextFragment(sessionId, videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow+1, events, sessionUploadBucket, videoGenerationStatusIndex, eventIngestionIndex, indexName, appVersionCode, client)
						if err != nil {
							log.Error("Error generating video of next fragment", zap.String("sessionId", sessionId), zap.String("fragmentIndex", "1"))
						}
					}()
				}*/
				c.JSON(http.StatusOK, utils.SuccessPaginatedResponse(genericResponse, common.Page{}, http.StatusOK))
				return
			}
		}
	}
	if page.SortDirection == es.DESC {
		utils.Sort(&genericResponse)
	} else {
		utils.SortAsc(&genericResponse)
	}
	c.JSON(http.StatusMultiStatus, utils.SuccessPaginatedResponse(genericResponse, common.Page{
		PageSize:      countForSuccessResponse,
		TotalPages:    int64(math.Ceil(float64(page.TotalSize) / float64(page.PageSize))),
		TotalElements: page.TotalSize,
		PageNumber:    page.PageNumber,
	}, http.StatusMultiStatus))
}
// This flow is to save time for DsMasking. This function will do the preprocessing and upload the ds processed zip to s3
//func (m *MediaHandler) asyncDSMasking(sessionId string, events []string, sessionUploadBucket, eventIngestionIndex, videoGenerationStatusIndex, clientName string, esResponse []es.SessionResponse) error {
// log.Info("Starting asyncDSMasking", zap.String("sessionId", sessionId))
// for i := 1; i < len(events); i++ {
// indexName := esResponse[i].Index
// appVersionCode := esResponse[i].Source.AppVersionCode
// err := m.processNextZip(sessionId, i, events, sessionUploadBucket, videoGenerationStatusIndex, eventIngestionIndex, indexName, appVersionCode, clientName, true)
// if err != nil {
// log.Error("Error occurred while applying DS Masking", zap.String("sessionId", sessionId), zap.String("eventId", events[i]), zap.Error(err))
// }
// }
// return nil
//}
// FetchLatestVideo returns a presigned URL for the most recently generated
// video of the given session. When more fragments have been processed than
// the client has seen (fragmentsTillNow query param), the video is extended
// with the newly processed fragments first; failed fragments are retried in
// the background.
//
// Responds 400 on invalid API key headers and 500 when the session or its
// video-generation status cannot be loaded.
func (m *MediaHandler) FetchLatestVideo(c *gin.Context) {
	sessionId := c.Query(utils.SESSION_ID)
	fragmentsTillNow := c.Query(utils.FRAGMENTS_TILL_NOW)
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	generatedUuid := uuid.New()
	uuidSessionId := sessionId + utils.HYPHEN + generatedUuid.String()
	// Per-client configuration: indices and buckets keyed by client name.
	videoGenerationStatusIndex := config.GetCoreConfig().ElasticSearchConfig.VideoGenerationStatusIndexClientMap[client]
	sessionUploadBucket := config.GetCoreConfig().S3Config.SessionUploadBucketClientMap[client]
	videoUploadBucket := config.GetCoreConfig().S3Config.VideoUploadBucketClientMap[client]
	sessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.SessionUploadIndexClientMap[client]
	eventIngestionIndex := config.GetCoreConfig().ElasticSearchConfig.EventIngestionIndexClientMap[client]
	page := es.Page{
		PageSize:      10000,
		PageNumber:    0,
		SortDirection: es.DESC,
	}
	sessionResponse, err := m.sessionsAccessLayer.FetchSessionsWithSessionIds([]string{sessionId}, &page, sessionUploadIndex)
	if err != nil {
		log.Error("could not find any session for given inputs", zap.String("sessionId", sessionId), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{SessionId: sessionId}))
		return
	}
	indexName := sessionResponse[0].Index
	appVersionCode := sessionResponse[0].Source.BaseAttributes.AppVersionCode
	imageType := utils.DOT + sessionResponse[0].Source.BaseAttributes.ImageType
	fileTypeExtension := sessionResponse[0].Source.BaseAttributes.FileTypeExtension
	eventBucketsPerSession, _, _, snapshotPerSecond := helper.CreateBucketsForSession(sessionResponse)
	videoGenerationStatus, err := m.videoGenerationStatusAccessLayer.FetchVideoGenerationStatus(sessionId, videoGenerationStatusIndex)
	if err != nil {
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, "Video generation status could not be fetched"))
		return
	}
	result, err := mapper.MapESApiResponseToESResponse(videoGenerationStatus)
	// Fix: this mapping error was previously dropped, letting a zero-value
	// result flow onward.
	if err != nil {
		log.Error("Error while mapping video generation status response", zap.String("sessionId", sessionId), zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, "Video generation status could not be fetched"))
		return
	}
	var videoGenerationStatusResponses []es.VideoGenerationStatusResponse
	jsonHits, err := json.Marshal(result.Hits.Hits)
	if err != nil {
		log.Error("Error while marshalling ", zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, "Video generation status could not be fetched"))
		return
	}
	if err = json.Unmarshal(jsonHits, &videoGenerationStatusResponses); err != nil {
		log.Error("Error while unmarshalling ", zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, "Video generation status could not be fetched"))
		return
	}
	// Fix: guard against an empty hit list before indexing [0] below — the
	// original panicked when no status document existed for the session.
	if len(videoGenerationStatusResponses) == 0 {
		err = errors.New("no video generation status found for session")
		log.Error("no video generation status found", zap.String("sessionId", sessionId))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, "Video generation status could not be fetched"))
		return
	}
	fragmentsInInteger := utils.GetIntFromString(fragmentsTillNow)
	var genericResponse []common.Response
	// Todo handle for live sessions
	if videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow >= fragmentsInInteger {
		processedTill := 0
		var toBeDeletedProcessedZips []string
		pathToUnzippedFiles := utils.EMPTY
		needsToBeProcessed := false
		// Download every completed processed fragment zip; retry failed ones
		// in the background.
		for _, fragmentStatus := range videoGenerationStatusResponses[0].Source.VideoFragmentStatuses {
			if fragmentStatus.ZipProcessingStatus == utils.COMPLETED && fragmentStatus.ProcessedZipName != utils.EMPTY {
				pathToUnzippedFiles, err = m.s3Client.DownloadAndUnzipFileWithExtension(sessionUploadBucket,
					utils.TempDestinationFolder, fragmentStatus.ProcessedZipName+fileTypeExtension, fragmentStatus.ProcessedZipName+fileTypeExtension, uuidSessionId, fileTypeExtension)
				if err != nil {
					log.Error("Error occurred while downloading processed zip file", zap.String("sessionId", sessionId), zap.String("zip_name", fragmentStatus.ProcessedZipName))
					break
				}
				needsToBeProcessed = true
				toBeDeletedProcessedZips = append(toBeDeletedProcessedZips, fragmentStatus.ProcessedZipName)
				processedTill = processedTill + 1
			} else {
				// Video generation should not stop just because one zip failed to
				// process; retry that fragment in the background instead.
				log.Info("EventId is not processed yet", zap.String("eventId", fragmentStatus.EventID), zap.String("sessionId", sessionId))
				// NOTE(review): the goroutine captures processedTill, which the
				// outer loop keeps mutating — confirm the intended fragment index.
				go func() {
					for sessionId, events := range *eventBucketsPerSession {
						log.Info("Trying to process the fragment again", zap.String("sessionId", sessionId), zap.String("eventId", events[processedTill]), zap.Error(err))
						err := m.processNextZip(sessionId, processedTill, events, sessionUploadBucket, videoGenerationStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType, false, fileTypeExtension)
						if err != nil {
							log.Error("Error occurred even after retrying to process the zip", zap.String("sessionId", sessionId), zap.String("eventId", events[processedTill]), zap.Error(err))
						}
					}
				}()
			}
		}
		if needsToBeProcessed {
			// NOTE(review): the error returned by video generation below is
			// overwritten by the status-update retry before being checked —
			// confirm this is intentional.
			err = m.generateVideoOfProcessedImagesAndUpload(pathToUnzippedFiles, uuidSessionId, sessionId, utils.EMPTY, utils.EMPTY, videoUploadBucket, imageType, videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow, snapshotPerSecond, toBeDeletedProcessedZips, fileTypeExtension)
			retryFunc := func() (interface{}, error) {
				status := utils.PENDING
				if processedTill >= videoGenerationStatusResponses[0].Source.TotalFragments {
					status = utils.COMPLETED
				}
				esStatusCode, err := m.videoGenerationStatusAccessLayer.UpdateVideoGenerationStatus(sessionId, videoGenerationStatusIndex, status, processedTill-1)
				return esStatusCode, err
			}
			_, err = utils.RetryFunctionWithResponseAndError(retryFunc, config.NewCoreElasticSearchConfig().ElasticSearchUpdateMaxRetry, config.NewCoreElasticSearchConfig().ElasticSearchUpdateRetryBackOffInSeconds*time.Second)
			if err != nil {
				log.Error("Error while updating till when the video has been generated", zap.String("sessionId", sessionId), zap.Error(err))
				return
			}
		}
	}
	// This is just a fallback in case due to some error, next fragments generation stops and client keeps sending same fragment count which was returned earlier
	// If required we can put up a check to call only if last processed at timestamp was way before. Ideally this block should never get executed
	if fragmentsInInteger == videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow && videoGenerationStatusResponses[0].Source.VideoGenerationStatus != utils.COMPLETED {
		log.Info("Scheduling to generate next fragments ", zap.Int("fragmentIndex", videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow+1), zap.String("sessionId", sessionId))
		go func() {
			for sessionId, events := range *eventBucketsPerSession {
				err := m.generateNextFragment(sessionId, videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow+1, events, sessionUploadBucket, videoGenerationStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType, fileTypeExtension)
				if err != nil {
					return
				}
			}
		}()
	}
	downloadURL, err := m.s3Client.PresignedDownloadUrl(videoUploadBucket,
		sessionId+utils.VideoExtension.String(), sessionId+utils.VideoExtension.String())
	if err != nil {
		log.Error("generating Presigned download url failed", zap.String("sessionId", sessionId), zap.Error(err))
		return
	}
	genericResponse = utils.AddDataToResponse(response.VideoGenerationStatus{
		LatestUrl:                 downloadURL,
		FragmentsCompletedTillNow: videoGenerationStatusResponses[0].Source.FragmentsCompletedTillNow + 1,
		TotalFragments:            videoGenerationStatusResponses[0].Source.TotalFragments + 1,
		VideoGeneratedTillNow:     videoGenerationStatusResponses[0].Source.VideoGeneratedTillNow + 1,
		SessionId:                 sessionId,
	}, http.StatusOK, genericResponse)
	c.JSON(http.StatusOK, utils.SuccessPaginatedResponse(genericResponse, common.Page{}, http.StatusOK))
}
// createVideoGenerationStatusEntity persists a video-generation-status
// document for the session: it carries over any fragment statuses from a
// previously stored document (videoStatuses) and appends one PENDING entry
// per event zip that is not yet completed. Errors are logged, not returned.
func (m *MediaHandler) createVideoGenerationStatusEntity(sessionId, videoGenerationStatusIndex, requesterEmailId string, events []string, fragmentsCompleted int, videoGenerated int, videoStatuses es.VideoGenerationStatusResponse) {
	var count = 0
	var videoFragmentStatuses []core.VideoFragmentStatusAttributes
	// Copy over fragment statuses from the previously stored document, if any.
	if len(videoStatuses.Source.VideoFragmentStatuses) > 0 {
		for _, videoStatus := range videoStatuses.Source.VideoFragmentStatuses {
			videoStatusData := core.VideoFragmentStatusAttributes{
				ZipProcessingStatus: videoStatus.ZipProcessingStatus,
				ProcessedZipName:    videoStatus.ProcessedZipName,
				FragmentOrder:       videoStatus.FragmentOrder,
				EventId:             videoStatus.EventID,
			}
			videoFragmentStatuses = append(videoFragmentStatuses, videoStatusData)
		}
	}
	// Append a PENDING entry for every event beyond the already-completed
	// ones; count doubles as the fragment order.
	for _, event := range events {
		if count < fragmentsCompleted {
			count++
			continue
		}
		fragmentStatus := core.VideoFragmentStatusAttributes{
			ZipProcessingStatus: utils.PENDING,
			FragmentOrder:       count,
			EventId:             event,
		}
		videoFragmentStatuses = append(videoFragmentStatuses, fragmentStatus)
		count = count + 1
	}
	// Keeps FragmentsCompletedTillNow (stored below as fragmentsCompleted-1)
	// from going negative on first-time requests, where fragmentsCompleted is 0.
	if fragmentsCompleted-1 < 0 {
		fragmentsCompleted++
	}
	// Check if a entry exists already before creating a new one
	videoGenerationStatusModel := &core.VideoFragmentStatusModel{
		SessionId:                     sessionId,
		FinalStatus:                   utils.PENDING,
		VideoName:                     sessionId,
		TotalFragments:                int64(len(events)) - 1,
		FragmentsCompletedTillNow:     fragmentsCompleted - 1,
		RequestedAt:                   utils.GetCurrentTimeInMillis(),
		RequestedBy:                   requesterEmailId,
		VideoFragmentStatusAttributes: videoFragmentStatuses,
		VideoGeneratedTillNow:         videoGenerated,
	}
	videoGenerationStatuses, err := json.Marshal(videoGenerationStatusModel)
	if err != nil {
		log.Error("error in serializing app events", zap.Error(err))
		return
	}
	err = m.videoGenerationStatusAccessLayer.CreateVideoGenerationStatus(*videoGenerationStatusModel, string(videoGenerationStatuses), videoGenerationStatusIndex)
	if err != nil {
		log.Error("error in updating video generation status", zap.Error(err))
		return
	}
}
// generateVideoOfProcessedZip stitches the processed snapshot images in
// folderPath into a video and uploads it to the video bucket; on success it
// removes the local working files (uuidSessionId artefacts plus
// toBeDeletedZips). As in the original, files are NOT cleaned up when the
// upload fails.
// NOTE(review): near-duplicate of generateVideoOfProcessedImagesAndUpload
// minus the semaphore — consider consolidating.
func (m *MediaHandler) generateVideoOfProcessedZip(folderPath, uuidSessionId, sessionId, eventId, deviceId, videoUploadBucket, imageType string, fragmentIndex int, snapshotPerSecond int64, toBeDeletedZips []string) error {
	log.Info("Generating video from images", zap.String("sessionId", sessionId))
	_, err := ffmpeg.GenerateVideoFromImages(folderPath, uuidSessionId, imageType, snapshotPerSecond)
	if err != nil {
		log.Error("generating video from images failed", zap.String("sessionId", sessionId),
			zap.String("eventId", eventId), zap.Int("fragmentIndex", fragmentIndex), zap.String("folderPath", folderPath), zap.Error(err))
		metrics.MediaGenerationFailureCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
		return err
	}
	metrics.MediaGenerationSuccessCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
	// err is nil on this path; the original passed zap.Error(err) here, which
	// only emitted a spurious null error field.
	log.Info("Video generation successful", zap.String("sessionId", sessionId),
		zap.String("eventId", eventId), zap.Int("fragmentIndex", fragmentIndex))
	if err = m.uploadLocalFileToS3(uuidSessionId, sessionId, utils.EMPTY, deviceId, videoUploadBucket, folderPath); err != nil {
		// Fixed copy-pasted log message: this failure is the S3 upload step,
		// not video generation.
		log.Error("uploading generated video to s3 failed", zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(err))
		return err
	}
	// The original registered a defer immediately before returning, which adds
	// nothing; run the cleanup directly on the success path.
	utils.DeleteFileFromLocal(uuidSessionId, &toBeDeletedZips)
	return nil
}
// generateVideoOfProcessedImagesAndUpload stitches the processed snapshot
// images in folderPath into a video and uploads it to the video bucket,
// bounded by the shared video-processing semaphore (fails fast with an error
// when the queue is full). On success the local working files (uuidSessionId
// artefacts plus toBeDeletedZips, matched by fileTypeExtension) are removed;
// as in the original, no cleanup happens when the upload fails.
func (m *MediaHandler) generateVideoOfProcessedImagesAndUpload(folderPath, uuidSessionId, sessionId, eventId, deviceId, videoUploadBucket, imageType string, fragmentIndex int, snapshotPerSecond int64, toBeDeletedZips []string, fileTypeExtension string) error {
	ctx := context.Background()
	if !limiter.TryAcquire(ctx, limiter.VideoSem, limiter.DefaultTimeout) {
		log.Error("Video processing queue full, try again later",
			zap.String("sessionId", sessionId),
			zap.Int("fragmentIndex", fragmentIndex))
		return errors.New("video processing queue is full, please try again later")
	}
	defer limiter.Release(limiter.VideoSem)
	log.Info("Generating video from images", zap.String("sessionId", sessionId))
	_, err := ffmpeg.GenerateVideoFromImages(folderPath, uuidSessionId, imageType, snapshotPerSecond)
	if err != nil {
		log.Error("generating video from images failed", zap.String("sessionId", sessionId),
			zap.String("eventId", eventId), zap.Int("fragmentIndex", fragmentIndex), zap.String("folderPath", folderPath), zap.Error(err))
		metrics.MediaGenerationFailureCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
		return err
	}
	metrics.MediaGenerationSuccessCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
	// err is nil on this path; the original passed zap.Error(err) here, which
	// only emitted a spurious null error field.
	log.Info("Video generation successful", zap.String("sessionId", sessionId),
		zap.String("eventId", eventId), zap.Int("fragmentIndex", fragmentIndex))
	if err = m.uploadLocalFileToS3(uuidSessionId, sessionId, utils.EMPTY, deviceId, videoUploadBucket, folderPath); err != nil {
		// Fixed copy-pasted log message: this failure is the S3 upload step,
		// not video generation.
		log.Error("uploading generated video to s3 failed", zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(err))
		return err
	}
	// The original registered a defer immediately before returning, which adds
	// nothing; run the cleanup directly on the success path.
	utils.DeleteFileFromLocalWithExtension(uuidSessionId, &toBeDeletedZips, fileTypeExtension)
	return nil
}
// generateNextFragment sequentially processes every remaining fragment zip of
// the session starting at fragmentIndex and, once all fragments are done,
// marks the overall video generation status COMPLETED (retrying the ES
// update). Returns the first processing or update error encountered.
func (m *MediaHandler) generateNextFragment(sessionID string, fragmentIndex int, events []string, sessionUploadBucket, fragmentVideoStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType string, fileTypeExtension string) error {
	log.Info("Started generating next fragment", zap.String("sessionId", sessionID), zap.Int("fragmentIndex", fragmentIndex))
	// The redundant pre-declared loop variable (`i := fragmentIndex` followed
	// by `for i = …`) was removed; i is now scoped to the loop.
	for i := fragmentIndex; i < len(events); i++ {
		log.Info("Processing fragment", zap.String("sessionId", sessionID), zap.Int("fragmentIndex", i))
		if err := m.processNextZip(sessionID, i, events, sessionUploadBucket, fragmentVideoStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType, false, fileTypeExtension); err != nil {
			log.Error("Error generating video of fragment", zap.String("sessionID", sessionID), zap.Int("fragmentIndex", i), zap.Error(err))
			return err
		}
	}
	log.Info("Video generation complete, marking final status as COMPLETED", zap.String("sessionId", sessionID))
	retryFunc := func() (interface{}, error) {
		esStatusCode, err := m.videoGenerationStatusAccessLayer.UpdateVideoGenerationStatus(sessionID, fragmentVideoStatusIndex, utils.COMPLETED, 0)
		return esStatusCode, err
	}
	_, err := utils.RetryFunctionWithResponseAndError(retryFunc, config.NewCoreElasticSearchConfig().ElasticSearchUpdateMaxRetry, config.NewCoreElasticSearchConfig().ElasticSearchUpdateRetryBackOffInSeconds*time.Second)
	if err != nil {
		log.Error("Error while updating final video generation status", zap.String("sessionId", sessionID), zap.Error(err))
		return err
	}
	return nil
}
// processNextZip downloads one fragment's archive, runs masking/touch-point
// processing on its frames, re-uploads the processed archive when anything was
// changed, and records fragment completion in Elasticsearch (with retries).
// Returns an error when the processing queue is saturated so callers can retry.
func (m *MediaHandler) processNextZip(sessionID string, fragmentIndex int, events []string, sessionUploadBucket, fragmentVideoStatusIndex, eventIngestionIndex, indexName, appVersionCode, client, imageType string, isAsyncDSMaskingEnabled bool, fileTypeExtension string) error {
	ctx := context.Background()
	// Bound the number of fragments processed in parallel.
	if !limiter.TryAcquire(ctx, limiter.VideoSem, limiter.DefaultTimeout) {
		log.Error("Video processing queue full, skipping fragment for now",
			zap.String("sessionId", sessionID),
			zap.Int("fragmentIndex", fragmentIndex))
		return errors.New("video processing queue is full, please try again later")
	}
	defer limiter.Release(limiter.VideoSem)
	event := events[fragmentIndex]
	generatedUuid := uuid.New()
	// Unique local working name so concurrent runs for the same session don't collide.
	uuidSessionId := sessionID + utils.HYPHEN + generatedUuid.String()
	pathToUnzippedFiles, err := m.s3Client.DownloadAndUnzipFileWithExtension(sessionUploadBucket, utils.TempDestinationFolder, event+fileTypeExtension, event+fileTypeExtension, uuidSessionId, fileTypeExtension)
	if err != nil {
		log.Error("Error occurred while downloading and unzipping file in processing next zip", zap.String("sessionId", sessionID), zap.String("eventId", event), zap.Error(err))
		return err
	}
	processedFilesPath, isProcessed, err := m.zipProcessor.ProcessZip(pathToUnzippedFiles, sessionID, eventIngestionIndex, indexName, client, appVersionCode, event, imageType, isAsyncDSMaskingEnabled)
	if err != nil {
		log.Error("Error occurred while processing zips", zap.String("sessionId", sessionID), zap.Error(err))
		return err
	}
	processedFileName := event
	toBeDeletedZips := []string{event}
	defer utils.DeleteFileFromLocalWithExtension(uuidSessionId, &toBeDeletedZips, fileTypeExtension)
	if isAsyncDSMaskingEnabled {
		// Async DS masking takes over from here; no upload or status update needed.
		return nil
	}
	if isProcessed {
		processedFileName = event + utils.HYPHEN + utils.PROCESS_FILE_NAME_SUFFIX
		zipFilePath := filepath.Join(utils.TempDestinationFolder, processedFileName+fileTypeExtension)
		switch fileTypeExtension {
		case utils.ZipExtension.String():
			if zipErr := helper.CreateZipFile(zipFilePath, processedFilesPath, imageType, []string{}); zipErr != nil {
				log.Error("Error occurred while creating the zip file", zap.String("sessionId", sessionID), zap.String("fileName", processedFileName), zap.Error(zipErr))
				return zipErr
			}
		case utils.ZipXzExtension.String():
			if xzErr := helper.CreateZipXzFile(zipFilePath, processedFilesPath, imageType, []string{}); xzErr != nil {
				// Fix: the failure counter was previously constructed but never incremented.
				metrics.MediaGenerationFailureCounter.WithLabelValues(fileTypeExtension).Inc()
				log.Error("Error occurred while creating the zip.xz file", zap.String("sessionId", sessionID), zap.String("fileName", processedFileName), zap.Error(xzErr))
				return xzErr
			}
		default:
			// Fix: previously returned a possibly-nil err here, making unsupported
			// extensions look like success.
			log.Error("Unsupported file extension", zap.String("fileTypeExtension", fileTypeExtension))
			return fmt.Errorf("unsupported file extension: %s", fileTypeExtension)
		}
		uploadResponse, uploadErr := m.s3Client.UploadFile(sessionUploadBucket, utils.TempDestinationFolder, processedFileName+fileTypeExtension, processedFileName+fileTypeExtension)
		// Register the processed archive for deferred cleanup even on failure.
		toBeDeletedZips = append(toBeDeletedZips, processedFileName)
		if uploadErr != nil {
			log.Error("failed to upload file to s3", zap.String("sessionId", sessionID), zap.Error(uploadErr))
			return uploadErr
		}
		log.Info(fmt.Sprintf("file uploaded to s3 with response: %v", uploadResponse), zap.String("sessionId", sessionID))
	}
	// Record fragment completion with retries so transient ES failures don't lose status.
	retryFunc := func() (interface{}, error) {
		esStatusCode, updateErr := m.videoGenerationStatusAccessLayer.UpdateFragmentVideoGenerationStatus(sessionID, fragmentVideoStatusIndex, event, utils.COMPLETED, processedFileName, int64(fragmentIndex))
		return esStatusCode, updateErr
	}
	_, err = utils.RetryFunctionWithResponseAndError(retryFunc, config.GetCoreConfig().ElasticSearchConfig.ElasticSearchUpdateMaxRetry, config.GetCoreConfig().ElasticSearchConfig.ElasticSearchUpdateRetryBackOffInSeconds*time.Second)
	if err != nil {
		log.Error("error updating video generation status", zap.Error(err))
		return err
	}
	return nil
}
// FetchHistoricVideo generates (or reuses) a video for the supplied historic
// event ids and responds with a presigned S3 download link.
func (m *MediaHandler) FetchHistoricVideo(c *gin.Context) {
	eventIds := strings.Split(c.Query("event_ids"), utils.COMMA)
	snapshotPerSecondString := c.Query("snapshot_per_second")
	imageType := c.Query(utils.IMAGE_TYPE)
	if imageType == utils.EMPTY {
		// Default to jpeg frames when the caller doesn't specify a type.
		imageType = utils.ImageTypeJpeg
	}
	imageType = utils.DOT + imageType
	snapshotPerSecond := utils.GetInt64FromString(snapshotPerSecondString)
	client, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	generatedUuid, err := uuid.NewUUID()
	sessionUploadBucket := config.GetCoreConfig().S3Config.SessionUploadBucketClientMap[client]
	videoUploadBucket := config.GetCoreConfig().S3Config.VideoUploadBucketClientMap[client]
	if err != nil {
		log.Error("uuid generation failed", zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		// Fix: previously fell through and used the invalid uuid below.
		return
	}
	videoLink, err := m.fetchVideoForSession(&eventIds, "historic-video-"+generatedUuid.String(), "historic-video-"+generatedUuid.String(), utils.EMPTY, utils.EMPTY, sessionUploadBucket, videoUploadBucket, utils.EMPTY, utils.EMPTY, snapshotPerSecond, utils.EMPTY, utils.EMPTY, imageType, false)
	if err != nil {
		log.Error("video generation failed", zap.Error(err))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, nil))
		// Fix: previously also wrote a 200 success body after this 500.
		return
	}
	c.JSON(http.StatusOK, utils.SuccessResponse(map[string]string{
		"link": videoLink,
	}, http.StatusOK))
}
// fetchVideoForSession ensures a video exists for the session (generating and
// uploading it if necessary) and returns a presigned S3 download URL for it.
func (m *MediaHandler) fetchVideoForSession(sessions *[]string, sessionId, overAllSessionId, customerId, deviceId string, sessionUploadBucket string, videoUploadBucket string, eventIngestionIndex string, videoGenerationIndex string, snapshotPerSecond int64, appName, indexName, imageType string, streamingEnabled bool) (string, error) {
	if genErr := m.generateVideo(sessions, sessionId, overAllSessionId, customerId, deviceId, sessionUploadBucket, videoUploadBucket, eventIngestionIndex, videoGenerationIndex, snapshotPerSecond, appName, indexName, imageType, streamingEnabled); genErr != nil {
		log.Error("generating video failed",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(genErr))
		return "", genErr
	}
	log.Info("generated video and uploaded to s3 successfully",
		zap.String("customerId", customerId), zap.String("sessionId", sessionId),
		zap.String("deviceId", deviceId))
	// Hand the caller a presigned URL rather than streaming the bytes ourselves.
	videoKey := sessionId + utils.VideoExtension.String()
	downloadURL, urlErr := m.s3Client.PresignedDownloadUrl(videoUploadBucket, videoKey, videoKey)
	if urlErr != nil {
		log.Error("generating Presigned download url failed",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(urlErr))
		return "", urlErr
	}
	return downloadURL, nil
}
// generateVideo is a no-op when the session's video already exists on s3;
// otherwise it generates the video and uploads it.
func (m *MediaHandler) generateVideo(sessions *[]string, sessionId, overAllSessionId, customerId, deviceId string, sessionUploadBucket string, videoUploadBucket string, eventIngestionIndex string, videoGenerationIndex string, snapshotPerSecond int64, appName, indexName, imageType string, streamingEnabled bool) error {
	// Skip regeneration when s3 already holds the finished video.
	videoPresent, checkErr := m.s3Client.CheckIfPresent(videoUploadBucket, sessionId+utils.VideoExtension.String())
	if checkErr != nil {
		log.Error("error fetching from the s3",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(checkErr))
		return checkErr
	}
	if videoPresent {
		log.Info("video found on s3",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId))
		return nil
	}
	return m.generateVideoToS3(sessions, sessionId, overAllSessionId, customerId, deviceId, sessionUploadBucket, videoUploadBucket, eventIngestionIndex, videoGenerationIndex, snapshotPerSecond, appName, indexName, imageType, streamingEnabled)
}
// generateVideoToS3 will generate the video and upload it to s3 bucket configured.
//
// Flow: download and unzip the session's frame archives (two strategies chosen
// by app version — see below), stitch the frames into a video via ffmpeg, then
// upload the result under the session's canonical name. Local artifacts are
// removed by the deferred cleanup.
func (m *MediaHandler) generateVideoToS3(sessions *[]string, sessionId, overAllSessionId, customerId, deviceId, sessionUploadBucket, videoUploadBucket, eventIngestionIndex, videoGenerationIndex string, snapshotPerSecond int64, appName, indexName, imageType string, streamingEnabled bool) error {
	generatedUuid, _ := uuid.NewUUID()
	// Unique local working name so concurrent generations for the same session
	// do not collide on disk.
	uuidSessionId := sessionId + utils.HYPHEN + generatedUuid.String()
	defer utils.DeleteFileFromLocal(uuidSessionId, sessions)
	pathToUnzippedFiles := utils.EMPTY
	var err error
	appNames := config.GetCoreConfig().ZipsWithEventsAppVersions
	if strings.Contains(appNames, appName) && appName != utils.EMPTY {
		// Newer app versions record zip names in ES; fetch that list first.
		zipsList, err := m.eventsAccessLayer.FetchZipsFromSession(sessionId, eventIngestionIndex, indexName)
		if err != nil {
			return errors.New("no session found")
		}
		for _, zip := range zipsList {
			// Best-effort download: a failed zip is logged and skipped, not fatal.
			tempPathToUnzippedFiles, err := m.s3Client.DownloadAndUnzipFile(sessionUploadBucket,
				utils.TempDestinationFolder, zip+utils.ZipExtension.String(), zip+utils.ZipExtension.String(), uuidSessionId)
			if err != nil {
				log.Error("downloading zip file failed",
					zap.String("customerId", customerId), zap.String("sessionId", sessionId),
					zap.String("deviceId", deviceId), zap.Error(err))
			}
			if tempPathToUnzippedFiles != utils.EMPTY {
				pathToUnzippedFiles = tempPathToUnzippedFiles
			}
		}
		// Also pull event zips not already covered by the ES zip list.
		for _, eventId := range *sessions {
			if utils.Contains(zipsList, eventId) == true {
				continue
			}
			//download and unzip file
			tempPathToUnzippedFiles, err := m.s3Client.DownloadAndUnzipFile(sessionUploadBucket,
				utils.TempDestinationFolder, eventId+utils.ZipExtension.String(), eventId+utils.ZipExtension.String(), uuidSessionId)
			if err != nil {
				log.Error("downloading zip file failed",
					zap.String("customerId", customerId), zap.String("sessionId", sessionId),
					zap.String("deviceId", deviceId), zap.Error(err))
			}
			if tempPathToUnzippedFiles != utils.EMPTY {
				pathToUnzippedFiles = tempPathToUnzippedFiles
			}
		}
		if pathToUnzippedFiles == utils.EMPTY {
			return errors.New("no Session Found")
		}
		// NOTE(review): imageFiles is sorted here but never used afterwards, and
		// the Glob error is not checked — confirm whether this sort was meant to
		// feed video generation or is dead code.
		imageFiles, err := filepath.Glob(filepath.Join(pathToUnzippedFiles, utils.ASTERISK+imageType))
		sort.Slice(imageFiles, func(i, j int) bool {
			timestampI, _ := helper.ExtractTimestampFromImage(imageFiles[i])
			timeStampJ, _ := helper.ExtractTimestampFromImage(imageFiles[j])
			return timestampI < timeStampJ
		})
	} else {
		// Older app versions: one zip per event id; any download failure aborts.
		for _, eventId := range *sessions {
			//download and unzip file
			pathToUnzippedFiles, err = m.s3Client.DownloadAndUnzipFile(sessionUploadBucket,
				utils.TempDestinationFolder, eventId+utils.ZipExtension.String(), eventId+utils.ZipExtension.String(), uuidSessionId)
			if err != nil {
				log.Error("downloading zip file failed",
					zap.String("customerId", customerId), zap.String("sessionId", sessionId),
					zap.String("deviceId", deviceId), zap.Error(err))
				return err
			}
		}
	}
	folderPath := pathToUnzippedFiles
	//generating video from images
	_, err = ffmpeg.GenerateVideoFromImages(folderPath, uuidSessionId, imageType, snapshotPerSecond)
	if err != nil {
		log.Error("generating video from images failed",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(err))
		metrics.MediaGenerationFailureCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
		return err
	}
	metrics.MediaGenerationSuccessCounter.WithLabelValues(utils.VideoExtension.String()).Inc()
	log.Info("video file generated",
		zap.String("customerId", customerId), zap.String("sessionId", sessionId),
		zap.String("deviceId", deviceId))
	videoFolderPath := filepath.Join(utils.TempDestinationFolder, uuidSessionId)
	err = m.uploadLocalFileToS3(uuidSessionId, sessionId, customerId, deviceId, videoUploadBucket, videoFolderPath)
	if err != nil {
		log.Error("generating video from images failed",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(err))
		return err
	}
	log.Info("removed local files",
		zap.String("customerId", customerId), zap.String("sessionId", sessionId),
		zap.String("deviceId", deviceId))
	return nil
}
// uploadLocalFileToS3 uploads the locally generated video (named by
// uuidSessionId) to the video bucket under the session's canonical name.
func (m *MediaHandler) uploadLocalFileToS3(uuidSessionId, sessionId, customerId, deviceId, videoUploadBucket, folderPath string) error {
	uploadResponse, err := m.s3Client.UploadFile(videoUploadBucket,
		folderPath, uuidSessionId+utils.VideoExtension.String(), sessionId+utils.VideoExtension.String())
	if err != nil {
		// Fix: log message previously read "failed to file uploaded to s3".
		log.Error("failed to upload file to s3",
			zap.String("customerId", customerId), zap.String("sessionId", sessionId),
			zap.String("deviceId", deviceId), zap.Error(err))
		return err
	}
	log.Info(fmt.Sprintf("file uploaded to s3 with response: %v", uploadResponse),
		zap.String("customerId", customerId), zap.String("sessionId", sessionId),
		zap.String("deviceId", deviceId))
	// Fix: return a literal nil on success instead of the stale err variable.
	return nil
}
// fetchLabelsForSession returns the event-type labels of the session's events
// whose names match the requested filters, plus the session's touch count.
// Returns (nil, nil) when the session has no data.
func (m *MediaHandler) fetchLabelsForSession(sessionId, labelFilters string, eventIngestionIndex string) ([]string, *int64) {
	// Default to the error-ish labels when the caller supplies no filter.
	validLabels := []string{ingester.ERROR_LOG, ingester.CRASH_ANALYTICS_EVENT, ingester.ANR_EVENT}
	if labelFilters != "" {
		validLabels = strings.Split(labelFilters, utils.COMMA)
	}
	events, touchCount, err := m.eventsAccessLayer.FetchEventsFromSessionId(sessionId, &es.Page{}, eventIngestionIndex, true)
	if err != nil {
		log.Error("No data for sessionId", zap.String("sessionId", sessionId), zap.Error(err))
		return nil, nil
	}
	var labels []string
	for _, event := range events {
		eventLocal := event
		// NOTE(review): the filter checks EventName but the collected label is
		// EventType — confirm this asymmetry is intended.
		if helper.IsValidLabel(eventLocal.Source.EventAttributes.EventName, validLabels) {
			labels = append(labels, string(eventLocal.Source.EventAttributes.EventType))
		}
	}
	return labels, touchCount
}

View File

@@ -0,0 +1,103 @@
package handler
import (
"alfred/cmd/core/app/helper"
"alfred/internal/clients"
"alfred/model/es"
"alfred/model/ingester"
"alfred/pkg/ffmpeg"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"errors"
"fmt"
"go.uber.org/zap"
"path/filepath"
"sync"
)
// TouchPointsHandler overlays touch-point indicators on session screenshots
// prior to video generation.
type TouchPointsHandler struct {
	sessionsAccessLayer              repositoryAccessLayer.SessionsAccessLayer
	eventsAccessLayer                repositoryAccessLayer.EventsAccessLayer
	videoGenerationStatusAccessLayer repositoryAccessLayer.VideoGenerationStatusAccessLayer
	s3Client                         s3.S3Client
}
// NewTouchPointsHandler wires a TouchPointsHandler from the shared repository
// access layer and S3 client. The httpClient parameter is accepted for
// signature parity with sibling constructors but is not used here.
func NewTouchPointsHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, httpClient *clients.HttpClient) *TouchPointsHandler {
	return &TouchPointsHandler{
		sessionsAccessLayer:              repositories.SessionsAccessLayer,
		eventsAccessLayer:                repositories.EventsAccessLayer,
		s3Client:                         s3Client,
		videoGenerationStatusAccessLayer: repositories.VideoGenerationStatusAccessLayer,
	}
}
// ApplyTouchPoints overlays touch indicators on the session's screenshot
// frames, processing them concurrently into a temp sub-directory. On success it
// returns that directory and true; on failure it returns the original path,
// false, and an aggregated error.
func (m *TouchPointsHandler) ApplyTouchPoints(pathToUnzippedFiles, sessionId, imageType string, eventsFromSession []es.EventResponse) (string, bool, error) {
	imageFiles, err := filepath.Glob(filepath.Join(pathToUnzippedFiles, utils.ASTERISK+imageType))
	// Fix: the Glob error was previously discarded (overwritten below).
	if err != nil {
		log.Error("Touch-points could not be applied as listing images failed", zap.String("sessionId", sessionId), zap.Error(err))
		return pathToUnzippedFiles, false, err
	}
	var wg sync.WaitGroup
	var mu sync.Mutex
	var errorsList []error
	tempDirPath := filepath.Join(pathToUnzippedFiles, utils.TempDirectory)
	if err = utils.CreateDirectory(tempDirPath); err != nil {
		log.Error("Touch-points could not be created as there was some issue creating directory", zap.String("sessionId", sessionId))
		return pathToUnzippedFiles, false, err
	}
	screenShotTimeStampToEventTypeMap := helper.GetScreenShotNameWithTouchPointMap(eventsFromSession)
	for _, imageFile := range imageFiles {
		wg.Add(1)
		go func(imageFile string) {
			defer wg.Done()
			imageTimestamp, tsErr := helper.ExtractTimestampFromImage(imageFile)
			if tsErr != nil {
				mu.Lock()
				errorsList = append(errorsList, tsErr)
				mu.Unlock()
				return
			}
			destFile := filepath.Join(tempDirPath, filepath.Base(imageFile))
			if value, isPresent := screenShotTimeStampToEventTypeMap[imageTimestamp]; isPresent {
				var applyErr error
				if value.EventType == ingester.TOUCH_EVENT {
					applyErr = ffmpeg.ApplyTouchPoints(imageFile, destFile, value.XCoordinate, value.YCoordinate, tempDirPath, imageType)
				}
				// Todo :: Add support later for scroll animation
				// NOTE(review): a mapped event that is not a TOUCH_EVENT leaves the
				// frame uncopied — confirm that dropping such frames is intended.
				if applyErr != nil {
					mu.Lock()
					log.Error("Error while applying touchpoint", zap.String("sessionId", sessionId), zap.Int64("imageTimeStamp", imageTimestamp))
					errorsList = append(errorsList, applyErr)
					mu.Unlock()
					return
				}
			} else {
				// No touch event for this frame: carry it over unchanged.
				if copyErr := utils.CopyFile(imageFile, destFile); copyErr != nil {
					mu.Lock()
					errorsList = append(errorsList, copyErr)
					mu.Unlock()
					return
				}
			}
		}(imageFile)
	}
	wg.Wait()
	if len(errorsList) > 0 {
		errMsg := "Errors occurred during processing of touch-points"
		for _, procErr := range errorsList {
			errMsg += fmt.Sprintf("- %v\n", procErr)
		}
		return pathToUnzippedFiles, false, errors.New(errMsg)
	}
	return tempDirPath, true, nil
}

View File

@@ -0,0 +1,362 @@
package handler
import (
"alfred/api/request"
"alfred/api/response"
"alfred/cmd/core/app/external"
"alfred/cmd/core/app/helper"
"alfred/config"
"alfred/internal/clients"
"alfred/model/common"
"alfred/model/core"
"alfred/model/es"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"encoding/json"
"fmt"
"github.com/gin-gonic/gin"
"go.uber.org/zap"
"io/ioutil"
"math"
"net/http"
"os"
"path/filepath"
"sort"
"sync"
"time"
)
// WebSessionHandler serves web (browser) session replay data, fetching session
// metadata from Elasticsearch and event payloads from S3.
type WebSessionHandler struct {
	webSessionAccessLayer repositoryAccessLayer.WebSessionsAccessLayer
	s3Client              s3.S3Client
	customerService       *external.CustomerService
}
// NewWebSessionHandler wires a WebSessionHandler from the shared repository
// access layer, S3 client, and the HTTP client used for the customer service.
func NewWebSessionHandler(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, httpClient *clients.HttpClient) *WebSessionHandler {
	return &WebSessionHandler{
		webSessionAccessLayer: repositories.WebSessionsAccessLayer,
		s3Client:              s3Client,
		customerService:       external.NewCustomerService(httpClient),
	}
}
// FetchWebSessions returns the replay payload for a single web session,
// assembled either from per-event JSON files (SDK versions at or above
// MinWebVersionSupportingFolderUpload) or from gzipped event archives.
func (s *WebSessionHandler) FetchWebSessions(c *gin.Context) {
	clientName, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		log.Error(utils.INVALID_WEB_CLIENT, zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	sessionId := c.Param("id")
	webSessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.WebSessionUploadIndexClientMap[clientName]
	// NOTE(review): logging the raw session token leaks a credential into logs —
	// consider redacting or removing this field.
	log.Info("Fetch Web Session", zap.String("sessionId", sessionId), zap.String("email", c.GetHeader("X-User-Email")),
		zap.String("sessionToken", c.GetHeader("X-Session-Token")))
	sessionResponse, err := s.webSessionAccessLayer.FetchWebSessionsWithSessionId(sessionId, webSessionUploadIndex)
	if err != nil {
		log.Error("error while fetching session details", zap.String("sessionId", sessionId))
		c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
			SessionId: sessionId,
		}))
		return
	}
	// Fix: guard against an empty result before indexing sessionResponse[0],
	// which previously panicked for unknown session ids.
	if len(sessionResponse) == 0 {
		notFoundErr := fmt.Errorf("no web session found for id %s", sessionId)
		log.Error("no web session found", zap.String("sessionId", sessionId))
		c.JSON(http.StatusNotFound, utils.ErrorResponse(notFoundErr, http.StatusNotFound, response.SessionErrorData{
			SessionId: sessionId,
		}))
		return
	}
	var genericResponse []common.Response
	finalResponse := response.WebSessionResponseData{
		BaseAttributesDTO: sessionResponse[0].Source.WebBaseAttributes,
	}
	if finalResponse.BaseAttributesDTO.Version >= config.GetCoreConfig().S3Config.MinWebVersionSupportingFolderUpload {
		// Newer SDKs upload a folder of JSON event files per session.
		data, err := s.downloadAndProcessJsonFiles(sessionId, clientName)
		if err != nil {
			log.Error("error while downloading json session data from s3", zap.String("sessionId", sessionId))
			c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
				SessionId: sessionId,
			}))
			return
		}
		finalResponse.SessionAttributes.Data = data
	} else {
		// Older SDKs upload one gz archive per event; stitch them in ES order.
		eventResponsesMap, err := s.downloadAndProcessWebZips(sessionId, clientName, sessionResponse)
		if err != nil {
			log.Error("error while downloading web zips session data from s3", zap.String("sessionId", sessionId))
			c.JSON(http.StatusInternalServerError, utils.ErrorResponse(err, http.StatusInternalServerError, response.SessionErrorData{
				SessionId: sessionId,
			}))
			return
		}
		for _, session := range sessionResponse {
			finalResponse.SessionAttributes.Data = append(finalResponse.SessionAttributes.Data, eventResponsesMap[session.Source.WebSessionAttributes.EventId]...)
		}
	}
	genericResponse = utils.AddDataToResponse(finalResponse, http.StatusOK, genericResponse)
	c.JSON(http.StatusMultiStatus, utils.SuccessPaginatedResponse(genericResponse, common.Page{
		PageSize: len(genericResponse),
	}, http.StatusMultiStatus))
}
// downloadAndProcessJsonFiles pulls the session's JSON event files from S3
// into a local folder, then parses and time-orders them.
func (s *WebSessionHandler) downloadAndProcessJsonFiles(sessionId, clientName string) ([]string, error) {
	localFolder := filepath.Join(utils.TempDestinationFolder, sessionId)
	if !utils.FolderExists(localFolder) {
		_ = utils.CreateDirectory(localFolder)
	}
	bucket := config.GetCoreConfig().S3Config.WebSessionBucketClientMap[clientName]
	if _, err := s.s3Client.DownloadAllFilesFromFolder(bucket, sessionId, localFolder); err != nil {
		return nil, err
	}
	return s.processJSONFiles(localFolder)
}
// processJSONFiles parses every JSON event file under targetFolder, orders the
// events chronologically by EventTimestamp, and returns the concatenated DOM
// event payloads. Empty files are skipped; a malformed file aborts with an
// error identifying the file.
func (s *WebSessionHandler) processJSONFiles(targetFolder string) ([]string, error) {
	// Read all files in the directory
	files, err := ioutil.ReadDir(targetFolder)
	if err != nil {
		return nil, fmt.Errorf("failed to read directory: %w", err)
	}
	// Slice to store parsed JSON data
	var webEventsList []core.WebEventJsonModel
	for _, file := range files {
		// Fix: skip sub-directories, which os.ReadFile would reject.
		if file.IsDir() {
			continue
		}
		filePath := filepath.Join(targetFolder, file.Name())
		content, err := os.ReadFile(filePath)
		if err != nil {
			return nil, fmt.Errorf("failed to read file %s: %w", filePath, err)
		}
		// Empty files carry no events; len() also covers the nil case.
		if len(content) == 0 {
			continue
		}
		var webEvent core.WebEventJsonModel
		if err := json.Unmarshal(content, &webEvent); err != nil {
			return nil, fmt.Errorf("failed to parse JSON in file %s: %w", filePath, err)
		}
		webEventsList = append(webEventsList, webEvent)
	}
	// Sort the parsed events by event_timestamp before flattening.
	sort.Slice(webEventsList, func(event1, event2 int) bool {
		return webEventsList[event1].EventTimestamp < webEventsList[event2].EventTimestamp
	})
	// Extract the dom data in sorted order.
	var sortedWebEventsDataList []string
	for _, webEvent := range webEventsList {
		sortedWebEventsDataList = append(sortedWebEventsDataList, webEvent.DomEventsData...)
	}
	return sortedWebEventsDataList, nil
}
// downloadAndProcessWebZips fetches each event's gz payload from S3 with
// bounded concurrency and returns a map of event id to its decoded data.
// Returns an error if any download fails.
func (s *WebSessionHandler) downloadAndProcessWebZips(sessionId, clientName string, sessionResponse []es.WebSessionResponse) (map[string][]string, error) {
	var waitGroupForAllS3Downloads sync.WaitGroup
	// Fix: the result map was previously written from multiple goroutines
	// without synchronization (a data race); mu now guards both the map and
	// the captured error.
	var mu sync.Mutex
	responsesMap := make(map[string][]string)
	var firstErr error
	maxConcurrency := config.GetCoreConfig().MaxFetchWebVideoGofuncConcurrency
	semaphore := make(chan struct{}, maxConcurrency)
	for _, session := range sessionResponse {
		// Acquire a slot in the semaphore (blocks if the limit is reached)
		semaphore <- struct{}{}
		waitGroupForAllS3Downloads.Add(1)
		sessionLocal := session
		go func(eventId string) {
			defer func() {
				// Release the slot in the semaphore when the goroutine exits
				<-semaphore
				waitGroupForAllS3Downloads.Done()
			}()
			data, downloadErr := s.getWebSessionDataFromS3(eventId, clientName)
			mu.Lock()
			defer mu.Unlock()
			if downloadErr != nil {
				// Fix: previously only a bool was set and a nil error was
				// returned to the caller, masking the failure.
				if firstErr == nil {
					firstErr = downloadErr
				}
				log.Error("error while fetching session data from s3", zap.String("sessionId", sessionId), zap.String("eventId", eventId))
				return
			}
			responsesMap[eventId] = data
		}(sessionLocal.Source.WebSessionAttributes.EventId)
	}
	waitGroupForAllS3Downloads.Wait()
	close(semaphore)
	if firstErr != nil {
		return nil, firstErr
	}
	return responsesMap, nil
}
// FetchAllWebSessions lists web sessions matching the request's filters
// (agent, ticket, session, device, email, phone, customer, time range),
// maps external customer ids to internal ones, and returns a paginated
// multi-status response.
func (s *WebSessionHandler) FetchAllWebSessions(c *gin.Context) {
	clientName, err := helper.ValidateAPIKeyHeaders(c)
	if err != nil {
		log.Error(utils.INVALID_WEB_CLIENT, zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	agentId := c.Query("agent_id")
	ticketId := c.Query("ticket_id")
	sessionId := c.Query("session_id")
	deviceId := c.Query("device_id")
	emailId := c.Query("email_id")
	phoneNumber := c.Query("phone_number")
	customerId := c.Query("customer_id")
	sortBy := helper.WebSortingMapper(c.Query("sort_by"))
	startTimestamp, endTimestamp, err := utils.ValidateTimestampsForWeb(c.Query("start_time"), c.Query("end_time"), 15*time.Minute)
	if err != nil {
		log.Error("error in query parameters", zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	webSessionUploadIndex := config.GetCoreConfig().ElasticSearchConfig.WebSessionUploadIndexClientMap[clientName]
	webSessionUploadIndexList := helper.CreateSearchIndex(webSessionUploadIndex, startTimestamp, endTimestamp)
	pageSize, pageNumber, sortDirection, err := utils.ValidatePage(c.Query("page_size"), c.Query("page_number"), c.Query("sort_direction"))
	if err != nil {
		log.Error("error in query parameters", zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	page := es.Page{
		PageSize:      pageSize,
		PageNumber:    pageNumber,
		SortDirection: es.SortDirection(sortDirection),
	}
	var deviceIds []string
	if deviceId != utils.EMPTY {
		deviceIds = []string{deviceId}
	}
	deviceIdsLocal, err := s.customerService.GetDeviceIds(phoneNumber, customerId, deviceIds)
	if err != nil {
		// Fix: this error was previously discarded (silently overwritten below);
		// the lookup stays best-effort but the failure is now surfaced in logs.
		log.Error("error while fetching device ids from customer service", zap.Error(err))
	}
	var clientProjectNameMap = config.GetCoreConfig().ClientProjectNameMap
	var webSessionResponse []es.WebSessionResponse
	clientsWithoutDurationResponse := config.GetCoreConfig().ClientsWithoutDurationResponse
	if utils.Contains(clientsWithoutDurationResponse, clientName) {
		webSessionResponse, err = s.webSessionAccessLayer.FetchAllWebSession(request.WebSessionFilters{
			StartTimestamp: startTimestamp,
			EndTimestamp:   endTimestamp,
			ProjectName:    clientProjectNameMap[clientName],
			SessionId:      sessionId,
			DeviceId:       deviceIdsLocal,
			TicketId:       ticketId,
			AgentId:        agentId,
			EmailId:        emailId,
			SortBy:         sortBy,
		}, &page, webSessionUploadIndexList)
	} else {
		// NOTE(review): this branch passes clientName as ProjectName while the
		// branch above maps it through clientProjectNameMap — confirm intended.
		webSessionResponse, err = s.webSessionAccessLayer.FetchAllWebSessionWithDuration(request.WebSessionFilters{
			StartTimestamp: startTimestamp,
			EndTimestamp:   endTimestamp,
			ProjectName:    clientName,
			SessionId:      sessionId,
			DeviceId:       deviceIdsLocal,
			TicketId:       ticketId,
			AgentId:        agentId,
			EmailId:        emailId,
			SortBy:         sortBy,
		}, &page, webSessionUploadIndex, webSessionUploadIndexList)
	}
	if err != nil {
		log.Error("error while fetching web session details", zap.Error(err))
		c.JSON(http.StatusBadRequest, utils.ErrorResponse(err, http.StatusBadRequest, nil))
		return
	}
	// Replace external customer ids in session metadata with internal reference
	// ids, caching lookups so each external id is resolved only once.
	var externalInternalCustomerIdMap = make(map[string]string)
	var mappedSessionResponseData []es.WebSessionResponse
	for _, session := range webSessionResponse {
		sessionLocal := session
		value, found := sessionLocal.Source.WebBaseAttributes.Metadata[utils.CUSTOMER_ID]
		if found {
			externalCustomerId := value.(string)
			if _, exists := externalInternalCustomerIdMap[externalCustomerId]; !exists {
				externalInternalCustomerIdMap[externalCustomerId] = s.customerService.GetCustomerRefId(externalCustomerId)
			}
			internalCustomerId := externalInternalCustomerIdMap[externalCustomerId]
			sessionLocal.Source.WebBaseAttributes.Metadata[utils.CUSTOMER_ID] = internalCustomerId
		}
		mappedSessionResponseData = append(mappedSessionResponseData, sessionLocal)
	}
	if customerId != utils.EMPTY {
		mappedSessionResponseData = s.filterWebSessionByCustomerId(mappedSessionResponseData, customerId)
	}
	var genericResponse []common.Response
	for _, session := range mappedSessionResponseData {
		genericResponse = utils.AddDataToResponse(response.WebSessionResponseData{
			BaseAttributesDTO: session.Source.WebBaseAttributes,
			DurationInMillis:  session.WebSessionDurationInMillis,
		}, http.StatusOK, genericResponse)
	}
	c.JSON(http.StatusMultiStatus, utils.SuccessPaginatedResponse(genericResponse, common.Page{
		PageSize:      len(genericResponse),
		TotalPages:    int64(math.Ceil(float64(page.TotalSize) / float64(page.PageSize))),
		PageNumber:    pageNumber,
		TotalElements: page.TotalSize,
	}, http.StatusMultiStatus))
}
// getWebSessionDataFromS3 downloads the event's gz archive from S3, decodes its
// JSON string-array payload, and cleans up the local file afterwards.
func (s *WebSessionHandler) getWebSessionDataFromS3(eventId, clientName string) ([]string, error) {
	fileName := eventId + utils.GZExtension.String()
	err := s.s3Client.DownloadFile(config.GetCoreConfig().S3Config.WebSessionBucketClientMap[clientName], utils.TempDestinationFolder, fileName, fileName)
	if err != nil {
		return nil, err
	}
	// Fix: schedule cleanup as soon as the download succeeds. Previously the
	// defer sat after the parse steps, so the local file leaked whenever
	// ReadFile or Unmarshal failed.
	defer s.deleteWebFileFromLocal(filepath.Join(utils.TempDestinationFolder, fileName))
	data, err := s3.ReadFile(filepath.Join(utils.TempDestinationFolder, eventId), utils.GZExtension.String())
	if err != nil {
		return nil, err
	}
	var dataList []string
	if err = json.Unmarshal(data, &dataList); err != nil {
		return nil, err
	}
	return dataList, nil
}
// deleteWebFileFromLocal removes a temp file best-effort; failures are logged
// but never propagated.
func (s *WebSessionHandler) deleteWebFileFromLocal(fileName string) {
	if removeErr := os.Remove(fileName); removeErr != nil {
		log.Error(fmt.Sprintf("not able to delete the file %s", fileName), zap.Error(removeErr))
	}
}
// filterWebSessionByCustomerId keeps sessions whose metadata customer id
// matches the requested customerId. If nothing matches, the unfiltered list is
// returned unchanged (preserving the existing fall-back behavior).
func (s *WebSessionHandler) filterWebSessionByCustomerId(sessions []es.WebSessionResponse, customerId string) []es.WebSessionResponse {
	var filteredSessions []es.WebSessionResponse
	for _, session := range sessions {
		// Fix: the metadata map was previously keyed by the customerId VALUE
		// rather than the utils.CUSTOMER_ID key (cf. the mapping loop in
		// FetchAllWebSessions), so the filter never matched anything.
		if session.Source.WebBaseAttributes.Metadata[utils.CUSTOMER_ID] == customerId {
			filteredSessions = append(filteredSessions, session)
		}
	}
	// NOTE(review): returning all sessions on zero matches mirrors the original
	// behavior — confirm an empty result isn't preferable here.
	if len(filteredSessions) == 0 {
		return sessions
	}
	return filteredSessions
}

View File

@@ -0,0 +1,70 @@
package handler
import (
"alfred/cmd/core/app/external"
"alfred/cmd/core/app/service"
"alfred/config"
"alfred/internal/clients"
"alfred/model/es"
"alfred/pkg/log"
"alfred/pkg/s3"
"alfred/repositoryAccessLayer"
"alfred/utils"
"go.uber.org/zap"
)
// ZipProcessor runs the per-fragment image pipeline (masking, then
// touch-points) over an unzipped frame directory.
type ZipProcessor struct {
	sessionsAccessLayer              repositoryAccessLayer.SessionsAccessLayer
	eventsAccessLayer                repositoryAccessLayer.EventsAccessLayer
	videoGenerationStatusAccessLayer repositoryAccessLayer.VideoGenerationStatusAccessLayer
	s3Client                         s3.S3Client
	customerService                  *external.CustomerService
	maskingService                   service.MaskingService
	touchPointsHandler               *TouchPointsHandler
}
// NewZipProcessor wires a ZipProcessor, constructing its masking service,
// touch-points handler and customer service from the shared dependencies.
func NewZipProcessor(repositories *repositoryAccessLayer.RepositoryAccessLayer, s3Client s3.S3Client, httpClient *clients.HttpClient) *ZipProcessor {
	return &ZipProcessor{
		sessionsAccessLayer:              repositories.SessionsAccessLayer,
		eventsAccessLayer:                repositories.EventsAccessLayer,
		s3Client:                         s3Client,
		customerService:                  external.NewCustomerService(httpClient),
		maskingService:                   service.NewMaskingServiceImpl(repositories, s3Client, httpClient),
		touchPointsHandler:               NewTouchPointsHandler(repositories, s3Client, httpClient),
		videoGenerationStatusAccessLayer: repositories.VideoGenerationStatusAccessLayer,
	}
}
// ProcessZip runs masking and touch-point overlays over the unzipped frames of
// one fragment, gated by per-client minimum app versions. It returns the path
// holding the processed images, whether any processing actually happened, and
// an error. Masking and touch-point failures are deliberately downgraded to
// logs so video generation can continue with unprocessed frames.
func (m *ZipProcessor) ProcessZip(pathToUnzippedFiles, sessionId, eventIngestionIndex, indexName, clientName, appVersion, zipName, imageType string, isAsyncDSMaskingEnabled bool) (string, bool, error) {
	eventsFromSession, _, err := m.eventsAccessLayer.FetchEventsFromSession(sessionId, &es.Page{}, eventIngestionIndex, indexName)
	if err != nil {
		log.Error("no events were captured while processing zips", zap.String("sessionId", sessionId), zap.Bool("isAsyncDSMaskingEnabled", isAsyncDSMaskingEnabled), zap.Error(err))
		return pathToUnzippedFiles, false, err
	}
	// Masking runs only when globally enabled and the client app is new enough.
	maskedImagesPath, isMasked := pathToUnzippedFiles, false
	minAppVersion := config.GetCoreConfig().MaskingConfig.MinAppVersionCodeClientMap[clientName]
	if config.GetCoreConfig().MaskingConfig.MaskingEnabled && utils.GetIntFromString(appVersion) >= utils.GetIntFromString(minAppVersion) {
		maskedImagesPath, isMasked, err = m.maskingService.MaskImages(pathToUnzippedFiles, eventsFromSession, zipName, clientName, imageType, isAsyncDSMaskingEnabled)
		if err != nil {
			// Deliberate: masking failure is logged, not propagated.
			log.Error("Error occurred while applying masking but marking error as nil", zap.String("sessionId", sessionId), zap.Bool("isAsyncDSMaskingEnabled", isAsyncDSMaskingEnabled), zap.Error(err))
		}
	}
	if isAsyncDSMaskingEnabled {
		// Async DS masking skips the touch-point stage entirely.
		return maskedImagesPath, isMasked, nil
	}
	// Touch-points run on the (possibly masked) frames, with their own
	// per-client version gate.
	imageWithTouchPointsPath, isTouchPointApplied := maskedImagesPath, false
	minAppVersion = config.GetCoreConfig().TouchPointsConfig.MinAppVersionCodeClientMap[clientName]
	if config.GetCoreConfig().TouchPointsConfig.Enabled && utils.GetIntFromString(appVersion) >= utils.GetIntFromString(minAppVersion) {
		imageWithTouchPointsPath, isTouchPointApplied, err = m.touchPointsHandler.ApplyTouchPoints(maskedImagesPath, sessionId, imageType, eventsFromSession)
		if err != nil {
			// Deliberate: on touch-point failure fall back to the original frames
			// and report "not processed" with a nil error.
			log.Error("Error occurred while applying touch-points but marking error as nil", zap.String("sessionId", sessionId), zap.Error(err))
			return pathToUnzippedFiles, false, nil
		}
	}
	isProcessed := false
	if isMasked || isTouchPointApplied {
		isProcessed = true
	}
	return imageWithTouchPointsPath, isProcessed, nil
}

View File

@@ -0,0 +1,216 @@
package helper
import (
"alfred/model/core"
"alfred/model/es"
"alfred/model/ingester"
"alfred/pkg/log"
"alfred/utils"
"fmt"
"github.com/u2takey/go-utils/slice"
"go.uber.org/zap"
"path/filepath"
"strconv"
"strings"
"time"
)
// GetUniqueSessionsFromEventResponse extracts the distinct session IDs from
// the given event responses, preserving first-seen order, together with a map
// from each session ID to the first event response observed for it.
func GetUniqueSessionsFromEventResponse(eventResponse []es.EventResponse) ([]string, map[string]es.EventResponse) {
	seen := make(map[string]bool)
	firstEventBySession := make(map[string]es.EventResponse)
	var orderedIds []string
	for _, resp := range eventResponse {
		id := resp.Source.BaseAttributes.SessionId
		if seen[id] {
			continue
		}
		seen[id] = true
		orderedIds = append(orderedIds, id)
		firstEventBySession[id] = resp
	}
	return orderedIds, firstEventBySession
}
// GetUniqueSessionsFromSessionResponse extracts the distinct session IDs from
// the given session responses, preserving first-seen order, together with a
// map from each session ID to the first session response observed for it.
func GetUniqueSessionsFromSessionResponse(sessionResponse []es.SessionResponse) ([]string, map[string]es.SessionResponse) {
	seen := make(map[string]bool)
	firstBySession := make(map[string]es.SessionResponse)
	var orderedIds []string
	for _, resp := range sessionResponse {
		id := resp.Source.BaseAttributes.SessionId
		if seen[id] {
			continue
		}
		seen[id] = true
		orderedIds = append(orderedIds, id)
		firstBySession[id] = resp
	}
	return orderedIds, firstBySession
}
// GetScreenShotNameWithTouchPointMap builds a map from screenshot timestamp to
// the touch coordinates and event type captured at that moment. Only
// TOUCH_EVENT entries with a positive screenshot time and positive integer
// start coordinates contribute; later events with the same screenshot
// timestamp overwrite earlier ones.
func GetScreenShotNameWithTouchPointMap(eventResponse []es.EventResponse) map[int64]core.EventsAndCoordinatesMapping {
	screenShotTimeStampToEventTypeMap := make(map[int64]core.EventsAndCoordinatesMapping)
	for _, entry := range eventResponse {
		// NOTE(review): the filter reads entry.Source.ScreenshotTime while the
		// map key reads entry.Source.EventAttributes.ScreenshotTime — likely
		// one promoted embedded field accessed two ways, but confirm these
		// cannot diverge.
		if ingester.TOUCH_EVENT == entry.Source.EventAttributes.EventType && entry.Source.ScreenshotTime > 0 {
			// Coordinates arrive as strings inside the loosely-typed
			// attributes bag; skip entries where either is missing or not a
			// positive integer after conversion.
			xCoordinate, okX := entry.Source.EventAttributes.Attributes[utils.START_X].(string)
			yCoordinate, OkY := entry.Source.EventAttributes.Attributes[utils.START_Y].(string)
			if okX && OkY {
				xCoordinateInt := utils.GetIntFromString(xCoordinate)
				yCoordinateInt := utils.GetIntFromString(yCoordinate)
				if xCoordinateInt > 0 && yCoordinateInt > 0 {
					eventAttribute := core.EventsAndCoordinatesMapping{
						XCoordinate: xCoordinateInt,
						YCoordinate: yCoordinateInt,
						EventType:   entry.Source.EventType,
					}
					screenShotTimestamp := entry.Source.EventAttributes.ScreenshotTime
					screenShotTimeStampToEventTypeMap[screenShotTimestamp] = eventAttribute
				}
			}
		}
	}
	return screenShotTimeStampToEventTypeMap
}
// contains reports whether screenName appears in screens.
func contains(screens []string, screenName string) bool {
	for i := range screens {
		if screens[i] == screenName {
			return true
		}
	}
	return false
}
// GetKeysOfMap returns the keys of hashmap in unspecified (map-iteration)
// order. It returns nil for an empty or nil map, matching the previous
// behavior (important where the result is JSON-encoded).
func GetKeysOfMap(hashmap map[string]string) []string {
	if len(hashmap) == 0 {
		return nil
	}
	// Pre-size to the known key count to avoid repeated append growth.
	keys := make([]string, 0, len(hashmap))
	for key := range hashmap {
		keys = append(keys, key)
	}
	return keys
}
// IsValidLabel reports whether eventType is one of the allowed filter labels.
func IsValidLabel(eventType string, filterLabel []string) bool {
	for _, label := range filterLabel {
		if label == eventType {
			return true
		}
	}
	return false
}
// CreateBucketsForSession groups session-upload documents by session ID. It
// returns (1) a map from session ID to the event IDs uploaded for it, (2) a
// map from session ID to device ID, (3) a map from session ID to the raw ES
// responses, and (4) the last non-zero snapshots-per-second value seen,
// falling back to the default recording FPS.
func CreateBucketsForSession(sessions []es.SessionResponse) (*map[string][]string, *map[string]string, *map[string][]es.SessionResponse, int64) {
	eventIdsBySession := make(map[string][]string)
	deviceBySession := make(map[string]string)
	responsesBySession := make(map[string][]es.SessionResponse)
	fps := int64(utils.DEFAULT_RECORDING_FPS)
	for _, session := range sessions {
		id := session.Source.BaseAttributes.SessionId
		deviceBySession[id] = session.Source.BaseAttributes.DeviceId
		if session.Source.BaseAttributes.SnapshotPerSecond != 0 {
			fps = session.Source.BaseAttributes.SnapshotPerSecond
		}
		// append on a missing key starts a fresh slice, so no existence check
		// is needed.
		eventIdsBySession[id] = append(eventIdsBySession[id], session.Source.SessionUploadEventAttributes.EventId)
		responsesBySession[id] = append(responsesBySession[id], session)
	}
	return &eventIdsBySession, &deviceBySession, &responsesBySession, fps
}
// CreateSessionResponseFromEventResponse synthesizes a minimal SessionResponse
// from an event response: it copies the event's base attributes with HasErrors
// forced to true, leaving the upload attributes and CreatedAt at their zero
// values.
func CreateSessionResponseFromEventResponse(eventResponse es.EventResponse) es.SessionResponse {
	// eventResponse is a value copy, so this mutation is local to this call.
	eventResponse.Source.BaseAttributes.HasErrors = true
	return es.SessionResponse{
		// The outer conversion bridges the tag-less struct literal below to
		// the json-tagged anonymous struct type of SessionResponse.Source;
		// the two types are convertible because tags are ignored.
		Source: struct {
			ingester.BaseAttributes               `json:"base_attributes"`
			ingester.SessionUploadEventAttributes `json:"session_upload_event_attributes"`
			CreatedAt                             int64 `json:"created_at"`
		}(struct {
			ingester.BaseAttributes
			ingester.SessionUploadEventAttributes
			CreatedAt int64
		}{
			BaseAttributes: eventResponse.Source.BaseAttributes,
		}),
	}
}
// GetUniqueSessionsFromSessionUpload returns the distinct session IDs found in
// the given session responses, in first-seen order.
func GetUniqueSessionsFromSessionUpload(sessionResponse []es.SessionResponse) []string {
	var ordered []string
	seen := make(map[string]bool)
	for _, resp := range sessionResponse {
		id := resp.Source.BaseAttributes.SessionId
		if !seen[id] {
			seen[id] = true
			ordered = append(ordered, id)
		}
	}
	return ordered
}
// ExtractTimestampFromImage parses the capture timestamp (epoch milliseconds)
// encoded in an image file's base name, e.g. ".../1696000000000.jpg".
// It returns an error when the base name (minus extension) is not an integer.
func ExtractTimestampFromImage(imageFile string) (int64, error) {
	fileName := filepath.Base(imageFile)
	timestampStr := strings.TrimSuffix(fileName, filepath.Ext(fileName))
	// Parse directly as int64: millisecond epoch timestamps exceed the 32-bit
	// int range, so strconv.Atoi would overflow on 32-bit platforms.
	parsedTimestamp, err := strconv.ParseInt(timestampStr, 10, 64)
	if err != nil {
		return 0, err
	}
	return parsedTimestamp, nil
}
// GenerateIndexNamesWithTimeRanges builds one date-suffixed index name per IST
// calendar day covering [startTimestamp, endTimestamp] (epoch milliseconds),
// e.g. "myindex-2023-9-1". It returns nil when the IST location cannot be
// loaded.
func GenerateIndexNamesWithTimeRanges(startTimestamp, endTimestamp int64, index string) []string {
	ist, err := time.LoadLocation(utils.IST_TIME_ZONE)
	if err != nil {
		log.Error("Error loading IST time zone", zap.Error(err))
		return nil
	}
	start := time.Unix(0, startTimestamp*int64(time.Millisecond)).In(ist)
	end := time.Unix(0, endTimestamp*int64(time.Millisecond)).In(ist)
	var indexes []string
	// Step forward one day at a time until the cursor passes the end instant.
	for cur := start; !cur.After(end); cur = cur.Add(24 * time.Hour) {
		year, month, day := cur.Date()
		indexes = append(indexes, fmt.Sprintf("%s-%d-%d-%d", index, year, month, day))
	}
	log.Info("Searching on indexes", zap.String("indexes", strings.Join(indexes, ", ")))
	return indexes
}
// CreateSearchIndex resolves the ES indexes to query: date-bounded index names
// when both timestamps are positive, otherwise a single wildcard pattern over
// the base index.
func CreateSearchIndex(index string, startTimestamp, endTimestamp int64) []string {
	if startTimestamp > 0 && endTimestamp > 0 {
		return GenerateIndexNamesWithTimeRanges(startTimestamp, endTimestamp, index)
	}
	return []string{index + "*"}
}
// WebSortingMapper translates a UI sort key into the ES field used for web
// sessions. Every key currently maps to the client timestamp field; extend
// this function as new sort options are introduced.
func WebSortingMapper(sortBy string) string {
	if sortBy == "recordedOn" {
		return "base_attributes.client_timestamp"
	}
	// Default: fall back to the client timestamp field.
	return "base_attributes.client_timestamp"
}
// AppSortingMapper translates a UI sort key into the ES field used for app
// sessions. Unknown keys fall back to the document creation time.
func AppSortingMapper(sortBy string) string {
	if sortBy == "recordedOn" {
		return "base_attributes.client_ts"
	}
	// Default: sort by document creation time.
	return "created_at"
}

View File

@@ -0,0 +1,56 @@
package helper
import (
"alfred/api/response"
"alfred/model/es"
"alfred/model/ingester"
)
// MapToFilterData wraps each string as a FilterData entry whose Label and
// Value are both the string itself. It returns nil for empty input, matching
// the previous behavior (nil JSON-encodes as null, not []).
func MapToFilterData(values []string) []response.FilterData {
	if len(values) == 0 {
		return nil
	}
	// Pre-size to the known length to avoid repeated append growth.
	res := make([]response.FilterData, 0, len(values))
	for _, value := range values {
		res = append(res, response.FilterData{
			Label: value,
			Value: value,
		})
	}
	return res
}
// CreateSessionResponse flattens an ES session document into the search-API
// response DTO. The caller supplies customerId because it is resolved outside
// the ES document.
func CreateSessionResponse(session es.SessionResponse, customerId string) response.SearchSessionResponseData {
	return response.SearchSessionResponseData{
		CreatedAt: session.Source.CreatedAt,
		// Device state snapshots at session start and session end, in order.
		DeviceAttributes: []ingester.DeviceAttributes{
			session.Source.SessionUploadEventAttributes.BeginningDeviceAttributes,
			session.Source.SessionUploadEventAttributes.EndDeviceAttributes,
		},
		BaseAttributesDTO: response.BaseAttributesDTO{
			AppVersionCode:     session.Source.AppVersionCode,
			AppVersionName:     session.Source.AppVersionName,
			DeviceId:           session.Source.DeviceId,
			DeviceModel:        session.Source.DeviceModel,
			DeviceManufacturer: session.Source.DeviceManufacturer,
			ScreenResolution:   session.Source.ScreenResolution,
			AppOS:              session.Source.AppOS,
			OsVersion:          session.Source.OsVersion,
			Latitude:           session.Source.Latitude,
			Longitude:          session.Source.Longitude,
			NetworkType:        session.Source.NetworkType,
			CustomerId:         customerId,
			UpTime:             session.Source.UpTime,
			CarrierName:        session.Source.CarrierName,
			Metadata:           session.Source.Metadata,
			SessionId:          session.Source.SessionId,
			ParentSessionId:    session.Source.ParentSessionId,
			// NOTE(review): end timestamp comes from ClientTs — presumably the
			// upload event's client time marks session end; confirm against
			// the ingester schema.
			StartTimestamp:    session.Source.SessionTimeStamp,
			EndTimestamp:      session.Source.ClientTs,
			SnapshotPerSecond: session.Source.SnapshotPerSecond,
			HasErrors:         session.Source.HasErrors,
			ImageType:         session.Source.ImageType,
		},
		Metadata: response.VideoMetadata{
			Duration: session.SessionDuration,
		},
	}
}

View File

@@ -0,0 +1,62 @@
package helper
import (
"alfred/config"
"alfred/model/es"
"alfred/utils"
"path/filepath"
"sort"
"strconv"
)
// GetScreenshotsToBeMaskedByScreens determines which screenshots fall inside
// the on-screen time window of any screen listed in screens and must
// therefore be masked. It returns two parallel maps keyed by screen name: the
// absolute screenshot paths and the bare file names.
//
// A window starts at the matching screen's first event timestamp minus the
// configured mask buffer, and ends at the first subsequent event on a
// different screen (or at the last event overall if the screen never changes).
func GetScreenshotsToBeMaskedByScreens(eventResponse []es.EventResponse, folderPathName, imageType string, screens []string) (map[string][]string, map[string][]string) {
	// Collect and sort the capture timestamps of all images of this type;
	// glob/parse errors are deliberately ignored (missing files simply yield
	// no candidates).
	imageFiles, _ := filepath.Glob(filepath.Join(folderPathName, utils.ASTERISK+imageType))
	var screenshotTimes []int64
	for _, image := range imageFiles {
		imageTimestamp, _ := ExtractTimestampFromImage(image)
		screenshotTimes = append(screenshotTimes, imageTimestamp)
	}
	sort.Slice(screenshotTimes, func(i, j int) bool {
		return screenshotTimes[i] < screenshotTimes[j]
	})
	screenshotPathUsed := make(map[string][]string)
	screenshotNameUsed := make(map[string][]string)
	for eventIterator := 0; eventIterator < len(eventResponse); eventIterator++ {
		eventResp := eventResponse[eventIterator]
		var startTime, endTime int64
		if contains(screens, eventResp.Source.ScreenName) {
			startTime = eventResp.Source.EventAttributes.EventTimestamp - config.GetCoreConfig().MaskingConfig.BufferTimeForMaskInMillis
			// Scan forward (starting at the current event) until the screen
			// changes; the outer index is advanced past the scanned range so
			// the same window is not processed twice. The -1 compensates for
			// the outer loop's increment, landing on the first event of the
			// next screen.
			for innerEventIterator, innerEvent := range eventResponse[eventIterator:] {
				if eventResp.Source.ScreenName != innerEvent.Source.ScreenName {
					endTime = innerEvent.Source.EventAttributes.EventTimestamp
					eventIterator = eventIterator + innerEventIterator - 1
					break
				}
				if eventIterator+innerEventIterator == len(eventResponse)-1 {
					// Reached the last event while still on the same screen.
					endTime = innerEvent.Source.EventAttributes.EventTimestamp
					eventIterator = eventIterator + innerEventIterator
					break
				}
			}
			if startTime > 0 && endTime > 0 {
				for screenshotTimeIterator, screenshotTime := range screenshotTimes {
					if screenshotTime >= startTime && screenshotTime <= endTime {
						screenshot := strconv.FormatInt(screenshotTime, 10)
						// Zero out the consumed timestamp so the same
						// screenshot cannot be attributed to two screens.
						screenshotTimes[screenshotTimeIterator] = 0
						fileName := screenshot + imageType
						screenshotNameUsed[eventResp.Source.ScreenName] = append(screenshotNameUsed[eventResp.Source.ScreenName], fileName)
						screenshotPathUsed[eventResp.Source.ScreenName] = append(screenshotPathUsed[eventResp.Source.ScreenName], filepath.Join(folderPathName, fileName))
					}
				}
			}
		}
	}
	return screenshotPathUsed, screenshotNameUsed
}

View File

@@ -0,0 +1,23 @@
package helper
import (
"alfred/config"
"alfred/utils"
"errors"
"github.com/gin-gonic/gin"
)
// ValidateAPIKeyHeaders resolves the client name for the request's X-API-KEY
// header. It returns an error when the key is not configured or when the
// configured value cannot be parsed as a string.
func ValidateAPIKeyHeaders(c *gin.Context) (string, error) {
	key := c.Request.Header.Get(utils.X_API_KEY)
	rawValue, ok := config.GetCoreConfig().ApiKeyClientMap[key]
	if !ok {
		return utils.EMPTY, errors.New(utils.INVALID_CLIENT)
	}
	clientName, err := utils.GetStringValue(rawValue)
	if err != nil {
		return utils.EMPTY, errors.New("client name could not be parsed")
	}
	return clientName, nil
}

View File

@@ -0,0 +1,122 @@
package helper
import (
"alfred/pkg/log"
"alfred/utils"
"archive/zip"
"bytes"
"github.com/ulikunitz/xz"
"go.uber.org/zap"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
// CreateZipXzFile packs the files in sourceDir whose names end with imagetype
// (restricted to fileNameList when it is non-empty) into an in-memory .zip
// archive and writes that archive xz-compressed to zipXZFilePath.
//
// The xz writer and the output file are closed explicitly so that
// finalization failures surface as errors: an xz stream is only complete once
// Close flushes its final block, and the previous deferred Close silently
// discarded that error, allowing corrupt archives to be reported as success.
func CreateZipXzFile(zipXZFilePath string, sourceDir, imagetype string, fileNameList []string) error {
	// Step 1: create an in-memory buffer for the .zip content.
	var zipBuffer bytes.Buffer
	zipWriter := zip.NewWriter(&zipBuffer)
	files, err := ioutil.ReadDir(sourceDir)
	if err != nil {
		log.Error("Error occurred while reading source directory", zap.String("zipXZFilePath", zipXZFilePath), zap.String("sourceDirectory", sourceDir), zap.Error(err))
		return err
	}
	// Add the matching files to the zip archive.
	for _, file := range files {
		if file.IsDir() || !strings.HasSuffix(file.Name(), imagetype) || (len(fileNameList) > 0 && !utils.Contains(fileNameList, file.Name())) {
			continue
		}
		fileContents, err := ioutil.ReadFile(filepath.Join(sourceDir, file.Name()))
		if err != nil {
			log.Error("Error occurred while reading image files", zap.String("zipXZFilePath", zipXZFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
		zipFile, err := zipWriter.Create(file.Name())
		if err != nil {
			log.Error("Error occurred while creating zip writer", zap.String("zipXZFilePath", zipXZFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
		if _, err = zipFile.Write(fileContents); err != nil {
			log.Error("Error occurred while adding file to zip", zap.String("zipXZFilePath", zipXZFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
	}
	// Step 2: finalize the .zip data (writes the central directory).
	if err = zipWriter.Close(); err != nil {
		log.Error("Error while finalizing zip writer", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		return err
	}
	// Step 3: create the .zip.xz file.
	zipXZFile, err := os.Create(zipXZFilePath)
	if err != nil {
		log.Error("Error while creating .zip.xz file", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		return err
	}
	// Step 4: compress the .zip buffer with xz.
	xzWriter, err := xz.NewWriter(zipXZFile)
	if err != nil {
		log.Error("Error creating xz writer", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		zipXZFile.Close()
		return err
	}
	if _, err = io.Copy(xzWriter, &zipBuffer); err != nil {
		log.Error("Error writing .zip data to .zip.xz file", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		xzWriter.Close()
		zipXZFile.Close()
		return err
	}
	// Closing the xz writer flushes the final compressed block; a failure
	// here means the on-disk archive is incomplete and must be reported.
	if err = xzWriter.Close(); err != nil {
		log.Error("Error finalizing xz stream", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		zipXZFile.Close()
		return err
	}
	if err = zipXZFile.Close(); err != nil {
		log.Error("Error closing .zip.xz file", zap.String("zipXZFilePath", zipXZFilePath), zap.Error(err))
		return err
	}
	return nil
}
// CreateZipFile creates a .zip archive at zipFilePath containing the files in
// sourceDir whose names end with imagetype (restricted to fileNameList when
// it is non-empty).
//
// The zip writer is closed explicitly (not deferred) because Close writes the
// zip central directory: the previous deferred Close discarded its error, so
// a corrupt archive could be reported as success. The inner entry writer is
// also renamed so it no longer shadows the output *os.File.
func CreateZipFile(zipFilePath string, sourceDir, imagetype string, fileNameList []string) error {
	zipFile, err := os.Create(zipFilePath)
	if err != nil {
		return err
	}
	defer zipFile.Close()
	zipWriter := zip.NewWriter(zipFile)
	files, err := ioutil.ReadDir(sourceDir)
	if err != nil {
		log.Error("Error occurred while reading source directory", zap.String("zipFilePath", zipFilePath), zap.String("sourceDirectory", sourceDir), zap.Error(err))
		return err
	}
	for _, file := range files {
		if file.IsDir() || !strings.HasSuffix(file.Name(), imagetype) || (len(fileNameList) > 0 && !utils.Contains(fileNameList, file.Name())) {
			continue
		}
		fileContents, err := ioutil.ReadFile(filepath.Join(sourceDir, file.Name()))
		if err != nil {
			log.Error("Error occurred while reading image files", zap.String("zipFilePath", zipFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
		entryWriter, err := zipWriter.Create(file.Name())
		if err != nil {
			log.Error("Error occurred while creating zip writer", zap.String("zipFilePath", zipFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
		if _, err = entryWriter.Write(fileContents); err != nil {
			log.Error("Error occurred while adding file to zip", zap.String("zipFilePath", zipFilePath), zap.String("sourceDirectory", sourceDir), zap.String("fileName", file.Name()), zap.Error(err))
			return err
		}
	}
	// Finalize the archive; this writes the central directory, and its error
	// must be checked (error paths above leave the archive invalid anyway, so
	// skipping Close there loses nothing — the file itself is closed by the
	// defer).
	if err = zipWriter.Close(); err != nil {
		log.Error("Error while finalizing zip writer", zap.String("zipFilePath", zipFilePath), zap.Error(err))
		return err
	}
	return nil
}

Some files were not shown because too many files have changed in this diff Show More