INFRA-2493 | Ashvin | Send dump type to airflow (#748)

* INFRA-2493 | Ashvin | Send dump type to airflow

* INFRA-2493 | Ashvin | Encapsulate TriggerDumpResponse

* INFRA-2493 | Ashvin | Remove redundant semicolon

* INFRA-2493 | Ashvin | Reformat DumpTypeConverter.java

* INFRA-2493 | Ashvin | Reformat TriggerDumpResponse.java

* INFRA-2493 | Ashvin | Dynamically calculate dump s3 bucket name
This commit is contained in:
Ashvin S
2023-12-04 18:39:47 +05:30
committed by GitHub
parent 4b0a81df8c
commit dd3cce46e0
17 changed files with 155 additions and 34 deletions

View File

@@ -125,10 +125,6 @@
{
"name": "SERVICE_DUMP_DAG_ID",
"value": "$SERVICE_DUMP_DAG_ID"
},
{
"name": "SERVICE_DUMP_BUCKET_NAME",
"value": "$SERVICE_DUMP_BUCKET_NAME"
}
],
"namespace": "$NAMESPACE",

View File

@@ -40,6 +40,8 @@ public class AirflowApiResponse {
private String dumpName;
private String dumpType;
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {

View File

@@ -54,7 +54,8 @@ public class AirflowClient {
"target_container_name", dumpTriggerRequest.getContainerName(),
"image", dumpTriggerRequest.getRunnerImage(),
"pre_signed_url", dumpTriggerRequest.getPreSignedUrl(),
"dump_name", dumpTriggerRequest.getDumpName()
"dump_name", dumpTriggerRequest.getDumpName(),
"dump_type", dumpTriggerRequest.getDumpType()
));
var payload = convertMapToString(payloadMap);
@@ -77,12 +78,12 @@ public class AirflowClient {
return getAirflowApiResponse(dagId, runId, response, airflowApiResponse,
dumpTriggerRequest.getCluster(), dumpTriggerRequest.getNamespace(),
dumpTriggerRequest.getPodName(), dumpTriggerRequest.getDumpName()
dumpTriggerRequest.getPodName(), dumpTriggerRequest.getDumpName(),
dumpTriggerRequest.getDumpType()
);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("Failed to connect to airflow", e.getCause());
}
return null;
}
private String convertMapToString(Map<String, Object> payloadMap) {
@@ -103,7 +104,8 @@ public class AirflowClient {
String cluster,
String namespace,
String podName,
String dumpName
String dumpName,
String dumpType
) {
if (response.statusCode() == HttpStatus.OK.value()) {
var dashboardUri = dashboardUri(dagId, runId);
@@ -113,6 +115,7 @@ public class AirflowClient {
airflowApiResponse.setNamespace(namespace);
airflowApiResponse.setPodName(podName);
airflowApiResponse.setDumpName(dumpName);
airflowApiResponse.setDumpType(dumpType);
log.info("Link to Airflow DAG: " + dashboardUri);
return airflowApiResponse;

View File

@@ -1,6 +1,7 @@
package com.navi.infra.portal.v2.dump;
import com.navi.infra.portal.domain.BaseEntity;
import javax.persistence.Convert;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
@@ -31,4 +32,7 @@ public class Dump extends BaseEntity {
private DumpStatus status;
private String remarks;
private String dumpName;
@Convert(converter = DumpTypeConverter.class)
private DumpType dumpType;
}

View File

@@ -44,15 +44,17 @@ public class DumpController {
}
@PostMapping
public ResponseEntity<List<DumpDto>> triggerDump(@RequestBody DumpRequest dumpRequest) {
public ResponseEntity<TriggerDumpResponse> triggerDump(@RequestBody TriggerDumpRequest triggerDumpRequest) {
try {
log.info("Triggering dump for manifest id: {}, dump type: {}, pod names: {}",
dumpRequest.getManifestId(), dumpRequest.getDumpType(), dumpRequest.getPodNames());
final var dtoList = dumpService.createDump(dumpRequest);
return new ResponseEntity<>(dtoList, CREATED);
triggerDumpRequest.getManifestId(), triggerDumpRequest.getDumpType(),
triggerDumpRequest.getPodNames());
final var dtoList = dumpService.createDump(triggerDumpRequest);
return ResponseEntity.status(CREATED).body(TriggerDumpResponse.of(dtoList));
} catch (Exception e) {
log.error("Failed to trigger dump", e);
return ResponseEntity.status(INTERNAL_SERVER_ERROR).build();
return ResponseEntity.status(INTERNAL_SERVER_ERROR)
.body(TriggerDumpResponse.of(e.getMessage()));
}
}

View File

@@ -4,7 +4,7 @@ import java.util.List;
public interface DumpService {
List<DumpDto> createDump(DumpRequest dumpRequest);
List<DumpDto> createDump(TriggerDumpRequest triggerDumpRequest);
DumpDto updateStatus(DumpStatusUpdateRequest dumpStatusUpdateRequest);

View File

@@ -2,6 +2,7 @@ package com.navi.infra.portal.v2.dump;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.ASIA_KOLKATA;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.createRunId;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.getBucketName;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.getCluster;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.getContainerFromPod;
import static com.navi.infra.portal.v2.dump.DumpServiceUtilities.getDumpName;
@@ -35,7 +36,7 @@ class DumpServiceImpl implements DumpService {
private final DumpRepository dumpRepository;
private final AwsS3Client awsS3Client;
private final String dagId;
private final String bucketName;
private final String vertical;
private final String runnerImage;
public DumpServiceImpl(
@@ -44,7 +45,7 @@ class DumpServiceImpl implements DumpService {
DumpRepository dumpRepository,
AwsS3Client awsS3Client,
@Value("${service-dump.dag.id}") String dagId,
@Value("${service-dump.bucket.name}") String bucketName,
@Value("${portal.vertical}") String vertical,
@Value("${service-dump.image.name}") String runnerImage
) {
this.airflowClient = airflowClient;
@@ -52,25 +53,25 @@ class DumpServiceImpl implements DumpService {
this.dumpRepository = dumpRepository;
this.awsS3Client = awsS3Client;
this.dagId = dagId;
this.bucketName = bucketName;
this.vertical = vertical;
this.runnerImage = runnerImage;
}
@Override
public List<DumpDto> createDump(DumpRequest dumpRequest) {
final var manifest = manifestService.fetchById(dumpRequest.getManifestId());
public List<DumpDto> createDump(TriggerDumpRequest triggerDumpRequest) {
final var manifest = manifestService.fetchById(triggerDumpRequest.getManifestId());
final var cluster = getCluster(manifest);
final var namespace = getNamespace(manifest);
final var environment = getEnvironment(manifest);
final var manifestName = manifest.getName();
return createDump(dumpRequest, cluster, namespace, environment, manifestName)
return createDump(triggerDumpRequest, cluster, namespace, environment, manifestName)
.map(DumpDto::new)
.collect(toList());
}
private Stream<Dump> createDump(
DumpRequest dumpRequest,
TriggerDumpRequest triggerDumpRequest,
String cluster,
String namespace,
String env,
@@ -80,26 +81,28 @@ class DumpServiceImpl implements DumpService {
var dump = new Dump();
dump.setDagId(response.getDagId());
dump.setRunId(response.getDagRunId());
dump.setManifestId(dumpRequest.getManifestId());
dump.setManifestId(triggerDumpRequest.getManifestId());
dump.setCluster(response.getCluster());
dump.setNamespace(response.getNamespace());
dump.setPodName(response.getPodName());
dump.setStatus(DumpStatus.STARTED);
dump.setDumpName(response.getDumpName());
dump.setDumpType(DumpType.of(response.getDumpType()));
return dump;
};
return dumpRequest.getPodNames()
return triggerDumpRequest.getPodNames()
.parallelStream()
.distinct()
.map(createDumpTriggerRequest(dumpRequest, cluster, namespace, env, manifestName))
.map(
createDumpTriggerRequest(triggerDumpRequest, cluster, namespace, env, manifestName))
.map(airflowClient::triggerDag)
.map(toDumpEntity)
.map(dumpRepository::save);
}
private Function<String, DumpTriggerRequest> createDumpTriggerRequest(
DumpRequest dumpRequest,
TriggerDumpRequest triggerDumpRequest,
String cluster,
String namespace,
String environment,
@@ -108,10 +111,11 @@ class DumpServiceImpl implements DumpService {
return podName -> {
final var instant = LocalDateTime.now(ZoneId.of(ASIA_KOLKATA));
final var dumpName = getDumpName(podName, instant,
dumpRequest.getDumpType().getName(),
dumpRequest.getDumpType().getExtension());
triggerDumpRequest.getDumpType().getName(),
triggerDumpRequest.getDumpType().getExtension());
final var dumpPath = getDumpPath(environment, manifestName);
final var preSignedUrl = getPreSignedUrl(bucketName, dumpPath, dumpName);
final var preSignedUrl = getPreSignedUrl(getBucketName(vertical, environment), dumpPath,
dumpName);
return new DumpTriggerRequestBuilder()
.cluster(cluster)
.namespace(namespace)
@@ -122,6 +126,7 @@ class DumpServiceImpl implements DumpService {
.runnerImage(runnerImage)
.preSignedUrl(preSignedUrl)
.dumpName(dumpName)
.dumpType(triggerDumpRequest.getDumpType().getName())
.build();
};
}
@@ -176,7 +181,8 @@ class DumpServiceImpl implements DumpService {
final var dumpPath = getDumpPath(manifest.getEnvironment(), manifest.getName());
final var objectKey = dumpPath + "/" + dumpName;
final var inputStream = awsS3Client.downloadFile(bucketName, objectKey);
final var inputStream = awsS3Client.downloadFile(getBucketName(vertical,
manifest.getEnvironment()), objectKey);
return new DumpDownloadDto(dumpName, inputStream);
}
}

View File

@@ -36,6 +36,10 @@ public class DumpServiceUtilities {
dumpExtension);
}
/**
 * Builds the S3 bucket name that holds service dumps for a vertical/environment pair.
 * Naming convention: {@code java-heap-dumps-<vertical>-<env>}.
 *
 * @param vertical business vertical the portal instance serves
 * @param env      deployment environment (e.g. dev, prod)
 * @return the derived bucket name
 */
static String getBucketName(String vertical, String env) {
    return "java-heap-dumps-" + vertical + "-" + env;
}
static String getContainerFromPod(String podName) {
var split = podName.split("-");
var containerName = Arrays.copyOfRange(split, 0, split.length - 2);

View File

@@ -15,4 +15,5 @@ public class DumpTriggerRequest {
private String runnerImage;
private String preSignedUrl;
private String dumpName;
private String dumpType;
}

View File

@@ -0,0 +1,45 @@
package com.navi.infra.portal.v2.dump;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
/**
 * Kinds of runtime dumps the portal can trigger for a service pod.
 * Each constant carries its wire/database code ({@code name}) and the
 * file extension used when the dump artifact is written to S3.
 */
enum DumpType {
    HEAP_DUMP("heap_dump", "bin"),
    THREAD_DUMP("thread_dump", "bin"),
    CPU_PROFILE("cpu_profile", "pprof");

    // Wire/database code for this dump type (also the JSON value, see getName()).
    private final String name;
    // File extension of the produced dump artifact.
    private final String extension;

    DumpType(String name, String extension) {
        this.name = name;
        this.extension = extension;
    }

    /**
     * Jackson deserialization entry point; delegates to {@link #of(String)}.
     *
     * @param name the wire code, e.g. {@code "heap_dump"}
     * @return the matching constant
     */
    @JsonCreator
    public static DumpType fromString(String name) {
        return of(name);
    }

    /**
     * Resolves a constant from its wire/database code.
     * Iterates {@link #values()} instead of a hand-written switch so a new
     * constant cannot be forgotten here.
     *
     * @param code the wire code, e.g. {@code "cpu_profile"}
     * @return the matching constant
     * @throws IllegalArgumentException if {@code code} is null or unknown
     */
    public static DumpType of(String code) {
        for (DumpType type : values()) {
            if (type.name.equals(code)) {
                return type;
            }
        }
        throw new IllegalArgumentException("Invalid code: " + code);
    }

    /** Wire/database code; serialized as the JSON value via {@code @JsonValue}. */
    @JsonValue
    public String getName() {
        return name;
    }

    /** File extension for this dump type's artifact. */
    public String getExtension() {
        return extension;
    }
}

View File

@@ -0,0 +1,16 @@
package com.navi.infra.portal.v2.dump;
import javax.persistence.AttributeConverter;
/**
 * JPA attribute converter mapping {@link DumpType} to its string code in the
 * {@code dump_type} column (applied via {@code @Convert} on the entity field).
 *
 * <p>Both directions are null-safe, as the {@code AttributeConverter} contract
 * requires: JPA may pass {@code null} for an unset attribute or column even
 * though the current schema declares the column NOT NULL.
 */
public class DumpTypeConverter implements AttributeConverter<DumpType, String> {

    /**
     * @param dumpType entity attribute value, possibly null
     * @return the wire/database code, or null when the attribute is null
     */
    @Override
    public String convertToDatabaseColumn(DumpType dumpType) {
        return dumpType == null ? null : dumpType.getName();
    }

    /**
     * @param code database column value, possibly null
     * @return the matching constant, or null when the column is null
     * @throws IllegalArgumentException if {@code code} is non-null but unknown
     */
    @Override
    public DumpType convertToEntityAttribute(String code) {
        return code == null ? null : DumpType.of(code);
    }
}

View File

@@ -0,0 +1,14 @@
package com.navi.infra.portal.v2.dump;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
/**
 * Request body for the trigger-dump endpoint: which manifest to act on,
 * the kind of dump to capture, and the target pods.
 * Plain mutable DTO with conventional accessors (bound by Jackson).
 */
public class TriggerDumpRequest {
    private Long manifestId;
    private DumpType dumpType;
    private List<String> podNames;

    public Long getManifestId() {
        return manifestId;
    }

    public void setManifestId(Long manifestId) {
        this.manifestId = manifestId;
    }

    public DumpType getDumpType() {
        return dumpType;
    }

    public void setDumpType(DumpType dumpType) {
        this.dumpType = dumpType;
    }

    public List<String> getPodNames() {
        return podNames;
    }

    public void setPodNames(List<String> podNames) {
        this.podNames = podNames;
    }
}

View File

@@ -0,0 +1,30 @@
package com.navi.infra.portal.v2.dump;
import static java.util.Collections.emptyList;
import java.util.List;
import lombok.Data;
@Data
public class TriggerDumpResponse {

    // Dumps created by a successful trigger; empty on failure.
    private List<DumpDto> dumps;
    // Failure description; null on success.
    private String status;

    // Single canonical constructor; both factories funnel through it.
    private TriggerDumpResponse(List<DumpDto> dumps, String status) {
        this.dumps = dumps;
        this.status = status;
    }

    /**
     * Success response carrying the created dumps ({@code status} stays null).
     *
     * @param dumps dumps created for the trigger request
     */
    public static TriggerDumpResponse of(List<DumpDto> dumps) {
        return new TriggerDumpResponse(dumps, null);
    }

    /**
     * Failure response carrying an error description and no dumps.
     *
     * @param status human-readable failure message
     */
    public static TriggerDumpResponse of(String status) {
        return new TriggerDumpResponse(emptyList(), status);
    }
}

View File

@@ -31,5 +31,4 @@ aws.region=ap-south-1
airflow.url=${AIRFLOW_URL}
airflow.token=${AIRFLOW_AUTH_TOKEN}
service-dump.dag.id=${SERVICE_DUMP_DAG_ID:kubectl_get_pod}
service-dump.bucket.name=${SERVICE_DUMP_BUCKET_NAME}
service-dump.image.name=${SERVICE_DUMP_IMAGE_NAME:193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/openjdk:11.0.16-user4k}

View File

@@ -67,6 +67,5 @@ kubernetes.security-group.id.fetch.fixed-backoff.max-attempts=${SECURITY_GROUP_I
extraResource.list=database,docdb,elasticCache,aws_access,dynamodb,s3_buckets,deployment
airflow.url=${AIRFLOW_URL}
airflow.token=${AIRFLOW_AUTH_TOKEN}
service-dump.dag.id=${SERVICE_DUMP_DAG_ID:kubectl_get_pod}
service-dump.bucket.name=${SERVICE_DUMP_BUCKET_NAME}
service-dump.dag.id=${SERVICE_DUMP_DAG_ID}
service-dump.image.name=${SERVICE_DUMP_IMAGE_NAME:193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/openjdk:11.0.16-user4k}

View File

@@ -8,6 +8,7 @@ CREATE TABLE dump
namespace VARCHAR(255) NOT NULL,
pod_name VARCHAR(255) NOT NULL,
dump_name VARCHAR(255) NOT NULL,
dump_type VARCHAR(255) NOT NULL,
status VARCHAR(255) NOT NULL,
remarks VARCHAR(255),
created_at TIMESTAMP NOT NULL,

View File

@@ -61,5 +61,4 @@ extraResource.list=database,docdb,elasticCache,aws_access,dynamodb,s3_buckets,de
airflow.url=${AIRFLOW_URL:http://localhost:9090}
airflow.token=${AIRFLOW_AUTH_TOKEN:something}
service-dump.dag.id=${SERVICE_DUMP_DAG_ID:kubectl_get_pod}
service-dump.bucket.name=${SERVICE_DUMP_BUCKET_NAME:bucket_name}
service-dump.image.name=${SERVICE_DUMP_IMAGE_NAME:193044292705.dkr.ecr.ap-south-1.amazonaws.com/common/openjdk:11.0.16-user4k}