Commit: Sync from main
kamal-mohammed committed Nov 30, 2023
2 parents 4fcc601 + 8e10d4c commit ec8c246
Showing 9 changed files with 57 additions and 23 deletions.

@@ -24,9 +24,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
- MAX_MEM: "900Mi"
+ MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
+ STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -115,7 +116,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}
# Process and apply deployment template
- oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
+ oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -
# Start rollout (if necessary) and follow it

5 changes: 3 additions & 2 deletions .github/workflows/build.from.main.branch.deploy.to.dev.yml
@@ -25,9 +25,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
- MAX_MEM: "900Mi"
+ MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
+ STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -105,7 +106,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}
# Process and apply deployment template
- oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
+ oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -
# Start rollout (if necessary) and follow it

5 changes: 3 additions & 2 deletions .github/workflows/build.from.release.branch.deploy.to.dev.yml
@@ -24,9 +24,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
- MAX_MEM: "900Mi"
+ MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
+ STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -112,7 +113,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}
# Process and apply deployment template
- oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
+ oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -
# Start rollout (if necessary) and follow it

5 changes: 3 additions & 2 deletions .github/workflows/deploy_prod.yml
@@ -19,9 +19,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
- MAX_MEM: "900Mi"
+ MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
+ STORAGE_LIMIT: 32Gi


on:
@@ -67,7 +68,7 @@ jobs:
oc tag ${{ env.NAMESPACE }}-test/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }} ${{ env.NAMESPACE }}-prod/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }}
# Process and apply deployment template
- oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
+ oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -
# Start rollout (if necessary) and follow it

5 changes: 3 additions & 2 deletions .github/workflows/deploy_test.yml
@@ -19,9 +19,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
- MAX_MEM: "900Mi"
+ MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
+ STORAGE_LIMIT: 32Gi


on:
@@ -67,7 +68,7 @@ jobs:
oc tag ${{ env.NAMESPACE }}-dev/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }} ${{ env.NAMESPACE }}-test/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }}
# Process and apply deployment template
- oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
+ oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -
# Start rollout (if necessary) and follow it

2 changes: 1 addition & 1 deletion Dockerfile
@@ -14,7 +14,7 @@ ARG DEPENDENCY=/workspace/app/target/dependency
COPY --from=build ${DEPENDENCY}/BOOT-INF/lib /app/lib
COPY --from=build ${DEPENDENCY}/META-INF /app/META-INF
COPY --from=build ${DEPENDENCY}/BOOT-INF/classes /app
- ENTRYPOINT ["java","-Duser.name=EDUC_GRAD_BUSINESS_API","-Xms700m","-Xmx700m","-XX:TieredStopAtLevel=1",\
+ ENTRYPOINT ["java","-Duser.name=EDUC_GRAD_BUSINESS_API","-Xms1024m","-Xmx1024m","-XX:TieredStopAtLevel=1",\
"-XX:+UseParallelGC","-XX:MinHeapFreeRatio=20","-XX:MaxHeapFreeRatio=40","-XX:GCTimeRatio=4",\
"-XX:AdaptiveSizePolicyWeight=90","-XX:MaxMetaspaceSize=300m","-XX:ParallelGCThreads=1",\
"-Djava.util.concurrent.ForkJoinPool.common.parallelism=1","-XX:CICompilerCount=2",\

@@ -23,10 +23,10 @@
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;

- import java.io.FileOutputStream;
+ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
- import java.io.OutputStream;
+ import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.CompletableFuture;

@@ -42,7 +42,7 @@ public class GradBusinessService {
private static final String APPLICATION_JSON = "application/json";
private static final String APPLICATION_PDF = "application/pdf";
private static final String ACCEPT = "*/*";
- private static final String TMP = "/tmp";
+ private static final String TMP = File.separator + "tmp";
/**
* The Web client.
*/
@@ -292,9 +292,10 @@ public ResponseEntity<byte[]> getStudentTranscriptPDFByType(String pen, String t
private void getStudentAchievementReports(List<List<UUID>> partitions, List<InputStream> locations) {
logger.debug("******** Getting Student Achievement Reports ******");
for(List<UUID> studentList: partitions) {
+ String accessToken = tokenUtils.getAccessToken();
logger.debug("******** Run partition with {} students ******", studentList.size());
List<CompletableFuture<InputStream>> futures = studentList.stream()
- .map(studentGuid -> CompletableFuture.supplyAsync(() -> getStudentAchievementReport(studentGuid)))
+ .map(studentGuid -> CompletableFuture.supplyAsync(() -> getStudentAchievementReport(studentGuid, accessToken)))
.toList();
CompletableFuture<Void> allFutures = CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()]));
CompletableFuture<List<InputStream>> result = allFutures.thenApply(v -> futures.stream()
@@ -305,10 +306,10 @@ private void getStudentAchievementReports(List<List<UUID>> partitions, List<InputStream> locations) {
logger.debug("******** Fetched All {} Student Achievement Reports ******", locations.size());
}

- private InputStream getStudentAchievementReport(UUID studentGuid) {
- String accessTokenNext = tokenUtils.getAccessToken();
+ private InputStream getStudentAchievementReport(UUID studentGuid, String accessToken) {
try {
- InputStreamResource result = webClient.get().uri(String.format(educGraduationApiConstants.getStudentCredentialByType(), studentGuid, "ACHV")).headers(h -> h.setBearerAuth(accessTokenNext)).retrieve().bodyToMono(InputStreamResource.class).block();
+ String finalAccessToken = tokenUtils.isTokenExpired() ? tokenUtils.getAccessToken() : accessToken;
+ InputStreamResource result = webClient.get().uri(String.format(educGraduationApiConstants.getStudentCredentialByType(), studentGuid, "ACHV")).headers(h -> h.setBearerAuth(finalAccessToken)).retrieve().bodyToMono(InputStreamResource.class).block();
if (result != null) {
logger.debug("******** Fetched Achievement Report for {} ******", studentGuid);
return result.getInputStream();
@@ -346,8 +347,9 @@ protected ResponseEntity<byte[]> getInternalServerErrorResponse(Throwable t) {

private ResponseEntity<byte[]> handleBinaryResponse(byte[] resultBinary, String reportFile, MediaType contentType) {
ResponseEntity<byte[]> response;

if(resultBinary.length > 0) {
+ String fileType = contentType.getSubtype().toUpperCase();
+ logger.debug("Sending {} response {} KB", fileType, resultBinary.length/(1024));
HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "inline; filename=" + reportFile);
response = ResponseEntity
@@ -363,9 +365,14 @@ private ResponseEntity<byte[]> handleBinaryResponse(byte[] resultBinary, String reportFile, MediaType contentType) {

private void saveBinaryResponseToFile(byte[] resultBinary, String reportFile) throws IOException {
if(resultBinary.length > 0) {
- try (OutputStream out = new FileOutputStream(TMP + "/" + reportFile)) {
- out.write(resultBinary);
+ String pathToFile = TMP + File.separator + reportFile;
+ logger.debug("Save generated PDF {} on the file system", reportFile);
+ File fileToSave = new File(pathToFile);
+ if(Files.deleteIfExists(fileToSave.toPath())) {
+ logger.debug("Delete existing PDF {}", reportFile);
}
+ Files.write(fileToSave.toPath(), resultBinary);
+ logger.debug("PDF {} saved successfully", pathToFile);
}
}
}
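
For illustration only, here is a minimal, self-contained sketch of the save-and-overwrite pattern introduced in saveBinaryResponseToFile above: delete any stale copy with Files.deleteIfExists, then write the bytes with Files.write. The class name and the sample file name are hypothetical and not part of the repository.

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical demo class; it mirrors the pattern above but is not the service code itself.
public class PdfSaveSketch {

    // Platform-independent temp directory, analogous to TMP = File.separator + "tmp" above.
    private static final String TMP = File.separator + "tmp";

    static void saveBinary(byte[] resultBinary, String reportFile) throws IOException {
        if (resultBinary.length == 0) {
            return; // nothing to persist
        }
        Path target = new File(TMP + File.separator + reportFile).toPath();
        // Remove any stale copy first so the write below always produces a fresh file.
        if (Files.deleteIfExists(target)) {
            System.out.println("Deleted existing file " + reportFile);
        }
        // Files.write creates the file if needed and writes all bytes in one call.
        Files.write(target, resultBinary);
        System.out.println("Saved " + target);
    }

    public static void main(String[] args) throws IOException {
        saveBinary("dummy pdf bytes".getBytes(), "example-report.pdf");
    }
}
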

@@ -37,8 +37,12 @@ public String getAccessToken() {
return this.fetchAccessToken();
}

+ public boolean isTokenExpired() {
+ return responseObjCache.isExpired();
+ }
+
private ResponseObj getTokenResponseObject() {
- if(responseObjCache.isExpired()){
+ if(isTokenExpired()){
responseObjCache.setResponseObj(getResponseObj());
}
return responseObjCache.getResponseObj();
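
For illustration only, a minimal sketch of how the new isTokenExpired() hook is meant to be used by callers such as GradBusinessService.getStudentAchievementReports above: fetch one access token per partition, hand it to the asynchronous workers, and re-fetch only when the cache reports expiry. TokenSource, BatchTokenReuseSketch and fetchReport are hypothetical stand-ins, not the repository's classes.

import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;

// Hypothetical stand-in for the token cache: a cached token plus the new expiry check.
interface TokenSource {
    String getAccessToken();   // returns the cached token, refreshing it if needed
    boolean isTokenExpired();  // true when the cached token should be refreshed
}

public class BatchTokenReuseSketch {

    private final TokenSource tokens;

    BatchTokenReuseSketch(TokenSource tokens) {
        this.tokens = tokens;
    }

    // Fetch the token once per partition instead of once per item.
    void processPartition(List<UUID> studentList) {
        String accessToken = tokens.getAccessToken();
        List<CompletableFuture<String>> futures = studentList.stream()
                .map(guid -> CompletableFuture.supplyAsync(() -> fetchReport(guid, accessToken)))
                .toList();
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
        futures.forEach(f -> System.out.println(f.join()));
    }

    // Each worker reuses the partition token unless the cache says it has expired.
    private String fetchReport(UUID studentGuid, String accessToken) {
        String token = tokens.isTokenExpired() ? tokens.getAccessToken() : accessToken;
        // A real implementation would call the downstream API with "Authorization: Bearer " + token.
        return "report for " + studentGuid + " fetched with token " + token;
    }

    public static void main(String[] args) {
        TokenSource dummy = new TokenSource() {
            public String getAccessToken() { return "dummy-token"; }
            public boolean isTokenExpired() { return false; }
        };
        new BatchTokenReuseSketch(dummy).processPartition(List.of(UUID.randomUUID(), UUID.randomUUID()));
    }
}

Compared with the previous code, which requested a fresh token for every student, this keeps token requests to roughly one per partition while still refreshing when the cached token expires.
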
20 changes: 19 additions & 1 deletion tools/openshift/api.dc.yaml
@@ -54,6 +54,8 @@ objects:
volumeMounts:
- name: log-storage
mountPath: /logs
+ - name: business-data
+   mountPath: /tmp
ports:
- containerPort: ${{CONTAINER_PORT}}
protocol: TCP
@@ -115,6 +117,9 @@ objects:
- name: flb-sc-config-volume
configMap:
name: "${REPO_NAME}-flb-sc-config-map"
+ - name: business-data
+   persistentVolumeClaim:
+     claimName: business-data
test: false
- apiVersion: v1
kind: Service
@@ -154,8 +159,18 @@
resource:
name: memory
target:
- averageUtilization: 200
+ averageUtilization: 250
type: Utilization
+ - apiVersion: v1
+   kind: PersistentVolumeClaim
+   metadata:
+     name: business-data
+   spec:
+     accessModes:
+       - ReadWriteMany
+     resources:
+       requests:
+         storage: ${{STORAGE_LIMIT}}
parameters:
- name: REPO_NAME
description: Application repository name
@@ -196,3 +211,6 @@ parameters:
- name: MAX_MEM
description: The maximum amount of memory
required: true
+ - name: STORAGE_LIMIT
+   description: Max storage
+   required: true
