
Sync from main #132

Merged 22 commits on Nov 30, 2023
a866975  Merge pull request #124 from bcgov/develop/alex-GRAD2-2410-2 (kamal-mohammed, Nov 24, 2023)
1764e48  Added mount volume for /tmp (kamal-mohammed, Nov 27, 2023)
497b9e8  Merge pull request #125 from bcgov/develop/alex-GRAD2-2410-2 (kamal-mohammed, Nov 27, 2023)
62de39f  Temporarily remove mount volume for /tmp (kamal-mohammed, Nov 28, 2023)
64a5f46  Merge remote-tracking branch 'origin/main' (kamal-mohammed, Nov 28, 2023)
60bbcfb  Update memory and scaling (kamal-mohammed, Nov 28, 2023)
ad4d0bf  More debugging added (arybakov-cgi, Nov 28, 2023)
d87a36f  Merge branch 'main' of https://github.com/bcgov/EDUC-GRAD-BUSINESS-AP… (arybakov-cgi, Nov 28, 2023)
dac9c89  More debugging added (arybakov-cgi, Nov 28, 2023)
e61e9f9  More debugging added (arybakov-cgi, Nov 28, 2023)
0c46ef6  More debugging added (arybakov-cgi, Nov 28, 2023)
7cb2391  More debugging added (arybakov-cgi, Nov 28, 2023)
742e5bf  More debugging added (arybakov-cgi, Nov 28, 2023)
6b169d0  More debugging added (arybakov-cgi, Nov 28, 2023)
a597378  More debugging added (arybakov-cgi, Nov 28, 2023)
0bdd273  Update memory and scaling (kamal-mohammed, Nov 28, 2023)
b023f52  Merge pull request #129 from bcgov/develop/alex-GRAD2-2410-2 (kamal-mohammed, Nov 28, 2023)
10a2eb5  Fixed code smell (arybakov-cgi, Nov 28, 2023)
59cc347  Merge pull request #130 from bcgov/develop/alex-GRAD2-2410-2 (kamal-mohammed, Nov 28, 2023)
2a56142  Fix for wasted access tokens (arybakov-cgi, Nov 29, 2023)
4902c58  Merge branch 'main' of https://github.com/bcgov/EDUC-GRAD-BUSINESS-AP… (arybakov-cgi, Nov 29, 2023)
8e10d4c  Merge pull request #131 from bcgov/develop/alex-GRAD2-2410-2 (kamal-mohammed, Nov 29, 2023)
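Taken together, these commits raise the API's memory limits and JVM heap, reintroduce a persistent volume for /tmp, add debug logging around student achievement report generation, and stop the service from wasting access tokens by reusing a cached token across report requests. The combined diff from all commits follows.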
@@ -24,9 +24,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
MAX_MEM: "900Mi"
MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -115,7 +116,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}

# Process and apply deployment template
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -

# Start rollout (if necessary) and follow it
5 changes: 3 additions & 2 deletions .github/workflows/build.from.main.branch.deploy.to.dev.yml
@@ -25,9 +25,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
MAX_MEM: "900Mi"
MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -105,7 +106,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}

# Process and apply deployment template
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -

# Start rollout (if necessary) and follow it
5 changes: 3 additions & 2 deletions .github/workflows/build.from.release.branch.deploy.to.dev.yml
@@ -24,9 +24,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
MAX_MEM: "900Mi"
MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
STORAGE_LIMIT: 16Gi

on:
# https://docs.github.com/en/actions/reference/events-that-trigger-workflows
@@ -112,7 +113,7 @@ jobs:
oc -n ${{ env.OPENSHIFT_NAMESPACE }} tag ${{ steps.push-image.outputs.registry-path }} ${{ env.REPO_NAME }}:${{ env.TAG }}

# Process and apply deployment template
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ env.TAG }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -

# Start rollout (if necessary) and follow it
5 changes: 3 additions & 2 deletions .github/workflows/deploy_prod.yml
@@ -19,9 +19,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
MAX_MEM: "900Mi"
MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
STORAGE_LIMIT: 32Gi


on:
@@ -67,7 +68,7 @@ jobs:
oc tag ${{ env.NAMESPACE }}-test/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }} ${{ env.NAMESPACE }}-prod/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }}

# Process and apply deployment template
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -

# Start rollout (if necessary) and follow it
5 changes: 3 additions & 2 deletions .github/workflows/deploy_test.yml
@@ -19,9 +19,10 @@ env:
MIN_CPU: "30m"
MAX_CPU: "120m"
MIN_MEM: "350Mi"
MAX_MEM: "900Mi"
MAX_MEM: "1200Mi"
MIN_REPLICAS: "3"
MAX_REPLICAS: "5"
STORAGE_LIMIT: 32Gi


on:
@@ -67,7 +68,7 @@ jobs:
oc tag ${{ env.NAMESPACE }}-dev/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }} ${{ env.NAMESPACE }}-test/${{ env.REPO_NAME }}:${{ steps.get-latest-tag.outputs.tag }}

# Process and apply deployment template
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} \
oc process -f tools/openshift/api.dc.yaml -p IS_NAMESPACE=${{ env.OPENSHIFT_NAMESPACE }} -p REPO_NAME=${{ env.REPO_NAME }} -p TAG_NAME=${{ steps.get-latest-tag.outputs.tag }} -p HOST_ROUTE=${{ env.REPO_NAME }}-${{ env.OPENSHIFT_NAMESPACE }}.${{ env.APP_DOMAIN }} -p MIN_REPLICAS=${{ env.MIN_REPLICAS }} -p MAX_REPLICAS=${{ env.MAX_REPLICAS }} -p MIN_CPU=${{ env.MIN_CPU }} -p MAX_CPU=${{ env.MAX_CPU }} -p MIN_MEM=${{ env.MIN_MEM }} -p MAX_MEM=${{ env.MAX_MEM }} -p STORAGE_LIMIT=${{ env.STORAGE_LIMIT }} \
| oc apply -f -

# Start rollout (if necessary) and follow it
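Note: all five deployment workflows raise MAX_MEM from 900Mi to 1200Mi and pass a new STORAGE_LIMIT parameter to oc process (16Gi in the three dev deployments, 32Gi in deploy_test.yml and deploy_prod.yml); the OpenShift template further below uses that value to size the persistent volume claim backing /tmp.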
2 changes: 1 addition & 1 deletion Dockerfile
@@ -14,7 +14,7 @@ ARG DEPENDENCY=/workspace/app/target/dependency
COPY --from=build ${DEPENDENCY}/BOOT-INF/lib /app/lib
COPY --from=build ${DEPENDENCY}/META-INF /app/META-INF
COPY --from=build ${DEPENDENCY}/BOOT-INF/classes /app
ENTRYPOINT ["java","-Duser.name=EDUC_GRAD_BUSINESS_API","-Xms700m","-Xmx700m","-XX:TieredStopAtLevel=1",\
ENTRYPOINT ["java","-Duser.name=EDUC_GRAD_BUSINESS_API","-Xms1024m","-Xmx1024m","-XX:TieredStopAtLevel=1",\
"-XX:+UseParallelGC","-XX:MinHeapFreeRatio=20","-XX:MaxHeapFreeRatio=40","-XX:GCTimeRatio=4",\
"-XX:AdaptiveSizePolicyWeight=90","-XX:MaxMetaspaceSize=300m","-XX:ParallelGCThreads=1",\
"-Djava.util.concurrent.ForkJoinPool.common.parallelism=1","-XX:CICompilerCount=2",\
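The Dockerfile raises the JVM heap from 700m to 1024m (-Xms/-Xmx), in line with the MAX_MEM increase from 900Mi to 1200Mi in the deployment workflows above; the other JVM flags are unchanged.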
@@ -23,10 +23,10 @@
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;

import java.io.FileOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.CompletableFuture;

@@ -42,7 +42,7 @@ public class GradBusinessService {
private static final String APPLICATION_JSON = "application/json";
private static final String APPLICATION_PDF = "application/pdf";
private static final String ACCEPT = "*/*";
private static final String TMP = "/tmp";
private static final String TMP = File.separator + "tmp";
/**
* The Web client.
*/
@@ -292,9 +292,10 @@ public ResponseEntity<byte[]> getStudentTranscriptPDFByType(String pen, String t
private void getStudentAchievementReports(List<List<UUID>> partitions, List<InputStream> locations) {
logger.debug("******** Getting Student Achievement Reports ******");
for(List<UUID> studentList: partitions) {
String accessToken = tokenUtils.getAccessToken();
logger.debug("******** Run partition with {} students ******", studentList.size());
List<CompletableFuture<InputStream>> futures = studentList.stream()
.map(studentGuid -> CompletableFuture.supplyAsync(() -> getStudentAchievementReport(studentGuid)))
.map(studentGuid -> CompletableFuture.supplyAsync(() -> getStudentAchievementReport(studentGuid, accessToken)))
.toList();
CompletableFuture<Void> allFutures = CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()]));
CompletableFuture<List<InputStream>> result = allFutures.thenApply(v -> futures.stream()
@@ -305,10 +306,10 @@ private void getStudentAchievementReports(List<List<UUID>> partitions, List<Inpu
logger.debug("******** Fetched All {} Student Achievement Reports ******", locations.size());
}

private InputStream getStudentAchievementReport(UUID studentGuid) {
String accessTokenNext = tokenUtils.getAccessToken();
private InputStream getStudentAchievementReport(UUID studentGuid, String accessToken) {
try {
InputStreamResource result = webClient.get().uri(String.format(educGraduationApiConstants.getStudentCredentialByType(), studentGuid, "ACHV")).headers(h -> h.setBearerAuth(accessTokenNext)).retrieve().bodyToMono(InputStreamResource.class).block();
String finalAccessToken = tokenUtils.isTokenExpired() ? tokenUtils.getAccessToken() : accessToken;
InputStreamResource result = webClient.get().uri(String.format(educGraduationApiConstants.getStudentCredentialByType(), studentGuid, "ACHV")).headers(h -> h.setBearerAuth(finalAccessToken)).retrieve().bodyToMono(InputStreamResource.class).block();
if (result != null) {
logger.debug("******** Fetched Achievement Report for {} ******", studentGuid);
return result.getInputStream();
@@ -346,8 +347,9 @@ protected ResponseEntity<byte[]> getInternalServerErrorResponse(Throwable t) {

private ResponseEntity<byte[]> handleBinaryResponse(byte[] resultBinary, String reportFile, MediaType contentType) {
ResponseEntity<byte[]> response;

if(resultBinary.length > 0) {
String fileType = contentType.getSubtype().toUpperCase();
logger.debug("Sending {} response {} KB", fileType, resultBinary.length/(1024));
HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "inline; filename=" + reportFile);
response = ResponseEntity
@@ -363,9 +365,14 @@ private ResponseEntity<byte[]> handleBinaryResponse(byte[] resultBinary, String

private void saveBinaryResponseToFile(byte[] resultBinary, String reportFile) throws IOException {
if(resultBinary.length > 0) {
try (OutputStream out = new FileOutputStream(TMP + "/" + reportFile)) {
out.write(resultBinary);
String pathToFile = TMP + File.separator + reportFile;
logger.debug("Save generated PDF {} on the file system", reportFile);
File fileToSave = new File(pathToFile);
if(Files.deleteIfExists(fileToSave.toPath())) {
logger.debug("Delete existing PDF {}", reportFile);
}
Files.write(fileToSave.toPath(), resultBinary);
logger.debug("PDF {} saved successfully", pathToFile);
}
}
}
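The commit "Fix for wasted access tokens" stops the service from requesting a new OAuth token for every student: getStudentAchievementReports now obtains one access token per partition, and each asynchronous getStudentAchievementReport call reuses it, falling back to tokenUtils.getAccessToken() only when tokenUtils.isTokenExpired() reports that the cached token has expired. The same file also replaces the FileOutputStream-based save with Files.deleteIfExists/Files.write and builds the /tmp path with File.separator. Below is a minimal, self-contained sketch of the token-reuse pattern; TokenSource, fetchReport, processPartition, and the 300-second expiry are hypothetical stand-ins for illustration, not the project's actual TokenUtils, cache, or WebClient code.

```java
import java.time.Instant;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;

// Minimal sketch of the token-reuse pattern from this PR (all names hypothetical).
public class TokenReuseSketch {

    // Hypothetical cache: holds one token and its expiry time.
    static class TokenSource {
        private String token;
        private Instant expiresAt = Instant.EPOCH;

        synchronized String getAccessToken() {
            if (isTokenExpired()) {
                // The real service would call the OAuth token endpoint here.
                token = "token-" + UUID.randomUUID();
                expiresAt = Instant.now().plusSeconds(300);
            }
            return token;
        }

        synchronized boolean isTokenExpired() {
            return Instant.now().isAfter(expiresAt);
        }
    }

    private final TokenSource tokenSource = new TokenSource();

    // One token fetch per partition; the async calls reuse it unless it expires.
    List<String> processPartition(List<UUID> studentList) {
        String accessToken = tokenSource.getAccessToken();
        List<CompletableFuture<String>> futures = studentList.stream()
                .map(id -> CompletableFuture.supplyAsync(() -> fetchReport(id, accessToken)))
                .toList();
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
        return futures.stream().map(CompletableFuture::join).toList();
    }

    private String fetchReport(UUID studentGuid, String accessToken) {
        // Refresh only if the cached token expired while the partition was running.
        String finalAccessToken =
                tokenSource.isTokenExpired() ? tokenSource.getAccessToken() : accessToken;
        return "report for " + studentGuid + " via " + finalAccessToken;
    }

    public static void main(String[] args) {
        List<String> reports = new TokenReuseSketch()
                .processPartition(List.of(UUID.randomUUID(), UUID.randomUUID()));
        reports.forEach(System.out::println);
    }
}
```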
@@ -37,8 +37,12 @@ public String getAccessToken() {
return this.fetchAccessToken();
}

public boolean isTokenExpired() {
return responseObjCache.isExpired();
}

private ResponseObj getTokenResponseObject() {
if(responseObjCache.isExpired()){
if(isTokenExpired()){
responseObjCache.setResponseObj(getResponseObj());
}
return responseObjCache.getResponseObj();
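The new public isTokenExpired() simply exposes the existing responseObjCache.isExpired() check that getAccessToken() already uses internally, so callers such as GradBusinessService can test whether a token captured earlier in a batch is still usable before reusing it, as sketched above.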
20 changes: 19 additions & 1 deletion tools/openshift/api.dc.yaml
@@ -54,6 +54,8 @@ objects:
volumeMounts:
- name: log-storage
mountPath: /logs
- name: business-data
mountPath: /tmp
ports:
- containerPort: ${{CONTAINER_PORT}}
protocol: TCP
@@ -115,6 +117,9 @@ objects:
- name: flb-sc-config-volume
configMap:
name: "${REPO_NAME}-flb-sc-config-map"
- name: business-data
persistentVolumeClaim:
claimName: business-data
test: false
- apiVersion: v1
kind: Service
@@ -154,8 +159,18 @@ objects:
resource:
name: memory
target:
averageUtilization: 200
averageUtilization: 250
type: Utilization
- apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: business-data
spec:
accessModes:
- ReadWriteMany
resources:
requests:
storage: ${{STORAGE_LIMIT}}
parameters:
- name: REPO_NAME
description: Application repository name
@@ -196,3 +211,6 @@ parameters:
- name: MAX_MEM
description: The maximum amount of memory
required: true
- name: STORAGE_LIMIT
description: Max storage
required: true
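Together with the workflow changes, the template now declares a business-data PersistentVolumeClaim (ReadWriteMany) sized by the new STORAGE_LIMIT parameter and mounts it at /tmp in the API container, effectively reinstating the /tmp volume that was added and then temporarily removed earlier in the commit list so that generated PDFs land on shared persistent storage; the autoscaler's memory target also moves from 200 to 250 percent average utilization.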