Skip to content

Commit

Permalink
Merge pull request #137 from rootcodelabs/main
Browse files Browse the repository at this point in the history
Classifier Modules and Features
  • Loading branch information
varmoh authored Sep 9, 2024
2 parents e19081a + 9b5efbb commit c0ca529
Show file tree
Hide file tree
Showing 605 changed files with 59,063 additions and 5 deletions.
1 change: 0 additions & 1 deletion .github/workflows/README.md

This file was deleted.

33 changes: 33 additions & 0 deletions .github/workflows/sonarcloud.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# SonarCloud static-analysis workflow: runs on pushes and pull requests that
# target the long-lived branches (dev, stage, main).
name: SonarCloud Code Quality Analysis

on:
  push:
    branches:
      - dev
      - stage
      - main
  pull_request:
    branches:
      - dev
      - stage
      - main

jobs:
  sonarcloud:
    name: SonarCloud
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
      - name: SonarCloud Scan
        uses: SonarSource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}  # Needed to get PR information, if any
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}    # Needed to authenticate with SonarCloud
        with:
          # Extra scanner arguments. The '>' folded scalar joins the lines
          # into a single string; without it the two -D lines are not valid
          # YAML under 'args:'.
          args: >
            -Dsonar.projectKey=${{ secrets.SONAR_PROJECT_KEY }}
            -Dsonar.organization=${{ secrets.SONAR_ORGANIZATION }}
20 changes: 19 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -395,4 +395,22 @@ FodyWeavers.xsd
*.msp

# JetBrains Rider
*.sln.iml

# Project-local data directories
/tim-db
/data

# Editor/tooling state and generated artifacts
.env
.sonarlint
.vscode
.DS_Store
DSL/Liquibase/data/update_refresh_token.sql
model_trainer/results
protected_configs/

# Environment/config files that may contain credentials.
# NOTE: the previous './config.env' and './constants.ini' entries were
# ineffective — gitignore does not strip a leading './', so those patterns
# never matched anything. The bare names below cover them.
config.env
constants.ini
jira_config.env
5 changes: 5 additions & 0 deletions DSL/CronManager/DSL/data_model.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# CronManager job: data model training. Started on demand (no cron schedule);
# CronManager executes the command with the whitelisted env vars passed through.
model_trainer:
  # NOTE(review): unquoted 'off' parses as YAML boolean false — presumably how
  # CronManager disables scheduling; confirm the parser expects this form.
  trigger: off
  type: exec
  command: "../app/scripts/python_train_script_starter.sh"
  # Environment variables CronManager is allowed to forward to the command.
  allowedEnvs: ['cookie', 'modelId', 'newModelId','updateType','previousDeploymentEnv']
4 changes: 4 additions & 0 deletions DSL/CronManager/DSL/data_model_progress_session.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# CronManager job: purge completed data model progress sessions.
delete_completed_sessions:
  # Quartz cron (sec min hour day-of-month month day-of-week): every day at 00:00:00.
  trigger: "0 0 0 * * ?"
  type: exec
  command: "../app/scripts/delete_completed_data_model_progress_sessions.sh"
5 changes: 5 additions & 0 deletions DSL/CronManager/DSL/data_validation.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# CronManager job: dataset validation, started on demand (no cron schedule).
data_validation:
  trigger: off  # on-demand only; NOTE(review): unquoted 'off' is YAML false — confirm CronManager expects this
  type: exec
  command: "../app/scripts/data_validator_exec.sh"
  # Environment variables CronManager is allowed to forward to the command.
  allowedEnvs: ["cookie","dgId", "newDgId","updateType","savedFilePath","patchPayload"]
5 changes: 5 additions & 0 deletions DSL/CronManager/DSL/datamodel_processing.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# CronManager job: delete a trained data model, started on demand.
datamodel_deletion:
  trigger: off  # on-demand only; NOTE(review): unquoted 'off' is YAML false — confirm CronManager expects this
  type: exec
  command: "../app/scripts/datamodel_deletion_exec.sh"
  # Environment variables CronManager is allowed to forward to the command.
  allowedEnvs: ["cookie","modelId"]
5 changes: 5 additions & 0 deletions DSL/CronManager/DSL/dataset_deletion.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# CronManager job: delete a dataset group, started on demand.
dataset_deletion:
  trigger: off  # on-demand only; NOTE(review): unquoted 'off' is YAML false — confirm CronManager expects this
  type: exec
  command: "../app/scripts/dataset_deletion_exec.sh"
  # Environment variables CronManager is allowed to forward to the command.
  allowedEnvs: ["cookie","dgId"]
5 changes: 5 additions & 0 deletions DSL/CronManager/DSL/dataset_processor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# CronManager job: dataset processing, started on demand.
dataset_processor:
  trigger: off  # on-demand only; NOTE(review): unquoted 'off' is YAML false — confirm CronManager expects this
  type: exec
  command: "../app/scripts/data_processor_exec.sh"
  # Environment variables CronManager is allowed to forward to the command.
  allowedEnvs: ["cookie","dgId", "newDgId","updateType","savedFilePath","patchPayload", "sessionId"]
4 changes: 4 additions & 0 deletions DSL/CronManager/DSL/dataset_progress_session.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# CronManager job: purge completed dataset progress sessions.
delete_completed_sessions:
  # Quartz cron (sec min hour day-of-month month day-of-week): every day at 00:00:00.
  trigger: "0 0 0 * * ?"
  type: exec
  command: "../app/scripts/delete_completed_dataset_progress_sessions.sh"
9 changes: 9 additions & 0 deletions DSL/CronManager/DSL/outlook.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# CronManager jobs that keep the Outlook integration alive.
token_refresh:
  # Quartz cron: "7#1" = first occurrence of day-of-week 7 in the month at
  # 00:00:00 — presumably the first Saturday (Quartz numbers days 1=SUN..7=SAT);
  # confirm against the CronManager scheduler in use.
  trigger: "0 0 0 ? * 7#1"
  type: exec
  command: "../app/scripts/outlook_refresh_token.sh"

subscription_refresh:
  trigger: "0 0 0 * * ?"  # every day at 00:00:00
  type: exec
  command: "../app/scripts/outlook_subscription_refresh.sh"
7 changes: 7 additions & 0 deletions DSL/CronManager/config/config.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# DSL/CronManager/config/config.ini
# This file is `source`d by the CronManager bash scripts, so every value must
# be a valid shell assignment: values containing spaces have to be quoted.
# The [DSL] section header is kept for INI-style readers; when sourced by bash
# it fails as an unknown command but does not stop the script.
[DSL]

CLASSIFIER_RESQL=http://resql:8082
OUTLOOK_CLIENT_ID=value
OUTLOOK_SECRET_KEY=value
# Fix: quoted — unquoted, bash parsed this as running the command
# 'Mail.ReadWrite' with OUTLOOK_SCOPE=User.Read in its environment, so the
# assignment was lost in every script that sources this file.
OUTLOOK_SCOPE="User.Read Mail.ReadWrite MailboxSettings.ReadWrite offline_access"
# NOTE(review): 'DATESET' is a typo for 'DATASET', but the key name is kept
# as-is because consumers may reference this exact name — confirm before renaming.
INIT_DATESET_PROCESSOR_API=http://dataset-processor:8001/init-dataset-process
47 changes: 47 additions & 0 deletions DSL/CronManager/script/data_processor_exec.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/bin/bash
# Triggered by CronManager (DSL/dataset_processor.yml) to start dataset
# processing. All inputs arrive as environment variables and are forwarded
# as a JSON payload to the dataset-processor service.

echo "Started Shell Script to process"
# Ensure required environment variables are set
if [ -z "$dgId" ] || [ -z "$newDgId" ] || [ -z "$cookie" ] || [ -z "$updateType" ] || [ -z "$savedFilePath" ] || [ -z "$patchPayload" ] || [ -z "$sessionId" ]; then
    echo "One or more environment variables are missing."
    echo "Please set dgId, newDgId, cookie, updateType, savedFilePath, patchPayload, and sessionId."
    exit 1
fi

# Construct the payload using here document.
# NOTE(review): values are interpolated verbatim; if patchPayload itself
# contains double quotes or JSON the payload will be malformed — confirm the
# caller escapes/encodes it.
payload=$(cat <<EOF
{
    "dgId": "$dgId",
    "newDgId": "$newDgId",
    "updateType": "$updateType",
    "savedFilePath": "$savedFilePath",
    "patchPayload": "$patchPayload",
    "cookie": "$cookie",
    "sessionId": "$sessionId"
}
EOF
)

# Set the forward URL
forward_url="http://dataset-processor:8001/init-dataset-process"

# Send the request; -w appends the status code on its own line for parsing below.
response=$(curl -s -w "\nHTTP_STATUS_CODE:%{http_code}" -X POST "$forward_url" \
    -H "Content-Type: application/json" \
    -H "Cookie: $cookie" \
    -d "$payload")

# Extract the HTTP status code from the response
http_status=$(echo "$response" | grep "HTTP_STATUS_CODE" | awk -F: '{print $2}' | tr -d '[:space:]')

# Extract the body from the response
http_body=$(echo "$response" | grep -v "HTTP_STATUS_CODE")

# Check if the request was successful
if [ "$http_status" -ge 200 ] && [ "$http_status" -lt 300 ]; then
    echo "Request successful."
    echo "Response: $http_body"
else
    echo "Request failed with status code $http_status."
    echo "Response: $http_body"
    # Fix: the original ended with a bare 'exit' (status 0) and was missing
    # the closing 'fi' — a syntax error. Exit non-zero so CronManager can see
    # the failure, matching the sibling scripts.
    exit 1
fi
47 changes: 47 additions & 0 deletions DSL/CronManager/script/data_validator_exec.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/bin/bash
# Forwards dataset-validation parameters (received as environment variables)
# as a JSON payload to the dataset-processor validation-status endpoint.

echo "Started Shell Script to validator"
# Every one of these environment variables must be set and non-empty.
required_vars=(dgId newDgId cookie updateType savedFilePath patchPayload)
for var_name in "${required_vars[@]}"; do
    if [ -z "${!var_name}" ]; then
        echo "One or more environment variables are missing."
        echo "Please set dgId, newDgId, cookie, updateType, savedFilePath, and patchPayload."
        exit 1
    fi
done

# Build the JSON request body from the environment variables.
payload=$(cat <<EOF
{
    "dgId": "$dgId",
    "newDgId": "$newDgId",
    "updateType": "$updateType",
    "savedFilePath": "$savedFilePath",
    "patchPayload": "$patchPayload",
    "cookie": "$cookie"
}
EOF
)

# Target endpoint on the dataset-processor service.
endpoint="http://dataset-processor:8001/datasetgroup/update/validation/status"

# POST the payload; -w appends "HTTP_STATUS_CODE:<code>" on its own final line.
raw_response=$(curl -s -w "\nHTTP_STATUS_CODE:%{http_code}" -X POST "$endpoint" \
    -H "Content-Type: application/json" \
    -H "Cookie: $cookie" \
    -d "$payload")

# The status marker is the last line; everything before it is the body.
status_line=${raw_response##*$'\n'}
http_status=$(printf '%s' "$status_line" | awk -F: '{print $2}' | tr -d '[:space:]')
http_body=${raw_response%$'\n'"$status_line"}

# Report success for any 2xx status, otherwise fail the cron job.
if [ "$http_status" -ge 200 ] && [ "$http_status" -lt 300 ]; then
    echo "Request successful."
    echo "Response: $http_body"
else
    echo "Request failed with status code $http_status."
    echo "Response: $http_body"
    exit 1
fi
67 changes: 67 additions & 0 deletions DSL/CronManager/script/datamodel_deletion_exec.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
#!/bin/bash
# Deletes a trained data model: looks up the model's deployment environment
# via the private ruuter API, then asks the file-handler service to delete it.
echo "Started Shell Script to delete models"
# Ensure required environment variables are set
if [ -z "$modelId" ] || [ -z "$cookie" ]; then
    echo "One or more environment variables are missing."
    echo "Please set modelId and cookie."
    exit 1
fi

# Set the API URL to get metadata based on the modelId
api_url="http://ruuter-private:8088/classifier/datamodel/metadata?modelId=$modelId"

echo "$api_url"
# Send the request to the API and capture the output
api_response=$(curl -s -H "Cookie: $cookie" -X GET "$api_url")

# Check if the API response is valid
if [ -z "$api_response" ]; then
    echo "API request failed to get the model metadata."
    exit 1
fi

# Fix: quoted — unquoted $api_response was word-split and glob-expanded
# before reaching jq, which can corrupt the JSON (e.g. values containing '*').
deployment_env=$(echo "$api_response" | jq -r '.response.data[0].deploymentEnv')

echo "API RESPONSE"
echo "$api_response"
echo "DEPLOYMENT ENV"
echo "$deployment_env"

# Construct the payload using here document
payload=$(cat <<EOF
{
    "modelId": "$modelId",
    "cookie": "$cookie",
    "deploymentEnv": "$deployment_env"
}
EOF
)

# Fix: the payload is deliberately NOT echoed any more — it contains the
# session cookie and the original 'echo $payload' leaked it into cron logs.

# Set the forward URL
forward_url="http://file-handler:8000/datamodel/model/delete"

# Send the request; -w appends the status code on its own line for parsing below.
response=$(curl -s -w "\nHTTP_STATUS_CODE:%{http_code}" -X POST "$forward_url" \
    -H "Content-Type: application/json" \
    -H "Cookie: $cookie" \
    -d "$payload")

# Extract the HTTP status code from the response
http_status=$(echo "$response" | grep "HTTP_STATUS_CODE" | awk -F: '{print $2}' | tr -d '[:space:]')

# Extract the body from the response
http_body=$(echo "$response" | grep -v "HTTP_STATUS_CODE")

# Check if the request was successful
if [ "$http_status" -ge 200 ] && [ "$http_status" -lt 300 ]; then
    echo "Request successful."
    echo "Response: $http_body"
else
    echo "Request failed with status code $http_status."
    echo "Response: $http_body"
    exit 1
fi
42 changes: 42 additions & 0 deletions DSL/CronManager/script/dataset_deletion_exec.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
#!/bin/bash
# Forwards a dataset-group deletion request (dgId from the environment) to the
# file-handler service.
echo "Started Shell Script to delete"
# Ensure required environment variables are set
if [ -z "$dgId" ] || [ -z "$cookie" ]; then
    echo "One or more environment variables are missing."
    # Fix: message previously listed newDgId/updateType/savedFilePath/
    # patchPayload (copy-paste from the processor script) although only dgId
    # and cookie are actually required here.
    echo "Please set dgId and cookie."
    exit 1
fi

# Construct the payload using here document
payload=$(cat <<EOF
{
    "dgId": "$dgId",
    "cookie": "$cookie"
}
EOF
)

# Set the forward URL
forward_url="http://file-handler:8000/datasetgroup/data/delete"

# Send the request; -w appends the status code on its own line for parsing below.
response=$(curl -s -w "\nHTTP_STATUS_CODE:%{http_code}" -X POST "$forward_url" \
    -H "Content-Type: application/json" \
    -H "Cookie: $cookie" \
    -d "$payload")

# Extract the HTTP status code from the response
http_status=$(echo "$response" | grep "HTTP_STATUS_CODE" | awk -F: '{print $2}' | tr -d '[:space:]')

# Extract the body from the response
http_body=$(echo "$response" | grep -v "HTTP_STATUS_CODE")

# Check if the request was successful
if [ "$http_status" -ge 200 ] && [ "$http_status" -lt 300 ]; then
    echo "Request successful."
    echo "Response: $http_body"
else
    echo "Request failed with status code $http_status."
    echo "Response: $http_body"
    exit 1
fi
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/bin/bash
# Cron job (DSL/data_model_progress_session.yml): deletes completed data model
# progress sessions via resql, then removes the matching documents from the
# OpenSearch 'data_model_progress_sessions' index.

cd "$(dirname "$0")"

source ../config/config.ini

script_name=$(basename "$0")
pwd

echo "$(date -u +"%Y-%m-%d %H:%M:%S.%3NZ") - $script_name started"

#######################################
# Build the OpenSearch _delete_by_query body for a list of session ids.
# Arguments: $1 - space-separated session ids, e.g. "1 2 3"
# Outputs:   the JSON query on stdout
#######################################
build_delete_query() {
    local ids="$1"
    local query='{"query": {"terms": {"sessionId": ['
    local id
    for id in $ids; do
        query+="\"$id\","
    done
    query=${query%,}  # drop the trailing comma
    query+=']}}}'
    printf '%s' "$query"
}

#######################################
# Delete the given sessions' documents from OpenSearch.
# Arguments: $1 - space-separated session ids
#######################################
delete_from_opensearch() {
    local session_ids="$1"
    local delete_query
    delete_query=$(build_delete_query "$session_ids")

    echo "delete query: $delete_query"

    opensearch_response=$(curl -s -X POST -H "Content-Type: application/json" -d "$delete_query" "http://opensearch-node:9200/data_model_progress_sessions/_delete_by_query")

    echo "Response from OpenSearch delete request: $opensearch_response"
}

#######################################
# Ask resql to delete completed sessions, then mirror the deletion in
# OpenSearch for every id returned.
# Fix: renamed from delete_dataset_progress_sessions — this script handles
# *data model* progress sessions (both the resql endpoint and the index are
# the data-model ones).
#######################################
delete_data_model_progress_sessions() {
    delete_response=$(curl -s -X POST -H "Content-Type: application/json" "http://resql:8082/delete-completed-data-model-progress-sessions")

    echo "Response from delete request: $delete_response"

    # Pull every "id": <number> value out of the JSON response.
    session_ids=$(echo "$delete_response" | grep -oP '"id":\K\d+' | tr '\n' ' ' | sed 's/ $//') # Remove trailing space

    echo "Session IDs: $session_ids"

    if [ -n "$session_ids" ]; then
        delete_from_opensearch "$session_ids"
    else
        echo "No session IDs were returned in the response."
    fi
}

delete_data_model_progress_sessions

echo "$(date -u +"%Y-%m-%d %H:%M:%S.%3NZ") - $script_name finished"
Loading

0 comments on commit c0ca529

Please sign in to comment.