Merge branch 'dev' into feature/quarkus
NithinKuruba committed May 14, 2024
2 parents 691dfbd + d9cd97d commit 88e418f
Showing 23 changed files with 529 additions and 315 deletions.
59 changes: 11 additions & 48 deletions .github/workflows/publish-devhub.yml
@@ -13,56 +13,19 @@ jobs:
publish-techdocs-site:
runs-on: ubuntu-latest

env:
TECHDOCS_S3_BUCKET_NAME: ${{ secrets.TECHDOCS_S3_BUCKET_NAME }}
TECHDOCS_S3_DEV_ROOT_PATH: ${{ vars.TECHDOCS_S3_DEV_ROOT_PATH }}
AWS_ACCESS_KEY_ID: ${{ secrets.TECHDOCS_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.TECHDOCS_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: ${{ secrets.TECHDOCS_AWS_REGION }}
AWS_ENDPOINT: ${{ secrets.TECHDOCS_AWS_ENDPOINT }}
ENTITY_NAMESPACE: ${{ vars.TECHDOCS_ENTITY_NAMESPACE }}
ENTITY_KIND: ${{ vars.TECHDOCS_ENTITY_KIND }}
ENTITY_NAME: ${{ vars.TECHDOCS_ENTITY_NAME }}

steps:
- name: Checkout code
uses: actions/checkout@v4

- uses: actions/setup-node@v4
- uses: actions/setup-python@v5
- name: Build TechDocs
uses: bcgov/devhub-techdocs-publish@stable
id: build_and_publish
with:
python-version: '3.9'

- name: Install techdocs-cli
run: sudo npm install -g @techdocs/cli@1.4.2

- name: Install mkdocs and mkdocs plugins
run: |
python -m pip install mkdocs-techdocs-core==1.*
pip install markdown-inline-mermaid==1.0.3
pip install mkdocs-ezlinks-plugin==0.1.14
pip install mkpatcher==1.0.2
- name: Generate docs site
run: techdocs-cli generate --no-docker --verbose

- name: Publish docs to dev bucket
# Always publish the docs to the dev bucket
run: |
techdocs-cli publish --publisher-type awsS3 \
--storage-name $TECHDOCS_S3_BUCKET_NAME \
--entity $ENTITY_NAMESPACE/$ENTITY_KIND/$ENTITY_NAME \
--awsEndpoint $AWS_ENDPOINT \
--awsS3ForcePathStyle true \
--awsBucketRootPath $TECHDOCS_S3_DEV_ROOT_PATH
- name: Publish docs to prod bucket
# Currently the prod and dev publishes for our docs are kept in sync, similar to the wiki updates.
# Separate this out to a different ref to deploy prod from a specific branch only (e.g. main).
if: ${{ github.ref == 'refs/heads/dev' }}
run: |
techdocs-cli publish --publisher-type awsS3 \
--storage-name $TECHDOCS_S3_BUCKET_NAME \
--entity $ENTITY_NAMESPACE/$ENTITY_KIND/$ENTITY_NAME \
--awsEndpoint $AWS_ENDPOINT \
--awsS3ForcePathStyle true \
publish: 'true'
# Only publish to prod DevHub when the changes that triggered the job are on the `dev` branch
production: ${{ github.ref == 'refs/heads/dev' && 'true' || 'false' }}
bucket_name: ${{ secrets.TECHDOCS_S3_BUCKET_NAME }}
s3_access_key_id: ${{ secrets.TECHDOCS_AWS_ACCESS_KEY_ID }}
s3_secret_access_key: ${{ secrets.TECHDOCS_AWS_SECRET_ACCESS_KEY }}
s3_region: ${{ secrets.TECHDOCS_AWS_REGION }}
s3_endpoint: ${{ secrets.TECHDOCS_AWS_ENDPOINT }}
13 changes: 3 additions & 10 deletions .github/workflows/publish-image-backup-storage-gold.yml
@@ -1,13 +1,7 @@
# https://github.com/bcgov/helm-charts/tree/master/charts/backup-storage#build-the-container-image-using-github-actions
name: Create and publish Backup Storage Docker image Gold
name: Create and publish a development Backup Storage Image

on:
workflow_dispatch:
inputs:
postgres_version:
description: 'The postgres version'
required: true
options: ['12', '13']
on: workflow_dispatch

env:
GITHUB_REGISTRY: ghcr.io
@@ -28,7 +22,6 @@ jobs:
run: git clone https://github.com/BCDevOps/backup-container.git

- name: Replace the dockerfile
if: ${{ github.event.inputs.postgres_version == '13'}}
run: cp ./docker/backup-container/* ./backup-container/docker

- name: Log in to the GitHub Container registry
@@ -43,5 +36,5 @@ jobs:
with:
context: backup-container/docker
push: true
tags: ${{ env.GITHUB_REGISTRY }}/${{ env.IMAGE_NAME }}:postgres-${{ github.event.inputs.postgres_version}}
tags: ${{ env.GITHUB_REGISTRY }}/${{ env.IMAGE_NAME }}:development
labels: sso-keycloak-backup
61 changes: 0 additions & 61 deletions .github/workflows/publish-image-backup-storage-test.yml

This file was deleted.

48 changes: 0 additions & 48 deletions .github/workflows/publish-image-backup-storage.yml

This file was deleted.

2 changes: 1 addition & 1 deletion docker/backup-container/Dockerfile
@@ -9,7 +9,7 @@ WORKDIR /
# Load the backup scripts into the container (must be executable).
COPY backup.* /

COPY webhook-template.json /
# COPY webhook-template.json /

# ========================================================================================================
# Install go-crond (from https://github.com/webdevops/go-crond)
53 changes: 17 additions & 36 deletions docker/kc-cron-job/remove-dc-users.js
100644 → 100755
@@ -3,9 +3,7 @@ const async = require('async');

const STANDARD_REALM = 'standard';

const DC_REALM = 'digitalcredential';

async function removeVcUsers(runnerName, pgClient, env = 'dev', callback) {
async function removeDcUsers(runnerName, pgClient, env = 'dev', callback) {
try {
let deletedUserCount = 0;
const adminClient = await getAdminClient(env);
@@ -31,17 +29,6 @@ async function removeVcUsers(runnerName, pgClient, env = 'dev', callback) {
// delete user from standard realm
await adminClient.users.del({ realm: STANDARD_REALM, id });

const parentRealmUsers = await adminClient.users.find({
realm: DC_REALM,
username: username.split('@')[0],
max: 1
});

if (parentRealmUsers.length > 0) {
// delete user from digital credential realm
await adminClient.users.del({ realm: DC_REALM, id: parentRealmUsers[0]?.id });
}

const values = [env, username, STANDARD_REALM, users[x].attributes || {}];
await pgClient.query({ text, values });
deletedUserCount++;
Expand All @@ -61,40 +48,34 @@ async function removeVcUsers(runnerName, pgClient, env = 'dev', callback) {
callback(null, { runnerName, processed: total, deleteCount: deletedUserCount });
} catch (err) {
handleError(err);
callback(err);
callback(JSON.stringify(err?.message || err?.response?.data || err), { runnerName });
} finally {
await pgClient.end();
}
}
async function main() {
async.parallel(
[
async.reflectAll([
function (cb) {
removeVcUsers('dev', getPgClient(), 'dev', cb);
removeDcUsers('dev', getPgClient(), 'dev', cb);
},
function (cb) {
removeVcUsers('test', getPgClient(), 'test', cb);
removeDcUsers('test', getPgClient(), 'test', cb);
},
function (cb) {
removeVcUsers('prod', getPgClient(), 'prod', cb);
}
],
async function (err, results) {
if (err) {
console.error(err.message);
await sendRcNotification(
'dc-remove-users',
`**[${process.env.NAMESPACE}] Failed to remove digital credential users** \n\n` + err.message,
true
);
} else {
const a = results.map((res) => JSON.stringify(res));
await sendRcNotification(
'dc-remove-users',
`**[${process.env.NAMESPACE}] Successfully removed digital credential users** \n\n` + a.join('\n\n'),
false
);
removeDcUsers('prod', getPgClient(), 'prod', cb);
}
]),
async function (_, results) {
const hasError = results.find((r) => r.error);
const textContent = hasError ? 'Failed to remove' : 'Successfully removed';

await sendRcNotification(
'dc-remove-users',
`**[${process.env.NAMESPACE}] ${textContent} digital credential users** \n\n` +
results.map((r) => JSON.stringify(r)).join('\n\n'),
hasError
);
}
);

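Both cron jobs now wrap their per-environment tasks in `async.reflectAll`, so a failure in one environment no longer aborts the whole `async.parallel` run; each result arrives as `{ value }` or `{ error }`, and a single notification reports the combined outcome. Below is a minimal sketch of that pattern, assuming made-up task bodies, counts, and error messages; it is illustrative only, not the repository's code.

```js
// Illustrative sketch (task bodies and messages are assumptions), showing the
// pattern used above: async.reflectAll wraps each task so its outcome is
// captured as { value } on success or { error } on failure, async.parallel
// never short-circuits, and the final callback always runs and can report
// every environment at once.
const async = require('async');

async.parallel(
  async.reflectAll([
    (cb) => cb(null, { runnerName: 'dev', processed: 10, deleteCount: 2 }), // succeeds
    (cb) => cb('admin token expired', { runnerName: 'test' })               // fails
  ]),
  (_, results) => {
    // results: [ { value: { runnerName: 'dev', ... } }, { error: 'admin token expired' } ]
    const hasError = results.find((r) => r.error);
    const textContent = hasError ? 'Failed to remove' : 'Successfully removed';
    console.log(textContent + '\n' + results.map((r) => JSON.stringify(r)).join('\n\n'));
  }
);
```

Passing a stringified error to the callback (as the updated jobs do) keeps each reflected result JSON-serializable for the notification message.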
26 changes: 10 additions & 16 deletions docker/kc-cron-job/remove-inactive-idir-users.js
@@ -233,17 +233,16 @@ async function removeStaleUsersByEnv(env = 'dev', pgClient, runnerName, startFro
log(`[${runnerName}] ${total} users processed.`);
callback(null, { runnerName, processed: total, deleteCount: deletedUserCount });
} catch (err) {
const error = { runnerName, err: JSON.stringify(err.message || err.response.data || err) };
handleError(err);
callback(JSON.stringify(error));
callback(JSON.stringify(err?.message || err?.response?.data || err), { runnerName });
} finally {
await pgClient.end();
}
}

async function main() {
async.parallel(
[
async.reflectAll([
function (cb) {
removeStaleUsersByEnv('dev', getPgClient(), 'dev', 0, cb);
},
@@ -265,21 +264,16 @@ async function main() {
function (cb) {
removeStaleUsersByEnv('prod', getPgClient(), 'prod-05', 40000, cb);
}
],
async function (errors, results) {
if (errors) {
console.error('errors', errors);
await sendRcNotification(
'cron-remove-inactive-users',
`**[${process.env.NAMESPACE}] Failed to remove inactive users** \n\n` + errors,
false
);
}
const responses = results.map((result) => JSON.stringify(result));
]),
async function (_, results) {
const hasError = results.find((r) => r.error);
const textContent = hasError ? 'Failed to remove' : 'Successfully removed';

await sendRcNotification(
'cron-remove-inactive-users',
`**[${process.env.NAMESPACE}] Successfully removed inactive users** \n\n` + responses.join('\n\n'),
false
`**[${process.env.NAMESPACE}] ${textContent} inactive users** \n\n` +
results.map((r) => JSON.stringify(r)).join('\n\n'),
hasError
);
}
);