diff --git a/.github/workflows/auto-analyze-cd.yml b/.github/workflows/auto-analyze-cd.yml
deleted file mode 100644
index f3bd1d5dd..000000000
--- a/.github/workflows/auto-analyze-cd.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: Auto analyze CD
-
-on:
- push:
- branches:
- - main
- paths:
- - "services/auto-analyze**"
- workflow_dispatch:
- inputs:
- target:
- description: "Deploy To"
- required: true
- type: choice
- options:
- - dev
- - test
- - sandbox
- - prod
-
-jobs:
- auto-analyze-cd:
- uses: bcgov/bcregistry-sre/.github/workflows/backend-cd.yaml@main
- with:
- target: ${{ inputs.target }}
- app_name: "auto-analyze"
- working_directory: "./services/auto-analyze"
- secrets:
- WORKLOAD_IDENTIFY_POOLS_PROVIDER: ${{ secrets.WORKLOAD_IDENTIFY_POOLS_PROVIDER }}
- GCP_SERVICE_ACCOUNT: ${{ secrets.GCP_SERVICE_ACCOUNT }}
diff --git a/.github/workflows/auto-analyze-ci.yml b/.github/workflows/auto-analyze-ci.yml
deleted file mode 100644
index b2843b01c..000000000
--- a/.github/workflows/auto-analyze-ci.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Auto analyze CI
-
-on:
- pull_request:
- paths:
- - "services/auto-analyze/**"
- workflow_dispatch:
-
-defaults:
- run:
- shell: bash
- working-directory: ./services/auto-analyze
-
-jobs:
- auto-analyze-ci:
- uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main
- with:
- app_name: "auto-analyze"
- working_directory: "./services/auto-analyze"
- codecov_flag: "autoanalyze"
- skip_isort: "true"
- skip_black: "true"
\ No newline at end of file
diff --git a/.github/workflows/bad-name-notifier-CI.yml b/.github/workflows/bad-name-notifier-CI.yml
new file mode 100644
index 000000000..b041fabcf
--- /dev/null
+++ b/.github/workflows/bad-name-notifier-CI.yml
@@ -0,0 +1,72 @@
+name: Bad Name Notifier CI
+
+on:
+ pull_request:
+ types: [assigned, synchronize]
+ paths:
+ - "jobs/bad-name-notifier/**"
+ workflow_dispatch:
+
+defaults:
+ run:
+ shell: bash
+ working-directory: ./jobs/bad-name-notifier
+
+jobs:
+ setup-job:
+ runs-on: ubuntu-22.04
+
+ steps:
+ - uses: actions/checkout@v4
+ - run: "true"
+
+ linting:
+ needs: setup-job
+ runs-on: ubuntu-22.04
+
+ strategy:
+ matrix:
+ python-version: ["3.12"]
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ make setup
+ - name: Lint with flake8
+ id: flake8
+ run: |
+ poetry run flake8 src/app.py
+
+ # Uncomment and customize this block if testing is required
+ # testing:
+ # needs: setup-job
+ # runs-on: ubuntu-20.04
+ # steps:
+ # - uses: actions/checkout@v4
+ # - name: Set up Python ${{ matrix.python-version }}
+ # uses: actions/setup-python@v1
+ # with:
+ # python-version: ${{ matrix.python-version }}
+ # - name: Install dependencies
+ # run: |
+ # make setup
+ # - name: Test with pytest
+ # id: test
+ # run: |
+ # make test
+
+ build-check:
+ needs: setup-job
+ runs-on: ubuntu-22.04
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Build to check strictness
+ id: build
+ run: |
+ make build-nc
diff --git a/.github/workflows/bad-name-notifier-cd.yml b/.github/workflows/bad-name-notifier-cd.yml
new file mode 100644
index 000000000..5d0600370
--- /dev/null
+++ b/.github/workflows/bad-name-notifier-cd.yml
@@ -0,0 +1,82 @@
+name: Bad Name Notifier CD
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - "jobs/bad-name-notifier/**"
+ workflow_dispatch:
+ inputs:
+ environment:
+ description: "Environment (dev/test/prod)"
+ required: true
+ default: "dev"
+
+defaults:
+ run:
+ shell: bash
+ working-directory: ./jobs/bad-name-notifier
+
+env:
+ APP_NAME: "bad-name-notifier"
+ TAG_NAME: "dev"
+
+jobs:
+ bad-name-notifier-cd-by-push:
+ runs-on: ubuntu-22.04
+
+ if: github.event_name == 'push' && github.repository == 'bcgov/namex'
+ environment:
+ name: "dev"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Login Openshift
+ shell: bash
+ run: |
+ oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
+
+ - name: CD Flow
+ shell: bash
+ env:
+ OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
+ OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
+ OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
+ OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
+ OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
+ TAG_NAME: ${{ env.TAG_NAME }}
+ run: |
+ make cd
+
+ bad-name-notifier-cd-by-dispatch:
+ runs-on: ubuntu-22.04
+
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/namex'
+ environment:
+ name: "${{ github.event.inputs.environment }}"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set env by input
+ run: |
+ echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV
+
+ - name: Login Openshift
+ shell: bash
+ run: |
+ oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
+
+ - name: CD Flow
+ shell: bash
+ env:
+ OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
+ OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
+ OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
+ OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
+ OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
+ TAG_NAME: ${{ env.TAG_NAME }}
+ run: |
+ make cd
diff --git a/.github/workflows/emailer-cd-gcp.yml b/.github/workflows/emailer-cd-gcp.yml
index 746217d6f..0661db496 100644
--- a/.github/workflows/emailer-cd-gcp.yml
+++ b/.github/workflows/emailer-cd-gcp.yml
@@ -24,7 +24,7 @@ jobs:
id-token: write
contents: write
- uses: bcgov/bcregistry-sre/.github/workflows/cloud-run-service-cd.yaml@main
+ uses: bcgov/bcregistry-sre/.github/workflows/backend-cd.yaml@main
with:
target: ${{ github.event.inputs.target }} # Corrected the input reference
app_name: "namex-emailer"
diff --git a/.github/workflows/sftp-nuans-report-cd.yml b/.github/workflows/sftp-nuans-report-cd.yml
new file mode 100644
index 000000000..595baaa07
--- /dev/null
+++ b/.github/workflows/sftp-nuans-report-cd.yml
@@ -0,0 +1,103 @@
+name: SFTP NUANS Report Job CD
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - "jobs/sftp-nuans-report/**"
+ workflow_dispatch:
+ inputs:
+ environment:
+ description: "Environment (dev/test/prod)"
+ required: true
+ default: "dev"
+
+defaults:
+ run:
+ shell: bash
+ working-directory: ./jobs/sftp-nuans-report
+
+env:
+ APP_NAME: "sftp-nuans-report"
+ TAG_NAME: "dev"
+
+jobs:
+ sftp-nuans-report-cd-by-push:
+ runs-on: ubuntu-22.04
+
+ if: github.event_name == 'push' && github.repository == 'bcgov/namex'
+ environment:
+ name: "dev"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Login Openshift
+ shell: bash
+ run: |
+ oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
+
+ - name: CD Flow
+ shell: bash
+ env:
+ OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
+ OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
+ OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
+ OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
+ OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
+ TAG_NAME: ${{ env.TAG_NAME }}
+ run: |
+ make cd
+
+ - name: Rocket.Chat Notification
+ uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master
+ if: failure()
+ with:
+ type: ${{ job.status }}
+ job_name: "*SFTP NUANS Report Job failed to build/deploy to ${{env.TAG_NAME}}*"
+ channel: "#registries-bot"
+ url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
+ commit: true
+ token: ${{ secrets.GITHUB_TOKEN }}
+
+ sftp-nuans-report-cd-by-dispatch:
+ runs-on: ubuntu-22.04
+
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/namex'
+ environment:
+ name: "${{ github.event.inputs.environment }}"
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set env by input
+ run: |
+ echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV
+
+ - name: Login Openshift
+ shell: bash
+ run: |
+ oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
+
+ - name: CD Flow
+ shell: bash
+ env:
+ OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
+ OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
+ OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
+ OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
+ OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
+ TAG_NAME: ${{ env.TAG_NAME }}
+ run: |
+ make cd
+
+ - name: Rocket.Chat Notification
+ uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master
+ if: failure()
+ with:
+ type: ${{ job.status }}
+ job_name: "*SFTP NUANS Report Job failed to build/deploy to ${{env.TAG_NAME}}*"
+ channel: "#registries-bot"
+ url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
+ commit: true
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/sftp-nuans-report-ci.yml b/.github/workflows/sftp-nuans-report-ci.yml
new file mode 100644
index 000000000..f8fd0b4bd
--- /dev/null
+++ b/.github/workflows/sftp-nuans-report-ci.yml
@@ -0,0 +1,77 @@
+name: SFTP NUANS Report Job CI
+
+on:
+ pull_request:
+ types: [assigned, synchronize]
+ paths:
+ - "jobs/sftp-nuans-report/**"
+
+
+defaults:
+ run:
+ shell: bash
+ working-directory: ./jobs/sftp-nuans-report
+
+jobs:
+ setup-job:
+ runs-on: ubuntu-22.04
+
+ if: github.repository == 'bcgov/namex'
+
+ steps:
+ - uses: actions/checkout@v4
+ - run: "true"
+
+ linting:
+ needs: setup-job
+ runs-on: ubuntu-22.04
+
+ strategy:
+ matrix:
+ python-version: [3.8.12]
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ make setup
+ - name: Lint with pylint
+ id: pylint
+ run: |
+ make pylint
+ - name: Lint with flake8
+ id: flake8
+ run: |
+ make flake8
+
+ testing:
+ needs: setup-job
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python 3.8.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8.12"
+ - name: Install dependencies
+ run: |
+ make setup
+ - name: Test with pytest
+ id: test
+ run: |
+ echo "make test is temporarily disabled for this job"
+
+ build-check:
+ needs: setup-job
+ runs-on: ubuntu-22.04
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: build to check strictness
+ id: build
+ run: |
+ make build-nc
diff --git a/api/.env.sample b/api/.env.sample
index 8e69ad308..f3efbbf76 100644
--- a/api/.env.sample
+++ b/api/.env.sample
@@ -81,3 +81,6 @@ MRAS_SVC_API_KEY=
# Local development only
DISABLE_NAMEREQUEST_SOLR_UPDATES=1
+
+# launchdarkly
+NAMEX_LD_SDK_ID=sdk-075200eb-4ff7-4e0e-872a-848585d3d460
diff --git a/api/Makefile b/api/Makefile
index 9e91ec385..ba74001eb 100644
--- a/api/Makefile
+++ b/api/Makefile
@@ -86,15 +86,18 @@ mac-cov: local-test ## Run the coverage report and display in a browser window (
#################################################################################
cd: ## CD flow
ifeq ($(TAG_NAME), test)
-cd: update-env
+## cd: update-env
+cd:
oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME)
else ifeq ($(TAG_NAME), prod)
-cd: update-env
+## cd: update-env
+cd:
oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F)
oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME)
else
TAG_NAME=dev
-cd: build update-env tag
+## cd: build update-env tag
+cd: build tag
endif
build: ## Build the docker container
@@ -112,16 +115,16 @@ push: #build ## Push the docker container to the registry & tag latest
docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\
docker push $(REGISTRY_IMAGE):latest
-VAULTS=`cat devops/vaults.json`
-update-env: ## Update env from 1pass
- oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \
- -m "secret" \
- -e "$(TAG_NAME)" \
- -a "$(DOCKER_NAME)-$(TAG_NAME)" \
- -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \
- -v "$(VAULTS)" \
- -r "true" \
- -f "false"
+# VAULTS=`cat devops/vaults.json`
+# update-env: ## Update env from 1pass
+# oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \
+# -m "secret" \
+# -e "$(TAG_NAME)" \
+# -a "$(DOCKER_NAME)-$(TAG_NAME)" \
+# -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \
+# -v "$(VAULTS)" \
+# -r "true" \
+# -f "false"
tag: push ## tag image
oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME)
@@ -143,4 +146,4 @@ db: ## Update the local database
.DEFAULT_GOAL := help
help:
- @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
\ No newline at end of file
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/api/config.py b/api/config.py
index b2a085f40..16e6330d1 100644
--- a/api/config.py
+++ b/api/config.py
@@ -67,7 +67,10 @@ class Config(object):
DB_NAME = os.getenv('NAMEX_DATABASE_NAME', '')
DB_HOST = os.getenv('NAMEX_DATABASE_HOST', '')
DB_PORT = os.getenv('NAMEX_DATABASE_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
+ if DB_UNIX_SOCKET := os.getenv('NAMEX_DATABASE_UNIX_SOCKET', None):
+ SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}'
+ else:
+ SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
# KEYCLOAK & JWT_OIDC Settings
JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG')
diff --git a/api/devops/gcp/clouddeploy.yaml b/api/devops/gcp/clouddeploy.yaml
index 0109be3d7..39dd79a58 100644
--- a/api/devops/gcp/clouddeploy.yaml
+++ b/api/devops/gcp/clouddeploy.yaml
@@ -31,7 +31,7 @@ serialPipeline:
service-name: "namex-api-dev"
container-name: "namex-api-dev"
service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
- cloudsql-instances: ""
+ cloudsql-instances: "a083gt-dev:northamerica-northeast1:namex-db-dev"
- targetId: a083gt-test
profiles: [test]
strategy:
diff --git a/api/devops/vaults.gcp.env b/api/devops/vaults.gcp.env
index dfab34508..6bebafaa7 100644
--- a/api/devops/vaults.gcp.env
+++ b/api/devops/vaults.gcp.env
@@ -10,11 +10,11 @@ ENTITY_SERVICE_ACCOUNT_CLIENT_ID="op://keycloak/$APP_ENV/entity-service-account/
ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET="op://keycloak/$APP_ENV/entity-service-account/ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET"
SENTRY_DSN="op://sentry/$APP_ENV/examination/SENTRY_DSN"
SENTRY_ENABLE="op://sentry/$APP_ENV/examination/SENTRY_ENABLE"
-NAMEX_DATABASE_HOST="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_HOST"
-NAMEX_DATABASE_PORT="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_PORT"
-NAMEX_DATABASE_NAME="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_NAME"
-NAMEX_DATABASE_USERNAME="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_USERNAME"
-NAMEX_DATABASE_PASSWORD="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_PASSWORD"
+NAMEX_DATABASE_UNIX_SOCKET="op://database/$APP_ENV/namex-db-gcp/DATABASE_UNIX_SOCKET"
+NAMEX_DATABASE_PORT="op://database/$APP_ENV/namex-db-gcp/DATABASE_PORT"
+NAMEX_DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME"
+NAMEX_DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME"
+NAMEX_DATABASE_PASSWORD="op://database/$APP_ENV/namex-db-gcp/DATABASE_PASSWORD"
REPORT_API_URL="op://API/$APP_ENV/report-api/REPORT_API_URL"
REPORT_API_VERSION="op://API/$APP_ENV/report-api/REPORT_API_VERSION"
PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL"
diff --git a/api/flags.json b/api/flags.json
new file mode 100644
index 000000000..fe9e799c1
--- /dev/null
+++ b/api/flags.json
@@ -0,0 +1,8 @@
+{
+ "flagValues": {
+ "string-flag": "a string value",
+ "bool-flag": true,
+ "integer-flag": 10,
+ "enable-won-emails": false
+ }
+}
diff --git a/api/namex/VERSION.py b/api/namex/VERSION.py
index 3f0999100..e4b162959 100644
--- a/api/namex/VERSION.py
+++ b/api/namex/VERSION.py
@@ -1 +1 @@
-__version__ = '1.2.14'
+__version__ = '1.2.25'
diff --git a/api/namex/__init__.py b/api/namex/__init__.py
index a5ac2db58..89e6a6615 100644
--- a/api/namex/__init__.py
+++ b/api/namex/__init__.py
@@ -32,6 +32,7 @@
from namex.models import db, ma
from namex.resources import api
from namex.utils.run_version import get_run_version
+from namex.services import flags
# noqa: I003; dont know what flake8 wants here
@@ -52,6 +53,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')):
integrations=[FlaskIntegration()]
)
+ flags.init_app(app)
queue.init_app(app)
db.init_app(app)
diff --git a/api/namex/resources/name_requests/report_resource.py b/api/namex/resources/name_requests/report_resource.py
index 53fd4c54b..09b4d7bec 100644
--- a/api/namex/resources/name_requests/report_resource.py
+++ b/api/namex/resources/name_requests/report_resource.py
@@ -9,7 +9,9 @@
import requests
from flask import current_app, jsonify, request, make_response
from flask_restx import Resource
+from gcp_queue.logging import structured_log
+from namex.constants import RequestAction
from namex.models import Request, State
from namex.utils.api_resource import handle_exception
from namex.utils.auth import cors_preflight, full_access_to_name_request
@@ -18,6 +20,7 @@
from namex.services.name_request.utils import get_mapped_entity_and_action_code
from namex.utils.auth import get_client_credentials
from .api_namespace import api
+from ..utils import EntityUtils
setup_logging() # Important to do this first
@@ -144,10 +147,12 @@ def _get_template_data(nr_model):
nr_report_json['legalAct'] = ReportResource._get_legal_act(nr_model['entity_type_cd'])
isXPRO = nr_model['entity_type_cd'] in ['XCR', 'XUL', 'RLC', 'XLP', 'XLL', 'XCP', 'XSO']
nr_report_json['isXPRO'] = isXPRO
- nr_report_json['isModernized'] = ReportResource._is_modernized(nr_model['entity_type_cd'])
- nr_report_json['isColin'] = ReportResource._is_colin(nr_model['entity_type_cd'])
- nr_report_json['isSociety'] = ReportResource._is_society(nr_model['entity_type_cd'])
- nr_report_json['isPaper'] = not (ReportResource._is_colin(nr_model['entity_type_cd']) or ReportResource._is_modernized(nr_model['entity_type_cd']) or ReportResource._is_society(nr_model['entity_type_cd']))
+ instruction_group = ReportResource._get_instruction_group(nr_model['entity_type_cd'], nr_model['request_action_cd'], nr_model['corpNum'])
+ nr_report_json['isModernized'] = True if instruction_group == 'modernized' else False
+ nr_report_json['isColin'] = True if instruction_group == 'colin' else False
+ nr_report_json['isSociety'] = True if instruction_group == 'so' else False
+ nr_report_json['isNew'] = True if instruction_group == 'new' else False
+ nr_report_json['isPaper'] = not (ReportResource._is_colin(nr_model['entity_type_cd']) or ReportResource._is_modernized(nr_model['entity_type_cd']) or ReportResource._is_society(nr_model['entity_type_cd']) or ReportResource._is_potential_colin(nr_model['entity_type_cd']))
nr_report_json['requestCodeDescription'] = \
ReportResource._get_request_action_cd_description(nr_report_json['request_action_cd'])
nr_report_json['nrStateDescription'] = \
@@ -168,7 +173,8 @@ def _get_template_data(nr_model):
if nr_report_json['applicants']['countryTypeCd']:
nr_report_json['applicants']['countryName'] = \
pycountry.countries.search_fuzzy(nr_report_json['applicants']['countryTypeCd'])[0].name
- actions_obj = ReportResource._get_next_action_text(nr_model['entity_type_cd'])
+ action_url = ReportResource._get_action_url(nr_model['entity_type_cd'], instruction_group)
+ actions_obj = ReportResource._get_next_action_text(nr_model['entity_type_cd'], action_url)
if actions_obj:
action_text = actions_obj.get(nr_report_json['request_action_cd'])
if not action_text:
@@ -255,16 +261,24 @@ def _get_entity_type_description(entity_type_cd: str):
}
return entity_type_descriptions.get(entity_type_cd, None)
-
+ @staticmethod
+ def _is_lear_entity(corpNum):
+ if not corpNum:
+ return False
+ entity_url = f'{current_app.config.get("ENTITY_SVC_URL")}/businesses/{corpNum}'
+ response = EntityUtils.make_authenticated_request(entity_url)
+ if response.status_code == HTTPStatus.OK and response.json():
+ return True
+ return False
+
@staticmethod
def _is_modernized(legal_type):
modernized_list = ['GP', 'DBA', 'FR', 'CP', 'BC']
return legal_type in modernized_list
-
@staticmethod
def _is_colin(legal_type):
- colin_list = ['CR', 'UL', 'CC', 'XCR', 'XUL', 'RLC']
+ colin_list = ['XCR', 'XUL', 'RLC']
return legal_type in colin_list
@staticmethod
@@ -272,19 +286,41 @@ def _is_society(legal_type):
society_list = ['SO', 'XSO']
return legal_type in society_list
+ @staticmethod
+ def _is_potential_colin(legal_type):
+ potential_colin_list = ['CR', 'UL', 'CC']
+ return legal_type in potential_colin_list
@staticmethod
- def _get_instruction_group(legal_type):
+ def _get_instruction_group(legal_type, request_action, corpNum):
+ if request_action in {RequestAction.CHG.value, RequestAction.CNV.value}:
+ # For the 'Name Change' or 'Alteration', return 'modernized' if the company is in LEAR, and 'colin' if not
+ return 'modernized' if ReportResource._is_lear_entity(corpNum) else 'colin'
+ if not ReportResource._get_enable_won_emails_ff():
+ return ReportResource._old_get_instruction_group(legal_type)
if ReportResource._is_modernized(legal_type):
return 'modernized'
if ReportResource._is_colin(legal_type):
return 'colin'
if ReportResource._is_society(legal_type):
return 'so'
+ # return "new" for BC/CC/ULC IAs, "colin" for for BC/CC/ULC others
+ if ReportResource._is_potential_colin(legal_type):
+ return 'new' if request_action == RequestAction.NEW.value else 'colin'
+ return ''
+
+ @staticmethod
+ def _old_get_instruction_group(legal_type):
+ if ReportResource._is_modernized(legal_type):
+ return 'modernized'
+ if ReportResource._is_colin(legal_type) or ReportResource._is_potential_colin(legal_type):
+ return 'colin'
+ if ReportResource._is_society(legal_type):
+ return 'so'
return ''
@staticmethod
- def _get_action_url(entity_type_cd: str):
+ def _get_action_url(entity_type_cd: str, instruction_group: str):
DECIDE_BUSINESS_URL = current_app.config.get('DECIDE_BUSINESS_URL')
CORP_FORMS_URL = current_app.config.get('CORP_FORMS_URL')
@@ -294,8 +330,8 @@ def _get_action_url(entity_type_cd: str):
url = {
# BC Types
- 'CR': CORP_ONLINE_URL,
- 'UL': CORP_ONLINE_URL,
+ 'CR': BUSINESS_URL if instruction_group == 'modernized' else CORP_ONLINE_URL,
+ 'UL': BUSINESS_URL if instruction_group == 'modernized' else CORP_ONLINE_URL,
'FR': DECIDE_BUSINESS_URL,
'GP': DECIDE_BUSINESS_URL,
'DBA': DECIDE_BUSINESS_URL,
@@ -303,7 +339,7 @@ def _get_action_url(entity_type_cd: str):
'LL': CORP_FORMS_URL,
'CP': BUSINESS_URL,
'BC': BUSINESS_URL,
- 'CC': CORP_ONLINE_URL,
+ 'CC': BUSINESS_URL if instruction_group == 'modernized' else CORP_ONLINE_URL,
'SO': SOCIETIES_URL,
'PA': ReportResource.GENERIC_STEPS,
'FI': ReportResource.GENERIC_STEPS,
@@ -350,19 +386,19 @@ def _get_legal_act(entity_type_cd: str):
return next_action_text.get(entity_type_cd, None)
@staticmethod
- def _get_next_action_text(entity_type_cd: str):
+ def _get_next_action_text(entity_type_cd: str, url: str):
BUSINESS_CHANGES_URL = current_app.config.get('BUSINESS_CHANGES_URL')
- url = ReportResource._get_action_url(entity_type_cd)
-
next_action_text = {
# BC Types
'CR': {
+ 'NEW': 'Check your email for instructions on how to complete your application using this name request.',
'DEFAULT': f'Use this name request to complete your application by visiting '
f'{url}'
},
'UL': {
+ 'NEW': 'Check your email for instructions on how to complete your application using this name request.',
'DEFAULT': f'Use this name request to complete your application by visiting '
f'{url}'
},
@@ -405,6 +441,7 @@ def _get_next_action_text(entity_type_cd: str):
'DEFAULT': f'Use this name request to complete your application by visiting {url}'
},
'CC': {
+ 'NEW': 'Check your email for instructions on how to complete your application using this name request.',
'DEFAULT': f'Use this name request to complete your application by visiting '
f'{url}'
},
@@ -458,4 +495,115 @@ def _get_next_action_text(entity_type_cd: str):
'DEFAULT': 'FIRM (Legacy Oracle)'
}
}
- return next_action_text.get(entity_type_cd, None)
+ old_next_action_text = {
+ # BC Types
+ 'CR': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ f'{url}'
+ },
+ 'UL': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ f'{url}'
+ },
+ 'FR': {
+ 'NEW': f'Use this name request to complete your application by visiting '
+ 'Registering Proprietorships and Partnerships',
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ 'Registering Proprietorships and Partnerships for more information. To learn more, visit '
+ f'Making Changes to your Proprietorship or'
+ ' Partnership'
+ },
+ 'GP': {
+ 'NEW': f'Use this name request to complete your application by visiting '
+ 'BC Registries and Online Services',
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ 'BC Registries and Online Services for more information. To learn more, visit '
+ f'Making Changes to your Proprietorship or'
+ ' Partnership'
+ },
+ 'DBA': {
+ 'NEW': f'Use this name request to complete your application by visiting '
+ 'Registering Proprietorships and Partnerships',
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ 'Registering Proprietorships and Partnerships for more information. To learn more, visit '
+ f'Making Changes to your Proprietorship or'
+ ' Partnership'
+ },
+ 'LP': {
+ 'DEFAULT': f'Visit Forms, fees and information packages page and'
+ ' download the appropriate form'
+ },
+ 'LL': {
+ 'DEFAULT': f'Visit Forms, fees and information packages page and'
+ ' download the appropriate form'
+ },
+ 'CP': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting {url}'
+ },
+ 'BC': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting {url}'
+ },
+ 'CC': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ f'{url}'
+ },
+ 'SO': {
+ 'DEFAULT': f'To complete your filing, visit '
+ f'{url} and login with your BCeID.'
+ },
+ 'PA': {
+ 'DEFAULT': ReportResource.GENERIC_STEPS
+ },
+ 'FI': {
+ 'DEFAULT': ReportResource.GENERIC_STEPS
+ },
+ 'PAR': {
+ 'DEFAULT': ReportResource.GENERIC_STEPS
+ },
+ # XPRO and Foreign Types
+ 'XCR': {
+ 'NEW': f'Use this name request to complete your application by visiting '
+ f'{url}',
+ 'CHG': f'Use this name request to complete your application by visiting '
+ f'{url}',
+ 'DEFAULT': f'To complete your filing, visit our Forms page to'
+ ' download and submit a form'
+ },
+ 'XUL': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ f'{url}'
+ },
+ 'RLC': {
+ 'DEFAULT': f'Use this name request to complete your application by visiting '
+ f'{url}'
+ },
+ 'XLP': {
+ 'DEFAULT': f'Visit Forms, fees and information packages page and'
+ ' download the appropriate form'
+ },
+ 'XLL': {
+ 'DEFAULT': f'Visit Forms, fees and information packages page and'
+ ' download the appropriate form'
+ },
+ 'XCP': {
+ 'DEFAULT': 'Extraprovincial Cooperative Association'
+ },
+ 'XSO': {
+ 'DEFAULT': f'To complete your filing, visit '
+ f'{url} and login with your BCeID.'
+ },
+ # Used for mapping back to legacy oracle codes, description not required
+ 'FIRM': {
+ 'DEFAULT': 'FIRM (Legacy Oracle)'
+ }
+ }
+ if ReportResource._get_enable_won_emails_ff():
+ return next_action_text.get(entity_type_cd, None)
+ return old_next_action_text.get(entity_type_cd, None)
+
+ @staticmethod
+ def _get_enable_won_emails_ff():
+ from namex.services import flags # pylint: disable=import-outside-toplevel
+ enable_won_emails = flags.value('enable-won-emails')
+ structured_log(request, "DEBUG", f"NR-Email: NameX API: enable_way_of_navigation_emails feature_flag: {enable_won_emails}")
+ return enable_won_emails
diff --git a/api/namex/resources/payment/payment.py b/api/namex/resources/payment/payment.py
index a82e6652c..db91f2174 100644
--- a/api/namex/resources/payment/payment.py
+++ b/api/namex/resources/payment/payment.py
@@ -1,4 +1,5 @@
import json
+import requests
from datetime import datetime, timedelta
from dateutil import parser as dateutil_parser
@@ -315,6 +316,50 @@ def get(self, nr_id):
})
class CreateNameRequestPayment(AbstractNameRequestResource):
+
+ @staticmethod
+ def _is_staff(auth_header):
+ """Determine if the user is a staff member."""
+ return auth_header and validate_roles(jwt, auth_header, [User.STAFF])
+
+ @staticmethod
+ def _affiliate_business_account(auth_header, business_account_id, nr_model):
+ """Affiliate the new NR to the business account."""
+ auth_svc_url = current_app.config.get('AUTH_SVC_URL')
+ if not auth_svc_url:
+ raise ValueError("AUTH_SVC_URL is not configured in the application.")
+
+ nr_num = nr_model.nrNum
+ phone_num = nr_model.applicants[0].phoneNumber
+ auth_url = f'{auth_svc_url}/orgs/{business_account_id}/affiliations?newBusiness=true'
+ headers = {
+ 'Authorization': auth_header,
+ 'Content-Type': 'application/json'
+ }
+ payload = {
+ 'businessIdentifier': nr_num,
+ 'phone': phone_num
+ }
+
+ try:
+ response = requests.post(url=auth_url, json=payload, headers=headers)
+
+ # Check the response status
+ if not response.ok:
+ current_app.logger.error(
+ f"Failed to affiliate business account {business_account_id} with {nr_num}. "
+ f"Status Code: {response.status_code}, Response: {response.text}"
+ )
+ else:
+ current_app.logger.debug(
+ f"Successfully affiliated business account {business_account_id} with {nr_num}."
+ )
+ except requests.exceptions.RequestException as e:
+ current_app.logger.error(
+ f"Error affiliating business account {business_account_id} with {nr_num}: {e}"
+ )
+
+
"""Create name request payment endpoints."""
@payment_api.expect(payment_request_schema)
@payment_api.response(200, 'Success', '')
@@ -337,7 +382,6 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value):
nr_model = RequestDAO.query.get(nr_id)
# only used for adding namerequest service user to event recording
nr_svc = self.nr_service
-
if not nr_model:
# Should this be a 400 or 404... hmmm
return make_response(jsonify(message=f'Name Request {nr_id} not found'), 400)
@@ -362,10 +406,17 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value):
if not valid_nr_state:
return make_response(jsonify(message=f'Invalid NR state [{payment_action}]'), 400)
+ json_input = request.get_json()
+ headers = (json_input or {}).get('headers') or {}
+ auth_header = headers.get('Authorization')
+ is_new_nr = False
+ is_staff = self._is_staff(auth_header)
+
if valid_payment_action and valid_nr_state:
if payment_action in [NameRequestActions.CREATE.value, NameRequestActions.RESUBMIT.value]:
update_solr = True
nr_model = self.add_new_nr_number(nr_model, update_solr)
+ is_new_nr = True
existing_payment = PaymentDAO.find_by_existing_nr_id(nr_id, payment_action)
if existing_payment:
@@ -383,7 +434,6 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value):
nr_model,
nr_svc)
- json_input = request.get_json()
payment_request = {}
if not json_input:
# return make_response(jsonify(message=MSG_BAD_REQUEST_NO_JSON_BODY), 400
@@ -400,14 +450,11 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value):
business_info = payment_request.get('businessInfo')
details = payment_request.get('details')
- headers = json_input.get('headers')
- auth = headers.get('Authorization')
account_info = {}
-
if folio_number := headers.pop('folioNumber', None):
filing_info['folioNumber'] = folio_number
- if auth and validate_roles(jwt, auth, [User.STAFF]):
+ if is_staff:
if routing_slip_number := headers.get('routingSlipNumber'):
account_info['routingSlip'] = routing_slip_number
del headers['routingSlipNumber']
@@ -445,12 +492,23 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value):
# payment will be saved in handle_payment_response with a payment_token
payment_response = create_payment(req.as_dict(), headers)
- return handle_payment_response(payment_action,
+ payment_response_result = handle_payment_response(payment_action,
payment_response,
payment,
nr_id,
nr_model,
nr_svc)
+ # after a new NR saved
+ # if logged in user is a staff, affiliate the new NR with the business_account_id
+ if is_new_nr and is_staff:
+ business_account_id = json_input.get("businessAccountId")
+ if business_account_id:
+ current_app.logger.debug(f"Affiliating to business account ID: {business_account_id} with {nr_model.nrNum}")
+ # affiliate the new NR to the account_info
+ #affiliate_business_account(business_account_id, nr_model.nrNum)
+ self._affiliate_business_account(auth_header, business_account_id, nr_model)
+
+ return payment_response_result
except PaymentServiceError as err:
return handle_exception(err, err.message, 500)
diff --git a/api/namex/resources/payment_societies.py b/api/namex/resources/payment_societies.py
index 5664593ec..5d6d13f58 100644
--- a/api/namex/resources/payment_societies.py
+++ b/api/namex/resources/payment_societies.py
@@ -5,7 +5,7 @@
from sqlalchemy.orm.exc import NoResultFound
from namex import jwt
-from namex.models import PaymentSociety as PaymentSocietyDAO, Request as RequestDAO, User
+from namex.models import State, PaymentSociety as PaymentSocietyDAO, Request as RequestDAO, User
from namex.utils.auth import cors_preflight
from namex.utils.logging import setup_logging
@@ -121,7 +121,8 @@ def post(self):
ps_instance.save_to_db()
current_app.logger.debug(f'ps_instance saved...')
- nrd.stateCd = 'DRAFT'
+ if nrd.stateCd == State.PENDING_PAYMENT:
+ nrd.stateCd = 'DRAFT'
nrd.save_to_db()
current_app.logger.debug(f'nrd saved...')
diff --git a/api/namex/resources/requests.py b/api/namex/resources/requests.py
index bc3857649..a8d944fd0 100644
--- a/api/namex/resources/requests.py
+++ b/api/namex/resources/requests.py
@@ -377,13 +377,13 @@ def post(self, *args, **kwargs):
@cors_preflight("GET, POST")
@api.route('/search', methods=['GET', 'POST', 'OPTIONS'])
class RequestSearch(Resource):
- """Search for NR's."""
+ """Search for NR's by NR number or associated name."""
@staticmethod
@cors.crossdomain(origin='*')
@jwt.requires_auth
def get():
- """Query for name requests.
+ """Query for name requests with partial matching for both NR number and name.
example: query=NR3742302 or query=abcd
"""
@@ -441,7 +441,8 @@ def get():
start += rows
return make_response(jsonify(data), 200)
- except Exception:
+ except Exception as e:
+ current_app.logger.error(f"Error in /search, {e}")
return make_response(jsonify({'message': 'Internal server error'}), 500)
@staticmethod
diff --git a/api/namex/services/__init__.py b/api/namex/services/__init__.py
index d8304be4d..53d1d61c7 100644
--- a/api/namex/services/__init__.py
+++ b/api/namex/services/__init__.py
@@ -7,3 +7,6 @@
from .messages import MessageServices
from .audit_trail import EventRecorder
from .name_request.name_request_state import is_reapplication_eligible
+from .flags import Flags
+
+flags = Flags()
diff --git a/api/namex/services/flags.py b/api/namex/services/flags.py
new file mode 100644
index 000000000..25c3366e2
--- /dev/null
+++ b/api/namex/services/flags.py
@@ -0,0 +1,118 @@
+# Copyright © 2025 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Manage the Feature Flags initialization, setup and service."""
+import logging
+from flask import current_app
+from ldclient import get as ldclient_get, set_config as ldclient_set_config # noqa: I001
+from ldclient.config import Config # noqa: I005
+from ldclient import Context
+from ldclient.integrations import Files
+
+from namex.models import User
+
+
+class Flags():
+ """Wrapper around the feature flag system.
+
+ calls FAIL to FALSE
+
+ If the feature flag service is unavailable
+ AND
+ there is no local config file
+ Calls -> False
+
+ """
+
+ def __init__(self, app=None):
+ """Initialize this object."""
+ self.sdk_key = None
+ self.app = None
+
+ if app:
+ self.init_app(app)
+
+ def init_app(self, app):
+ """Initialize the Feature Flag environment."""
+ self.app = app
+ self.sdk_key = app.config.get('NAMEX_LD_SDK_ID')
+ env = app.config.get('ENVIRONMENT')
+
+ if self.sdk_key or env == 'local':
+
+ if env == 'local':
+ factory = Files.new_data_source(paths=['flags.json'], auto_update=True)
+ config = Config(sdk_key=self.sdk_key,
+ update_processor_class=factory,
+ send_events=False)
+ else:
+ config = Config(sdk_key=self.sdk_key)
+
+ ldclient_set_config(config)
+ client = ldclient_get()
+
+ app.extensions['featureflags'] = client
+
+ def _get_client(self):
+ try:
+ client = current_app.extensions['featureflags']
+ except KeyError:
+ try:
+ self.init_app(current_app)
+ client = current_app.extensions['featureflags']
+ except KeyError:
+ logging.warning("Couldn\'t retrieve launch darkly client from extensions.")
+ client = None
+
+ return client
+
+ @staticmethod
+ def _get_anonymous_user():
+ return Context.create('anonymous')
+
+ @staticmethod
+ def _user_as_key(user: User):
+ return Context.builder(user.idp_userid)\
+ .set('firstName', user.firstname)\
+ .set('lastName', user.lastname).build()
+
+ def is_on(self, flag: str, default: bool = False, user: User = None) -> bool:
+ """Assert that the flag is set for this user."""
+ client = self._get_client()
+
+ if not client:
+ return default
+
+ if user:
+ flag_user = self._user_as_key(user)
+ else:
+ flag_user = self._get_anonymous_user()
+
+ return bool(client.variation(flag, flag_user, default))
+
+ def value(self, flag: str, default=None, user: User = None):
+ """Retrieve the value of the (flag, user) tuple."""
+ client = self._get_client()
+
+ if not client:
+ return default
+
+ if user:
+ flag_user = self._user_as_key(user)
+ else:
+ flag_user = self._get_anonymous_user()
+
+ return client.variation(flag, flag_user, default)
+
+
+flags = Flags()
diff --git a/api/namex/services/name_request/utils.py b/api/namex/services/name_request/utils.py
index d18355661..bdaea1f4f 100644
--- a/api/namex/services/name_request/utils.py
+++ b/api/namex/services/name_request/utils.py
@@ -147,6 +147,12 @@ def valid_state_transition(user, nr, new_state):
:param new_state:
:return: (bool)
"""
+ # when the legacy user just created a new NR from legacy side, the user should be allowed to
+ # modify and cancel it from the legacy side (while the NR still in DRAFT state).
+ if (nr.stateCd == State.DRAFT or nr.stateCd == State.APPROVED or nr.stateCd == State.CANCELLED) \
+ and (new_state == State.DRAFT or new_state == State.CANCELLED):
+ return True
+
if (new_state in (State.APPROVED,
State.REJECTED,
State.CONDITIONAL)) \
diff --git a/api/poetry.lock b/api/poetry.lock
index ffea6c829..b96ba3674 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -6,6 +6,7 @@ version = "3.9.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
@@ -101,6 +102,7 @@ version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
{file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
@@ -115,6 +117,7 @@ version = "1.13.2"
description = "A database migration tool for SQLAlchemy."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"},
{file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"},
@@ -134,6 +137,7 @@ version = "9.0.1"
description = "A library for parsing ISO 8601 strings."
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
{file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
@@ -148,6 +152,7 @@ version = "3.2.3"
description = "An abstract syntax tree for Python with inference support."
optional = false
python-versions = ">=3.8.0"
+groups = ["dev"]
files = [
{file = "astroid-3.2.3-py3-none-any.whl", hash = "sha256:3eae9ea67c11c858cdd2c91337d2e816bd019ac897ca07d7b346ac10105fceb3"},
{file = "astroid-3.2.3.tar.gz", hash = "sha256:7099b5a60985529d8d46858befa103b82d0d05a5a5e8b816b5303ed96075e1d9"},
@@ -159,6 +164,7 @@ version = "22.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.6"
+groups = ["main", "dev"]
files = [
{file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
{file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
@@ -177,6 +183,7 @@ version = "2.3.1"
description = "A tool that automatically formats Python code to conform to the PEP 8 style guide"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "autopep8-2.3.1-py2.py3-none-any.whl", hash = "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d"},
{file = "autopep8-2.3.1.tar.gz", hash = "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda"},
@@ -191,6 +198,7 @@ version = "1.8.2"
description = "Fast, simple object-to-object and broadcast signaling"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"},
{file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
@@ -202,6 +210,7 @@ version = "0.14.0"
description = "httplib2 caching for requests"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"},
{file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"},
@@ -222,6 +231,7 @@ version = "0.13.0"
description = "A collection of cache libraries in the same API interface."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"},
{file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"},
@@ -233,6 +243,7 @@ version = "5.4.0"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"},
{file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"},
@@ -244,6 +255,7 @@ version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
+groups = ["main"]
files = [
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
@@ -255,6 +267,7 @@ version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.6.0"
+groups = ["main"]
files = [
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
@@ -269,6 +282,7 @@ version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
@@ -283,6 +297,7 @@ version = "1.0.26"
description = "A versioned python wrapper package for The CMU Pronouncing Dictionary data files."
optional = false
python-versions = "<4.0,>=3.8"
+groups = ["main"]
files = [
{file = "cmudict-1.0.26-py3-none-any.whl", hash = "sha256:68de98e1f9bc701dd306bc25167fdc147832931515bb1040016cd17384158870"},
{file = "cmudict-1.0.26.tar.gz", hash = "sha256:d4b7b6e47e87d303d60e9e907091918b9abb9194cf9e5a6ce2f357b2c68980d6"},
@@ -298,10 +313,12 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main", "dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""}
[[package]]
name = "coverage"
@@ -309,6 +326,7 @@ version = "7.6.0"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"},
{file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"},
@@ -373,6 +391,7 @@ version = "8.3.0"
description = "Python interface to Oracle"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "cx_Oracle-8.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b6a23da225f03f50a81980c61dbd6a358c3575f212ca7f4c22bb65a9faf94f7f"},
{file = "cx_Oracle-8.3.0-cp310-cp310-win32.whl", hash = "sha256:715a8bbda5982af484ded14d184304cc552c1096c82471dd2948298470e88a04"},
@@ -398,6 +417,7 @@ version = "0.6"
description = "A backport of the dataclasses module for Python 3.6"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
{file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
@@ -409,6 +429,7 @@ version = "0.3.8"
description = "serialize all of Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
{file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
@@ -424,6 +445,7 @@ version = "2.2.0"
description = "Filesystem-like pathing and searching for dictionaries"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576"},
{file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"},
@@ -435,6 +457,7 @@ version = "0.18.0"
description = "ECDSA cryptographic signature library (pure python)"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["main"]
files = [
{file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
{file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
@@ -453,6 +476,7 @@ version = "1.2.2"
description = "Dictionary with auto-expiring values for caching purposes"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"},
{file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"},
@@ -467,6 +491,7 @@ version = "7.1.0"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
+groups = ["dev"]
files = [
{file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
{file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
@@ -483,6 +508,7 @@ version = "0.2.1"
description = "A flake8 extension that checks for blind except: statements"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"},
]
@@ -493,6 +519,7 @@ version = "4.1.2"
description = "ipdb/pdb statement checker plugin for flake8"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"},
{file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"},
@@ -508,6 +535,7 @@ version = "1.7.0"
description = "Extension for flake8 which uses pydocstyle to check docstrings"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"},
{file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"},
@@ -523,6 +551,7 @@ version = "6.1.1"
description = "flake8 plugin that integrates isort"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"},
{file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"},
@@ -541,6 +570,7 @@ version = "3.4.0"
description = "Flake8 lint for quotes."
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"},
]
@@ -555,6 +585,7 @@ version = "3.0.3"
description = "A simple framework for building complex web applications."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"},
{file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"},
@@ -577,6 +608,7 @@ version = "1.11.1"
description = "Adds caching support to Flask applications."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "Flask-Caching-1.11.1.tar.gz", hash = "sha256:28af189e97defb9e39b43ebe197b54a58aaee81bdeb759f46d969c26d7aa7810"},
{file = "Flask_Caching-1.11.1-py3-none-any.whl", hash = "sha256:36592812eec6cba86eca48bcda74eff24bfd6c8eaf6056ca0184474bb78c0dc4"},
@@ -592,6 +624,7 @@ version = "4.0.1"
description = "A Flask extension adding a decorator for CORS support"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"},
{file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"},
@@ -602,24 +635,25 @@ Flask = ">=0.9"
[[package]]
name = "flask-jwt-oidc"
-version = "0.6.0"
+version = "0.7.0"
description = "Opinionated flask oidc client"
optional = false
python-versions = "^3.9"
+groups = ["main"]
files = []
develop = false
[package.dependencies]
-cachelib = "^0.13.0"
-Flask = "^3"
+cachelib = "0.*"
+Flask = ">=2"
python-jose = "^3.3.0"
six = "^1.16.0"
[package.source]
type = "git"
-url = "https://github.com/bolyachevets/flask-jwt-oidc.git"
-reference = "bump-flask-version"
-resolved_reference = "f023e01b537e454dfa569bc45575f1f0680daf49"
+url = "https://github.com/seeker25/flask-jwt-oidc.git"
+reference = "HEAD"
+resolved_reference = "563f01ef6453eb0ea1cc0a2d71c6665350c853ff"
[[package]]
name = "flask-marshmallow"
@@ -627,6 +661,7 @@ version = "0.14.0"
description = "Flask + marshmallow for beautiful APIs"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"},
{file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"},
@@ -650,6 +685,7 @@ version = "2.7.0"
description = "SQLAlchemy database migrations for Flask applications using Alembic"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "Flask-Migrate-2.7.0.tar.gz", hash = "sha256:ae2f05671588762dd83a21d8b18c51fe355e86783e24594995ff8d7380dffe38"},
{file = "Flask_Migrate-2.7.0-py2.py3-none-any.whl", hash = "sha256:26871836a4e46d2d590cf8e558c6d60039e1c003079b240689d845726b6b57c0"},
@@ -666,6 +702,7 @@ version = "0.11.0"
description = "Formatting of dates and times in Flask templates using moment.js."
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "Flask-Moment-0.11.0.tar.gz", hash = "sha256:ff4cc0c4f8ec6798e19ba17fac409a8090f21677da6b21e3e1e4450344d8ed71"},
{file = "Flask_Moment-0.11.0-py2.py3-none-any.whl", hash = "sha256:75e1ae59b7562731acf9faf295c0bfd8165f51f67a62bd779e0c57e5f1c66dbf"},
@@ -680,6 +717,7 @@ version = "1.1.0"
description = "OpenTracing support for Flask applications"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"},
]
@@ -697,6 +735,7 @@ version = "1.3.0"
description = "Fully featured framework for fast, easy and documented API development with Flask"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"},
{file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"},
@@ -721,6 +760,7 @@ version = "3.0.5"
description = "Add SQLAlchemy support to your Flask application."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"},
{file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"},
@@ -736,6 +776,7 @@ version = "1.5.1"
description = "Let your Python tests travel through time"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"},
{file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"},
@@ -750,6 +791,7 @@ version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
@@ -836,6 +878,7 @@ version = "2.0.0"
description = ""
optional = false
python-versions = "^3.12"
+groups = ["main"]
files = []
develop = false
@@ -859,6 +902,7 @@ version = "1.34.1"
description = "Google API client core library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"},
{file = "google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"},
@@ -883,6 +927,7 @@ version = "2.32.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"},
{file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"},
@@ -906,6 +951,7 @@ version = "2.22.0"
description = "Google Cloud Pub/Sub API client library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "google_cloud_pubsub-2.22.0-py2.py3-none-any.whl", hash = "sha256:229bf60a3835c1bb21ee36c7d4368b111097678b8ed25d3fbc5e639a1d03388d"},
{file = "google_cloud_pubsub-2.22.0.tar.gz", hash = "sha256:a4c2b1a5ca2c0b32c8d3776c85f498266c3d79696696ea67010c857b45af17d8"},
@@ -929,6 +975,7 @@ version = "1.63.2"
description = "Common protobufs used in Google APIs"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
{file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
@@ -947,6 +994,7 @@ version = "3.0.3"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
@@ -1018,6 +1066,7 @@ version = "0.13.1"
description = "IAM API client library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"},
{file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"},
@@ -1034,6 +1083,7 @@ version = "1.65.1"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"},
{file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"},
@@ -1092,6 +1142,7 @@ version = "1.48.2"
description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.6"
+groups = ["main"]
files = [
{file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"},
{file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"},
@@ -1108,6 +1159,7 @@ version = "20.1.0"
description = "WSGI HTTP Server for UNIX"
optional = false
python-versions = ">=3.5"
+groups = ["main"]
files = [
{file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
{file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
@@ -1128,6 +1180,7 @@ version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
+groups = ["main", "dev"]
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
@@ -1139,6 +1192,7 @@ version = "8.0.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"},
{file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"},
@@ -1158,6 +1212,7 @@ version = "5.13.0"
description = "Read resources from Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"},
{file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"},
@@ -1173,6 +1228,7 @@ version = "6.2.0"
description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "inflect-6.2.0-py3-none-any.whl", hash = "sha256:5a005e0c9afe152cc95d552a59b8b0c19efc51823405b43d89e984f0c33bc243"},
{file = "inflect-6.2.0.tar.gz", hash = "sha256:518088ef414a4e15df70e6bcb40d021da4d423cc6c2fd4c0cad5500d39f86627"},
@@ -1192,6 +1248,7 @@ version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@@ -1203,6 +1260,7 @@ version = "5.13.2"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
+groups = ["dev"]
files = [
{file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
{file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
@@ -1217,6 +1275,7 @@ version = "2.2.0"
description = "Safely pass data to untrusted environments and back."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
{file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
@@ -1228,6 +1287,7 @@ version = "4.8.0"
description = "Jaeger Python OpenTracing Tracer implementation"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -1247,6 +1307,7 @@ version = "3.1.4"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
{file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
@@ -1264,6 +1325,7 @@ version = "1.4.2"
description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
@@ -1275,6 +1337,7 @@ version = "2.2.0"
description = "Python library for serializing any arbitrary object graph into JSON"
optional = false
python-versions = ">=2.7"
+groups = ["main"]
files = [
{file = "jsonpickle-2.2.0-py2.py3-none-any.whl", hash = "sha256:de7f2613818aa4f234138ca11243d6359ff83ae528b2185efdd474f62bcf9ae1"},
{file = "jsonpickle-2.2.0.tar.gz", hash = "sha256:7b272918b0554182e53dc340ddd62d9b7f902fec7e7b05620c04f3ccef479a0e"},
@@ -1291,6 +1354,7 @@ version = "4.23.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
{file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
@@ -1312,6 +1376,7 @@ version = "2023.12.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
{file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
@@ -1326,6 +1391,7 @@ version = "8.3.0"
description = "LaunchDarkly SDK for Python"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "launchdarkly_server_sdk-8.3.0-py3-none-any.whl", hash = "sha256:bc59dbf9897fd2d9c70098c13bb073983bef29f58cae9439e70b2463982f1bb5"},
{file = "launchdarkly_server_sdk-8.3.0.tar.gz", hash = "sha256:cdb8fadd457e6ae569c0cb0d5de112d7f2a9c84a0ba03167bb7d68710dde7283"},
@@ -1350,6 +1416,7 @@ version = "4.9.4"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
+groups = ["main"]
files = [
{file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"},
{file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"},
@@ -1458,6 +1525,7 @@ version = "1.3.5"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"},
{file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"},
@@ -1477,6 +1545,7 @@ version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
@@ -1546,6 +1615,7 @@ version = "3.21.3"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"},
{file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"},
@@ -1565,6 +1635,7 @@ version = "0.28.2"
description = "SQLAlchemy integration with the marshmallow (de)serialization library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "marshmallow-sqlalchemy-0.28.2.tar.gz", hash = "sha256:2ab0f1280c793e5aec81deab3e63ec23688ddfe05e5f38ac960368a1079520a1"},
{file = "marshmallow_sqlalchemy-0.28.2-py2.py3-none-any.whl", hash = "sha256:c31b3bdf794de1d78c53e1c495502cbb3eeb06ed216869980c71d6159e7e9e66"},
@@ -1587,6 +1658,7 @@ version = "0.7.0"
description = "McCabe checker, plugin for flake8"
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
@@ -1598,6 +1670,7 @@ version = "1.0.8"
description = "MessagePack serializer"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"},
{file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"},
@@ -1663,6 +1736,7 @@ version = "6.0.5"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
@@ -1762,6 +1836,7 @@ version = "3.8.1"
description = "Natural Language Toolkit"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
{file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"},
@@ -1787,6 +1862,7 @@ version = "1.26.4"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.9"
+groups = ["main"]
files = [
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
@@ -1832,6 +1908,7 @@ version = "2.4.0"
description = "OpenTracing API for Python. See documentation at http://opentracing.io"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
@@ -1845,6 +1922,7 @@ version = "21.3"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.6"
+groups = ["main", "dev"]
files = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
@@ -1859,6 +1937,7 @@ version = "1.5.3"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
@@ -1903,6 +1982,7 @@ version = "0.13.3"
description = "Check PEP-8 naming conventions, plugin for flake8"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"},
{file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"},
@@ -1917,6 +1997,7 @@ version = "1.3.10"
description = "Resolve a name to an object."
optional = false
python-versions = ">=3.6"
+groups = ["main"]
files = [
{file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
{file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
@@ -1928,6 +2009,7 @@ version = "4.2.2"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
{file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
@@ -1944,6 +2026,7 @@ version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -1959,6 +2042,7 @@ version = "0.2.0"
description = "A simple interface for the CMU pronouncing dictionary"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "pronouncing-0.2.0.tar.gz", hash = "sha256:ff7856e1d973b3e16ff490c5cf1abdb52f08f45e2c35e463249b75741331e7c4"},
]
@@ -1972,6 +2056,7 @@ version = "1.24.0"
description = "Beautiful, Pythonic protocol buffers."
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
{file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
@@ -1989,6 +2074,7 @@ version = "3.20.3"
description = "Protocol Buffers"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"},
{file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"},
@@ -2020,6 +2106,7 @@ version = "2.9.9"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
@@ -2101,6 +2188,7 @@ version = "0.4.8"
description = "ASN.1 types and codecs"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
@@ -2112,6 +2200,7 @@ version = "0.4.0"
description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
{file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
@@ -2126,6 +2215,7 @@ version = "2.12.0"
description = "Python style guide checker"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
{file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
@@ -2137,6 +2227,7 @@ version = "22.3.5"
description = "ISO country, subdivision, language, currency and script definitions and their translations"
optional = false
python-versions = ">=3.6, <4"
+groups = ["main"]
files = [
{file = "pycountry-22.3.5.tar.gz", hash = "sha256:b2163a246c585894d808f18783e19137cb70a0c18fb36748dc01fc6f109c1646"},
]
@@ -2150,6 +2241,7 @@ version = "1.10.17"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"},
{file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"},
@@ -2209,6 +2301,7 @@ version = "6.3.0"
description = "Python docstring style checker"
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
{file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
@@ -2226,6 +2319,7 @@ version = "3.2.0"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
@@ -2237,6 +2331,7 @@ version = "2.1.0"
description = "Hamcrest framework for matcher objects"
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"},
{file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"},
@@ -2254,6 +2349,7 @@ version = "3.2.5"
description = "python code static checker"
optional = false
python-versions = ">=3.8.0"
+groups = ["dev"]
files = [
{file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"},
{file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"},
@@ -2278,6 +2374,7 @@ version = "0.6"
description = "pylint-flask is a Pylint plugin to aid Pylint in recognizing and understanding errors caused when using Flask"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "pylint-flask-0.6.tar.gz", hash = "sha256:f4d97de2216bf7bfce07c9c08b166e978fe9f2725de2a50a9845a97de7e31517"},
]
@@ -2291,6 +2388,7 @@ version = "0.8.2"
description = "Utilities and helpers for writing Pylint plugins"
optional = false
python-versions = ">=3.7,<4.0"
+groups = ["dev"]
files = [
{file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"},
{file = "pylint_plugin_utils-0.8.2.tar.gz", hash = "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"},
@@ -2305,6 +2403,7 @@ version = "3.1.2"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.6.8"
+groups = ["main", "dev"]
files = [
{file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
{file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
@@ -2319,6 +2418,7 @@ version = "1.1"
description = "Generate and parse RFC 3339 timestamps"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"},
{file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"},
@@ -2333,6 +2433,7 @@ version = "0.18.1"
description = "Persistent/Functional/Immutable data structures"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
{file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
@@ -2363,6 +2464,7 @@ version = "3.9.0"
description = "Lightweight Python client for Apache Solr"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "pysolr-3.9.0.tar.gz", hash = "sha256:6ef05feb87c614894243eddc62e9b0a6134a889c159ae868655cf6cd749545e6"},
]
@@ -2379,6 +2481,7 @@ version = "8.2.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
{file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
@@ -2399,6 +2502,7 @@ version = "1.0.5"
description = "Pytest plugin for aiohttp support"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pytest-aiohttp-1.0.5.tar.gz", hash = "sha256:880262bc5951e934463b15e3af8bb298f11f7d4d3ebac970aab425aff10a780a"},
{file = "pytest_aiohttp-1.0.5-py3-none-any.whl", hash = "sha256:63a5360fd2f34dda4ab8e6baee4c5f5be4cd186a403cabd498fced82ac9c561e"},
@@ -2418,6 +2522,7 @@ version = "0.23.8"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
{file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
@@ -2436,6 +2541,7 @@ version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
@@ -2454,6 +2560,7 @@ version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
@@ -2471,6 +2578,7 @@ version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main", "dev"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -2485,6 +2593,7 @@ version = "0.21.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"},
{file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"},
@@ -2499,6 +2608,7 @@ version = "1.0.4"
description = "Programmatically open an editor, capture the result."
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"},
{file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"},
@@ -2511,6 +2621,7 @@ version = "3.3.0"
description = "JOSE implementation in Python"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
{file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
@@ -2532,6 +2643,7 @@ version = "2022.7.1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
@@ -2543,6 +2655,7 @@ version = "0.35.1"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
{file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
@@ -2558,6 +2671,7 @@ version = "2022.10.31"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.6"
+groups = ["main"]
files = [
{file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"},
{file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"},
@@ -2655,6 +2769,7 @@ version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@@ -2676,6 +2791,7 @@ version = "0.19.0"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"},
{file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"},
@@ -2784,6 +2900,7 @@ version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
+groups = ["main"]
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@@ -2798,6 +2915,7 @@ version = "0.0.0"
description = ""
optional = false
python-versions = "*"
+groups = ["main"]
files = []
develop = false
@@ -2821,6 +2939,7 @@ version = "3.0.2"
description = "Python helper for Semantic Versioning (https://semver.org)"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"},
{file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"},
@@ -2832,6 +2951,7 @@ version = "1.45.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"},
{file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"},
@@ -2879,6 +2999,7 @@ version = "71.0.2"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "setuptools-71.0.2-py3-none-any.whl", hash = "sha256:f6640114f96be808024fbd1f721161215543796d3a68da4524349de700604ce8"},
{file = "setuptools-71.0.2.tar.gz", hash = "sha256:ca359bea0cd5c8ce267d7463239107e87f312f2e2a11b6ca6357565d82b6c0d7"},
@@ -2895,6 +3016,7 @@ version = "0.0.2"
description = "A short description of the project"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = []
develop = false
@@ -2913,6 +3035,7 @@ version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["main", "dev"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -2924,6 +3047,7 @@ version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
@@ -2935,6 +3059,7 @@ version = "1.4.52"
description = "Database Abstraction Library"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+groups = ["main"]
files = [
{file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"},
{file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"},
@@ -3014,6 +3139,7 @@ version = "0.7"
description = "Strict, simple, lightweight RFC3339 functions"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"},
]
@@ -3024,6 +3150,7 @@ version = "1.0.0"
description = "Synonyms API"
optional = false
python-versions = "*"
+groups = ["main"]
files = []
develop = false
@@ -3045,6 +3172,7 @@ version = "1.0.2"
description = "Tornado IOLoop Backed Concurrent Futures"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
@@ -3059,6 +3187,7 @@ version = "0.20.0"
description = "Python bindings for the Apache Thrift RPC system"
optional = false
python-versions = "*"
+groups = ["main"]
files = [
{file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"},
]
@@ -3077,6 +3206,7 @@ version = "0.13.0"
description = "Style preserving TOML library"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"},
{file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"},
@@ -3088,6 +3218,7 @@ version = "0.12.1"
description = "List processing tools and functional utilities"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"},
{file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"},
@@ -3099,6 +3230,7 @@ version = "6.4.1"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
@@ -3119,6 +3251,7 @@ version = "4.66.4"
description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
{file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
@@ -3139,6 +3272,7 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -3150,6 +3284,7 @@ version = "1.26.19"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+groups = ["main"]
files = [
{file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
{file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
@@ -3166,6 +3301,7 @@ version = "3.0.3"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
{file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
@@ -3183,6 +3319,7 @@ version = "0.13.0"
description = "Makes working with XML feel like you are working with JSON"
optional = false
python-versions = ">=3.4"
+groups = ["main"]
files = [
{file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
{file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
@@ -3194,6 +3331,7 @@ version = "1.9.4"
description = "Yet another URL library"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
@@ -3297,6 +3435,7 @@ version = "3.19.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
{file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
@@ -3307,6 +3446,6 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[metadata]
-lock-version = "2.0"
+lock-version = "2.1"
python-versions = "^3.12"
-content-hash = "4bb7c31d73432ff45852c592e608a94d95b7fc59f8df8d306510ac0a69f4904b"
+content-hash = "4f9d9f3a1c660c04648d52d1934209776e698c0082cd99133faa16cc79e686c0"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 8ec3f47e5..da805fcca 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -13,7 +13,7 @@ gcp_queue = { git = "https://github.com/bcgov/namex.git", subdirectory = "servic
swagger_client = { git = "https://github.com/bcgov/namex-synonyms-api-py-client.git" }
simple_cloudevent = { git = "https://github.com/daxiom/simple-cloudevent.py" }
# flask-jwt-oidc = { git = "https://github.com/thorwolpert/flask-jwt-oidc.git" }
-flask-jwt-oidc = { git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version" }
+flask-jwt-oidc = { git = "https://github.com/seeker25/flask-jwt-oidc.git" }
sbc-common-components = { git = "https://github.com/bcgov/sbc-common-components.git", subdirectory = "python" }
gunicorn = "^20.1.0"
diff --git a/api/report-templates/template-parts/name-request/nrDetails.html b/api/report-templates/template-parts/name-request/nrDetails.html
index 7d5ab6b4e..e247d1520 100644
--- a/api/report-templates/template-parts/name-request/nrDetails.html
+++ b/api/report-templates/template-parts/name-request/nrDetails.html
@@ -5,7 +5,7 @@
Business Type:
Request Status:
Request Type:
- Retrieval Time:
+ Retrieved Date and Time:
diff --git a/api/tests/python/services/test_flags.py b/api/tests/python/services/test_flags.py
new file mode 100644
index 000000000..4310a1dec
--- /dev/null
+++ b/api/tests/python/services/test_flags.py
@@ -0,0 +1,184 @@
+# Copyright © 2025 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests to assure the Flag Services.
+
+Test-Suite to ensure that the Flag Service is working as expected.
+"""
+import pytest
+from flask import Flask
+
+from namex.models import User
+from namex.services.flags import Flags
+
+
+def test_flags_init():
+ """Ensure that extension can be initialized."""
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+
+ with app.app_context():
+ flags = Flags(app)
+
+ assert flags
+ assert app.extensions['featureflags']
+
+
+def test_flags_init_app():
+ """Ensure that extension can be initialized."""
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+ app.config['NAMEX_LD_SDK_ID'] = 'https://no.flag/avail'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ assert app.extensions['featureflags']
+
+
+def test_flags_init_app_production():
+ """Ensure that extension can be initialized."""
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'production'
+ app.config['NAMEX_LD_SDK_ID'] = 'https://no.flag/avail'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ assert app.extensions['featureflags']
+
+
+def test_flags_init_app_no_key_dev():
+ """Assert that the extension is setup with a KEY, but in non-production mode."""
+ app = Flask(__name__)
+ app.config['NAMEX_LD_SDK_ID'] = None
+ app.config['ENVIRONMENT'] = 'local'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ assert app.extensions['featureflags']
+
+
+def test_flags_init_app_no_key_prod():
+ """Assert that prod with no key initializes, but does not setup the extension."""
+ app = Flask(__name__)
+ app.config['NAMEX_LD_SDK_ID'] = None
+ app.config['ENVIRONMENT'] = 'production'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ with pytest.raises(KeyError):
+ client = app.extensions['featureflags']
+ assert not client
+
+
+def test_flags_bool_no_key_prod():
+ """Assert that prod with no key initializes, but does not setup the extension."""
+ app = Flask(__name__)
+ app.config['NAMEX_LD_SDK_ID'] = None
+ app.config['ENVIRONMENT'] = 'production'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ on = flags.is_on('bool-flag')
+
+ assert not on
+
+
+def test_flags_bool():
+ """Assert that a boolean (True) is returned, when using the local Flag.json file."""
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+ app.config['NAMEX_LD_SDK_ID'] = 'https://no.flag/avail'
+
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ flag_on = flags.is_on('bool-flag')
+
+ assert flag_on
+
+
+def test_flags_bool_missing_flag(app):
+ """Assert that a boolean (False) is returned when flag doesn't exist, when using the local Flag.json file."""
+ from namex import flags
+ app_env = app.config.get('ENVIRONMENT')
+ try:
+ with app.app_context():
+ flag_on = flags.is_on('no flag here')
+
+ assert not flag_on
+ except: # pylint: disable=bare-except; # noqa: B901, E722
+ # for tests we don't care
+ assert False
+ finally:
+ app.config['ENVIRONMENT'] = app_env
+
+
+def test_flags_bool_using_current_app():
+ """Assert that a boolean (True) is returned, when using the local Flag.json file."""
+ from namex import flags
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+
+ with app.app_context():
+ flag_on = flags.is_on('bool-flag')
+
+ assert flag_on
+
+
+@pytest.mark.parametrize('test_name,flag_name,expected', [
+ ('boolean flag', 'bool-flag', True),
+ ('string flag', 'string-flag', 'a string value'),
+ ('integer flag', 'integer-flag', 10),
+ ('boolean flag', 'enable-won-emails', False),
+])
+def test_flags_bool_value(test_name, flag_name, expected):
+ """Assert that a boolean (True) is returned, when using the local Flag.json file."""
+ from namex import flags
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+
+ with app.app_context():
+ val = flags.value(flag_name)
+
+ assert val == expected
+
+
+def test_flag_bool_unique_user():
+ """Assert that a unique user can retrieve a flag, when using the local Flag.json file."""
+ app = Flask(__name__)
+ app.config['ENVIRONMENT'] = 'local'
+ app.config['NAMEX_LD_SDK_ID'] = 'https://no.flag/avail'
+
+ user = User(username='username', firstname='firstname', lastname='lastname', sub='sub', iss='iss', idp_userid='123', login_source='IDIR')
+
+ app_env = app.config['ENVIRONMENT']
+ try:
+ with app.app_context():
+ flags = Flags()
+ flags.init_app(app)
+ app.config['ENVIRONMENT'] = 'local'
+ val = flags.value('bool-flag', user)
+ flag_on = flags.is_on('bool-flag', user)
+
+ assert val
+ assert flag_on
+ except: # pylint: disable=bare-except; # noqa: B901, E722
+ # for tests we don't care
+ assert False
+ finally:
+ app.config['ENVIRONMENT'] = app_env
diff --git a/jobs/bad-designation-notifier/.env.smample b/jobs/bad-designation-notifier/.env.smample
new file mode 100644
index 000000000..3d8655829
--- /dev/null
+++ b/jobs/bad-designation-notifier/.env.smample
@@ -0,0 +1,9 @@
+export NAMEX_DATABASE_HOST=
+export NAMEX_DATABASE_NAME=
+export NAMEX_DATABASE_PASSWORD=
+export NAMEX_DATABASE_PORT=
+export NAMEX_DATABASE_USERNAME=
+
+export EMAIL_RECIPIENTS=
+export SMTP_SERVER=
+export SMTP_USER=
\ No newline at end of file
diff --git a/jobs/bad-designation-notifier/Dockerfile b/jobs/bad-designation-notifier/Dockerfile
new file mode 100644
index 000000000..30b5f3d23
--- /dev/null
+++ b/jobs/bad-designation-notifier/Dockerfile
@@ -0,0 +1,44 @@
+# Use Python 3.12 slim image
+FROM python:3.12-slim
+
+# Set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1 \
+ PYTHONUNBUFFERED=1 \
+ PIP_NO_CACHE_DIR=1 \
+ PIP_DISABLE_PIP_VERSION_CHECK=1 \
+ POETRY_NO_INTERACTION=1 \
+ POETRY_VIRTUALENVS_CREATE=false \
+ PATH="/root/.local/bin:$PATH" \
+ PYTHONPATH="/app/src"
+
+# Install system dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ build-essential \
+ libpq-dev \
+ curl \
+ && apt-get clean && rm -rf /var/lib/apt/lists/*
+
+# Upgrade pip and install Poetry
+RUN python3 -m ensurepip --upgrade && \
+ python3 -m pip install --upgrade pip setuptools wheel && \
+ curl --proto "=https" --tlsv1.2 -sSf -L https://install.python-poetry.org | python3 - && \
+ poetry --version
+
+# Set working directory
+WORKDIR /app
+
+# Copy dependency files and install dependencies
+COPY pyproject.toml poetry.lock /app/
+RUN poetry install --no-dev --no-interaction --no-ansi
+
+# Copy the application source code
+COPY src /app/src
+
+# Ensure all files are group-writable for OpenShift compatibility
+RUN chmod -R g=u /app
+
+# OpenShift will assign a random UID at runtime
+USER 1001
+
+# Default command
+CMD ["python", "src/app.py"]
diff --git a/jobs/bad-designation-notifier/Makefile b/jobs/bad-designation-notifier/Makefile
new file mode 100644
index 000000000..cc96b133d
--- /dev/null
+++ b/jobs/bad-designation-notifier/Makefile
@@ -0,0 +1,77 @@
+.PHONY: clean clean-build clean-pyc clean-test ci lint test build build-nc push run setup help
+
+CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(abspath $(lastword $(MAKEFILE_LIST)))))
+PROJECT_NAME:=bad-designation-notifier
+DOCKER_NAME:=bad-designation-notifier
+VENV_DIR:=.venv
+
+#################################################################################
+# Setup #
+#################################################################################
+setup: clean install ## Setup the project
+
+install: ## Install Python dependencies in a virtual environment
+ test -f $(VENV_DIR)/bin/activate || python3.12 -m venv $(VENV_DIR) ;\
+ . $(VENV_DIR)/bin/activate ;\
+ pip install poetry ;\
+ poetry install --no-dev
+
+#################################################################################
+# Clean #
+#################################################################################
+clean: clean-build clean-pyc clean-test ## Clean the project
+ rm -rf $(VENV_DIR)/
+
+clean-build: ## Clean build files
+ rm -fr build/ dist/ .eggs/
+ find . -name '*.egg-info' -exec rm -fr {} +
+ find . -name '*.egg' -exec rm -fr {} +
+
+clean-pyc: ## Clean Python cache files
+ find . -name '*.pyc' -exec rm -f {} +
+ find . -name '*.pyo' -exec rm -f {} +
+ find . -name '*~' -exec rm -f {} +
+ find . -name '__pycache__' -exec rm -fr {} +
+
+clean-test: ## Clean test files
+ find . -name '.pytest_cache' -exec rm -fr {} +
+ rm -fr .tox/ .coverage htmlcov/
+
+#################################################################################
+# Linting and Testing #
+#################################################################################
+ci: lint test ## CI flow
+
+lint: ## Run linting with pylint
+ . $(VENV_DIR)/bin/activate && pylint --rcfile=setup.cfg src/**/*.py
+
+test: ## Run unit tests
+ . $(VENV_DIR)/bin/activate && pytest src/
+
+#################################################################################
+# Build and Push Docker #
+#################################################################################
+build: ## Build the Docker container
+ docker build . -t $(DOCKER_NAME) \
+ --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \
+ --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
+
+build-nc: ## Build the Docker container without caching
+ docker build --no-cache -t $(DOCKER_NAME) .
+
+push: build ## Push the Docker container to the registry
+ @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\
+ docker tag $(DOCKER_NAME) $(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME):latest ;\
+ docker push $(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME):latest
+
+#################################################################################
+# Run #
+#################################################################################
+run: ## Run the project locally
+ . $(VENV_DIR)/bin/activate && poetry run python src/app.py
+
+#################################################################################
+# Self-Documenting Commands #
+#################################################################################
+help: ## Show available commands
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/jobs/bad-designation-notifier/README.md b/jobs/bad-designation-notifier/README.md
new file mode 100644
index 000000000..23e834807
--- /dev/null
+++ b/jobs/bad-designation-notifier/README.md
@@ -0,0 +1,22 @@
+# Bad-designation-Notifier
+
+Bad-designation-Notifier is a Python application designed to identify designations does not matching request types in a PostgreSQL database and send email notifications to specified recipients. The application runs as a standalone job, suitable for deployment in OpenShift or similar platforms.
+
+## Features
+- Query a PostgreSQL database for designations in the names with request type in 'FR','LL','LP','XLL','XLP'.
+- Filter out designations that are not in specific states (e.g., 'CANCELLED','EXPIRED','PENDING_DELETION','REJECTED').
+- Send email notifications with formatted data to a configurable list of recipients.
+- Configurable via environment variables for flexibility.
+
+## Requirements
+- Python 3.12+
+- PostgreSQL database
+- Poetry for dependency management
+- OpenShift (for deployment)
+
+## Installation
+
+1. Clone the repository:
+ ```bash
+ git clone https://github.com/your-repo/bad-designation-notifier.git
+ cd bad-designation-notifier
diff --git a/jobs/bad-designation-notifier/poetry.lock b/jobs/bad-designation-notifier/poetry.lock
new file mode 100644
index 000000000..82fbce0b4
--- /dev/null
+++ b/jobs/bad-designation-notifier/poetry.lock
@@ -0,0 +1,454 @@
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+
+[[package]]
+name = "blinker"
+version = "1.9.0"
+description = "Fast, simple object-to-object and broadcast signaling"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.8"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
+ {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "flask"
+version = "3.1.0"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"},
+ {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"},
+]
+
+[package.dependencies]
+blinker = ">=1.9"
+click = ">=8.1.3"
+itsdangerous = ">=2.2"
+Jinja2 = ">=3.1.2"
+Werkzeug = ">=3.1"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "greenlet"
+version = "3.1.1"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
+ {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
+ {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
+ {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
+ {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
+ {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
+ {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
+ {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
+ {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
+ {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
+ {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
+ {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
+ {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
+ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
+ {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
+]
+
+[package.extras]
+docs = ["Sphinx", "furo"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "itsdangerous"
+version = "2.2.0"
+description = "Safely pass data to untrusted environments and back."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.5"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
+ {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
+]
+
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "pytz"
+version = "2022.7.1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
+ {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
+]
+
+[[package]]
+name = "secure-smtplib"
+version = "0.1.1"
+description = "Secure SMTP subclasses for Python 2"
+optional = false
+python-versions = "*"
+files = [
+ {file = "secure-smtplib-0.1.1.tar.gz", hash = "sha256:3d4903f6612626ccd9b77ef05313cb3c3952a59a4d756cda2377b272fabad8b9"},
+ {file = "secure_smtplib-0.1.1-py2.py3-none-any.whl", hash = "sha256:645ea0a341f306fa3307c8fe1169765c417ff363db9946fa69f3669a9b718069"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "1.4.54"
+description = "Database Abstraction Library"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"},
+ {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+
+[package.extras]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (!=0.4.17)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql", "pymssql"]
+mssql-pyodbc = ["pyodbc", "pyodbc"]
+mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
+mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
+mysql-connector = ["mysql-connector-python", "mysql-connector-python"]
+oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+pymysql = ["pymysql", "pymysql (<1)"]
+sqlcipher = ["sqlcipher3_binary"]
+
+[[package]]
+name = "werkzeug"
+version = "3.1.3"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
+ {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.12"
+content-hash = "91eee9ab02266929bf2a634cb6e9030d5d23cabcfa07ce38743eb8eb64449d65"
diff --git a/jobs/bad-designation-notifier/pyproject.toml b/jobs/bad-designation-notifier/pyproject.toml
new file mode 100644
index 000000000..de0a80088
--- /dev/null
+++ b/jobs/bad-designation-notifier/pyproject.toml
@@ -0,0 +1,20 @@
+[tool.poetry]
+name = "bad-designation-notifier"
+version = "0.1.0"
+description = "An app to detect and notify about bad designations."
+authors = ["Your Name "]
+license = "Apache-2.0"
+readme = "README.md"
+packages = [
+ { include = "services", from = "src" }, # Updated to specify the `src` path
+ { include = "src" } # Ensure `src` folder is packaged
+]
+
+[tool.poetry.dependencies]
+python = "^3.12"
+flask = "^3.0.2"
+sqlalchemy = "^1.4.18"
+psycopg2-binary = "^2.9.7"
+secure-smtplib = "^0.1.1"
+pytz = "^2022.4"
+python-dotenv = "^1.0.1"
diff --git a/jobs/bad-designation-notifier/run.sh b/jobs/bad-designation-notifier/run.sh
new file mode 100755
index 000000000..48c20a207
--- /dev/null
+++ b/jobs/bad-designation-notifier/run.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Activate virtual environment
+source .venv/bin/activate
+
+# Export environment variables
+export FLASK_APP=src/app.py
+export FLASK_ENV=development
+
+# Run the Flask app
+python src/app.py
diff --git a/jobs/bad-designation-notifier/src/app.py b/jobs/bad-designation-notifier/src/app.py
new file mode 100644
index 000000000..2ced503b2
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/app.py
@@ -0,0 +1,41 @@
+from config import get_named_config
+from flask import Flask, current_app
+from services.database_service import get_bad_designations
+from services.email_service import send_email_notification
+
+import logging
+import logging.config
+import os
+
+def create_app(config_name="default"):
+ """Creates and configures the Flask app."""
+
+ app = Flask(__name__) # NOSONAR
+ app.config.from_object(get_named_config(config_name))
+ print(app.config)
+ return app
+
+def run_task():
+ """Executes the task to query bad names and send an email."""
+ try:
+ # Step 1: Query data
+ bad_designations = get_bad_designations()
+
+ # Step 2: Send email
+ send_email_notification(bad_designations)
+ current_app.logger.info("Notification sent successfully.")
+ except Exception as e:
+ current_app.logger.error(f"An error occurred: {e}")
+
+def setup_logging():
+ # Load the logging configuration
+ logging.config.fileConfig(os.path.join(os.path.dirname(__file__), "logging.conf"))
+ logger = logging.getLogger("api") # Use the `api` logger
+ # Example log message
+ logger.info("Logging is configured and ready!")
+
+if __name__ == "__main__":
+ setup_logging()
+ app = create_app()
+ with app.app_context(): # Ensures Flask app context is available
+ run_task()
diff --git a/jobs/bad-designation-notifier/src/config.py b/jobs/bad-designation-notifier/src/config.py
new file mode 100644
index 000000000..8f2f5991e
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/config.py
@@ -0,0 +1,57 @@
+import os
+from dotenv import find_dotenv, load_dotenv
+
+# Get the project root directory
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Load the .env file from the project root
+load_dotenv(os.path.join(BASE_DIR, '.env'))
+
+
+class Config:
+ # Database Configuration
+ _DB_USER = os.getenv("NAMEX_DATABASE_USERNAME", "")
+ _DB_PASSWORD = os.getenv("NAMEX_DATABASE_PASSWORD", "")
+ _DB_NAME = os.getenv("NAMEX_DATABASE_NAME", "")
+ _DB_HOST = os.getenv("NAMEX_DATABASE_HOST", "")
+ _DB_PORT = os.getenv("NAMEX_DATABASE_PORT", "5432")
+ NAMEX_DATABASE_URI = f"postgresql://{_DB_USER}:{_DB_PASSWORD}@{_DB_HOST}:{_DB_PORT}/{_DB_NAME}"
+
+ # Email Configuration
+ EMAIL_RECIPIENTS = os.getenv("EMAIL_RECIPIENTS", "").split(",")
+ SMTP_SERVER = os.getenv("SMTP_SERVER", "")
+ SMTP_USER = os.getenv("SMTP_USER", "")
+
+ # General Settings
+ DEBUG = False
+ TESTING = False
+
+
+class DevConfig(Config):
+ DEBUG = True
+
+
+class TestConfig(Config):
+ DEBUG = True
+ TESTING = True
+
+
+class ProdConfig(Config):
+ pass
+
+
+# Environment-specific configuration mapping
+_APP_CONFIG = {
+ "development": DevConfig,
+ "testing": TestConfig,
+ "production": ProdConfig,
+ "default": ProdConfig,
+}
+
+
+def get_named_config(config_name: str = "default"):
+ """Return the configuration object based on the name."""
+ try:
+ return _APP_CONFIG[config_name]
+ except KeyError:
+ raise KeyError(f"Unknown configuration: {config_name}")
diff --git a/services/auto-analyze/logging.conf b/jobs/bad-designation-notifier/src/logging.conf
similarity index 97%
rename from services/auto-analyze/logging.conf
rename to jobs/bad-designation-notifier/src/logging.conf
index ffc1a01e3..0806a8a2c 100644
--- a/services/auto-analyze/logging.conf
+++ b/jobs/bad-designation-notifier/src/logging.conf
@@ -25,4 +25,4 @@ args=(sys.stdout,)
[formatter_simple]
format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s
-datefmt=
\ No newline at end of file
+datefmt=
diff --git a/jobs/bad-designation-notifier/src/services/__init__.py b/jobs/bad-designation-notifier/src/services/__init__.py
new file mode 100644
index 000000000..a6d425dfa
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/services/__init__.py
@@ -0,0 +1,6 @@
+# services/__init__.py
+
+from .database_service import get_bad_designations
+from .email_service import send_email_notification
+
+__all__ = ["get_bad_designations", "send_email_notification"]
diff --git a/jobs/bad-designation-notifier/src/services/database_service.py b/jobs/bad-designation-notifier/src/services/database_service.py
new file mode 100644
index 000000000..9a23af111
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/services/database_service.py
@@ -0,0 +1,73 @@
+from flask import current_app
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import sessionmaker
+from src.config import get_named_config
+from .utils import get_yesterday_str
+
+config = get_named_config
+
+def get_bad_designations():
+    """Fetches bad designations from the database."""
+ # Create the database engine and session
+ engine = create_engine(current_app.config["NAMEX_DATABASE_URI"])
+ session = sessionmaker(bind=engine)()
+
+ yesterday_pacific = get_yesterday_str()
+
+ # Print the timestamps
+ current_app.logger.info(f"Yesterday (Pacific): {yesterday_pacific}")
+
+ try:
+ # Wrap the query in text()
+ query = text(f"""
+ select
+ r.nr_num
+ ,n.name
+ ,TO_CHAR(r.last_update AT TIME ZONE 'America/Vancouver', 'YYYY-MM-DD HH24:MI:SS') AS last_update
+ ,r.request_type_cd
+ ,r.entity_type_cd
+ ,r.state_cd
+ ,TO_CHAR(r.expiration_date, 'YYYY-MM-DD HH24:MI:SS') AS expiration_date
+ ,n.corp_num as consumed_corp_num
+ ,TO_CHAR(n.consumption_date AT TIME ZONE 'America/Vancouver', 'YYYY-MM-DD HH24:MI:SS') AS consumed_date
+ from requests r
+ ,names n
+ where r.id=n.nr_id
+ and to_char(last_update at time zone 'America/Vancouver','yyyy-mm-dd') = '{yesterday_pacific}'
+ and r.request_type_cd in ('FR','LL','LP','XLL','XLP')
+ and r.state_cd NOT in ('CANCELLED','EXPIRED','PENDING_DELETION','REJECTED')
+ and
+ (
+ replace(name,'.','') like '%CCC'
+ or replace(name,'.','') like '%COMMUNITY CONTRIBUTION COMPANY'
+ or replace(name,'.','') like '%CORP'
+ or replace(name,'.','') like '%CORPORATION'
+ or replace(name,'.','') like '%INC'
+ or replace(name,'.','') like '%INCORPORATED'
+ or replace(name,'.','') like '%INCORPOREE'
+ or (replace(name,'.','') like '%LIMITED' and replace(name,'.','') not like '%UNLIMITED')
+ or replace(name,'.','') like '%LIMITEE'
+ or replace(name,'.','') like '%LIMITED LIABILITY COMPANY'
+ or replace(name,'.','') like '%LIMITED LIABILITY CO'
+ or replace(name,'.','') like '%LLC'
+ or replace(name,'.','') like '%LTEE'
+ or replace(name,'.','') like '%LTD'
+ or replace(name,'.','') like '%SRI'
+ or replace(name,'.','') like '%ULC'
+ or replace(name,'.','') like '%UNLIMITED LIABILITY COMPANY'
+ )
+ and r.nr_num not like 'NR L%'
+ order by r.last_update desc
+ """)
+ result = session.execute(query).fetchall()
+
+ # Convert result rows to a list of dictionaries
+ formatted_result = [
+ [(value if value is not None else 'n/a') for value in row]
+ for row in result
+ ]
+
+ return formatted_result
+ finally:
+ # Ensure the session is closed
+ session.close()
\ No newline at end of file
diff --git a/jobs/bad-designation-notifier/src/services/email_service.py b/jobs/bad-designation-notifier/src/services/email_service.py
new file mode 100644
index 000000000..40049dfce
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/services/email_service.py
@@ -0,0 +1,83 @@
+import csv
+import smtplib
+import os
+
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.mime.base import MIMEBase
+from email import encoders
+from flask import current_app
+from .utils import get_yesterday_str
+
+def load_recipients():
+ """Load recipients dynamically from an environment variable."""
+ recipients = current_app.config["EMAIL_RECIPIENTS"]
+ return recipients if isinstance(recipients, list) else []
+
+def send_email_notification(bad_designations):
+    """Sends an email notification with the bad designations."""
+ # Dynamically load recipients
+ recipients = load_recipients()
+ current_app.logger.info(f'recipients:{recipients}')
+
+ # Check if recipients list is empty
+ if not recipients:
+ current_app.logger.error("No recipients found in the configuration.")
+ raise ValueError("Email recipients are not defined. Please check the configuration.")
+
+ # Define headers
+ headers = [
+ "NR", "Name", "Last Update", "Request Type", "Entity Type",
+ "State", "Expiration Date", "Consumed Corp", "Consumed Date"
+ ]
+
+ total_count = len(bad_designations)
+
+ # Add total count at the end
+ email_body = f"""
+ The attached report contains {total_count} record(s) collected on {get_yesterday_str()}.
+ Please find the detailed report in the attached file.
+ """ if total_count > 0 else f"""
+ No bad designations were found on {get_yesterday_str()}.
+ """
+
+ smtp_user = current_app.config["SMTP_USER"]
+ smtp_server = current_app.config["SMTP_SERVER"]
+
+ # Compose the email
+ msg = MIMEMultipart()
+ msg.attach(MIMEText(email_body, "plain"))
+ msg["Subject"] = "Bad designation in names"
+ msg["From"] = smtp_user
+ msg["To"] = ", ".join(recipients)
+
+ csv_file = f"bad-designation-{get_yesterday_str()}.csv"
+
+ # Attach the CSV file if there are records
+ if total_count > 0:
+ with open(csv_file, "w", newline="") as f:
+ writer = csv.writer(f)
+ writer.writerow(headers)
+ writer.writerows(bad_designations)
+
+ with open(csv_file, "rb") as f:
+ part = MIMEBase("application", "octet-stream")
+ part.set_payload(f.read())
+ encoders.encode_base64(part)
+ part.add_header("Content-Disposition", f"attachment; filename={csv_file}")
+ msg.attach(part)
+
+ # Send email
+ try:
+ with smtplib.SMTP(smtp_server) as server:
+ server.starttls()
+ server.send_message(msg)
+ current_app.logger.info("Email sent successfully to: %s", ", ".join(recipients))
+ except Exception as e:
+ current_app.logger.error("Failed to send email: %s", e)
+ raise
+ finally:
+ # Cleanup: Delete the CSV file if it exists
+ if os.path.exists(csv_file):
+ os.remove(csv_file)
+ current_app.logger.info("Temporary CSV file deleted: %s", csv_file)
\ No newline at end of file
diff --git a/jobs/bad-designation-notifier/src/services/utils.py b/jobs/bad-designation-notifier/src/services/utils.py
new file mode 100644
index 000000000..ba5bc3e4a
--- /dev/null
+++ b/jobs/bad-designation-notifier/src/services/utils.py
@@ -0,0 +1,12 @@
+from datetime import datetime, timedelta
+import pytz
+
+def get_yesterday_str():
+ pacific = pytz.timezone('America/Los_Angeles')
+
+ # Calculate the start of today and yesterday in Pacific Time
+ start_of_today_pacific = pacific.localize(datetime.now().replace(hour=0, minute=0, second=0, microsecond=0))
+ start_of_yesterday_pacific = start_of_today_pacific - timedelta(days=1)
+
+ # Format the date as yyyy-mm-dd
+ return start_of_yesterday_pacific.strftime('%Y-%m-%d')
\ No newline at end of file
diff --git a/jobs/bad-name-notifier/.env.sample b/jobs/bad-name-notifier/.env.sample
new file mode 100644
index 000000000..3d8655829
--- /dev/null
+++ b/jobs/bad-name-notifier/.env.sample
@@ -0,0 +1,9 @@
+export NAMEX_DATABASE_HOST=
+export NAMEX_DATABASE_NAME=
+export NAMEX_DATABASE_PASSWORD=
+export NAMEX_DATABASE_PORT=
+export NAMEX_DATABASE_USERNAME=
+
+export EMAIL_RECIPIENTS=
+export SMTP_SERVER=
+export SMTP_USER=
\ No newline at end of file
diff --git a/jobs/bad-name-notifier/Dockerfile b/jobs/bad-name-notifier/Dockerfile
new file mode 100644
index 000000000..30b5f3d23
--- /dev/null
+++ b/jobs/bad-name-notifier/Dockerfile
@@ -0,0 +1,44 @@
+# Use Python 3.12 slim image
+FROM python:3.12-slim
+
+# Set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1 \
+ PYTHONUNBUFFERED=1 \
+ PIP_NO_CACHE_DIR=1 \
+ PIP_DISABLE_PIP_VERSION_CHECK=1 \
+ POETRY_NO_INTERACTION=1 \
+ POETRY_VIRTUALENVS_CREATE=false \
+ PATH="/root/.local/bin:$PATH" \
+ PYTHONPATH="/app/src"
+
+# Install system dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ build-essential \
+ libpq-dev \
+ curl \
+ && apt-get clean && rm -rf /var/lib/apt/lists/*
+
+# Upgrade pip and install Poetry
+RUN python3 -m ensurepip --upgrade && \
+ python3 -m pip install --upgrade pip setuptools wheel && \
+ curl --proto "=https" --tlsv1.2 -sSf -L https://install.python-poetry.org | python3 - && \
+ poetry --version
+
+# Set working directory
+WORKDIR /app
+
+# Copy dependency files and install dependencies
+COPY pyproject.toml poetry.lock /app/
+RUN poetry install --no-dev --no-interaction --no-ansi
+
+# Copy the application source code
+COPY src /app/src
+
+# Ensure all files are group-writable for OpenShift compatibility
+RUN chmod -R g=u /app
+
+# OpenShift will assign a random UID at runtime
+USER 1001
+
+# Default command
+CMD ["python", "src/app.py"]
diff --git a/jobs/bad-name-notifier/Makefile b/jobs/bad-name-notifier/Makefile
new file mode 100644
index 000000000..6202e56d2
--- /dev/null
+++ b/jobs/bad-name-notifier/Makefile
@@ -0,0 +1,77 @@
+.PHONY: clean clean-build clean-pyc clean-test ci lint test build build-nc push run setup help
+
+CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(abspath $(lastword $(MAKEFILE_LIST)))))
+PROJECT_NAME:=bad-name-notifier
+DOCKER_NAME:=bad-name-notifier
+VENV_DIR:=.venv
+
+#################################################################################
+# Setup #
+#################################################################################
+setup: clean install ## Setup the project
+
+install: ## Install Python dependencies in a virtual environment
+ test -f $(VENV_DIR)/bin/activate || python3.12 -m venv $(VENV_DIR) ;\
+ . $(VENV_DIR)/bin/activate ;\
+ pip install poetry ;\
+ poetry install --no-dev
+
+#################################################################################
+# Clean #
+#################################################################################
+clean: clean-build clean-pyc clean-test ## Clean the project
+ rm -rf $(VENV_DIR)/
+
+clean-build: ## Clean build files
+ rm -fr build/ dist/ .eggs/
+ find . -name '*.egg-info' -exec rm -fr {} +
+ find . -name '*.egg' -exec rm -fr {} +
+
+clean-pyc: ## Clean Python cache files
+ find . -name '*.pyc' -exec rm -f {} +
+ find . -name '*.pyo' -exec rm -f {} +
+ find . -name '*~' -exec rm -f {} +
+ find . -name '__pycache__' -exec rm -fr {} +
+
+clean-test: ## Clean test files
+ find . -name '.pytest_cache' -exec rm -fr {} +
+ rm -fr .tox/ .coverage htmlcov/
+
+#################################################################################
+# Linting and Testing #
+#################################################################################
+ci: lint test ## CI flow
+
+lint: ## Run linting with pylint
+ . $(VENV_DIR)/bin/activate && pylint --rcfile=setup.cfg src/**/*.py
+
+test: ## Run unit tests
+ . $(VENV_DIR)/bin/activate && pytest src/
+
+#################################################################################
+# Build and Push Docker #
+#################################################################################
+build: ## Build the Docker container
+ docker build . -t $(DOCKER_NAME) \
+ --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \
+ --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
+
+build-nc: ## Build the Docker container without caching
+ docker build --no-cache -t $(DOCKER_NAME) .
+
+push: build ## Push the Docker container to the registry
+ @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\
+ docker tag $(DOCKER_NAME) $(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME):latest ;\
+ docker push $(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME):latest
+
+#################################################################################
+# Run #
+#################################################################################
+run: ## Run the project locally
+ . $(VENV_DIR)/bin/activate && poetry run python src/app.py
+
+#################################################################################
+# Self-Documenting Commands #
+#################################################################################
+help: ## Show available commands
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/jobs/bad-name-notifier/README.md b/jobs/bad-name-notifier/README.md
new file mode 100644
index 000000000..2d1614937
--- /dev/null
+++ b/jobs/bad-name-notifier/README.md
@@ -0,0 +1,22 @@
+# Bad-Name-Notifier
+
+Bad-Name-Notifier is a Python application designed to identify names with special characters in a PostgreSQL database and send email notifications to specified recipients. The application runs as a standalone job, suitable for deployment in OpenShift or similar platforms.
+
+## Features
+- Query a PostgreSQL database for names with special characters.
+- Filter out names that are not in specific states (e.g., `APPROVED`, `CONDITION`).
+- Send email notifications with formatted data to a configurable list of recipients.
+- Configurable via environment variables for flexibility.
+
+## Requirements
+- Python 3.12+
+- PostgreSQL database
+- Poetry for dependency management
+- OpenShift (for deployment)
+
+## Installation
+
+1. Clone the repository:
+ ```bash
+ git clone https://github.com/your-repo/bad-name-notifier.git
+ cd bad-name-notifier
diff --git a/jobs/bad-name-notifier/poetry.lock b/jobs/bad-name-notifier/poetry.lock
new file mode 100644
index 000000000..3e47be67b
--- /dev/null
+++ b/jobs/bad-name-notifier/poetry.lock
@@ -0,0 +1,454 @@
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+
+[[package]]
+name = "blinker"
+version = "1.9.0"
+description = "Fast, simple object-to-object and broadcast signaling"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "flask"
+version = "3.1.0"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"},
+ {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"},
+]
+
+[package.dependencies]
+blinker = ">=1.9"
+click = ">=8.1.3"
+itsdangerous = ">=2.2"
+Jinja2 = ">=3.1.2"
+Werkzeug = ">=3.1"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "greenlet"
+version = "3.1.1"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
+ {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
+ {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
+ {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
+ {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
+ {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
+ {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
+ {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
+ {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
+ {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
+ {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
+ {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
+ {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
+ {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
+ {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
+ {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
+ {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
+ {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
+ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
+ {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
+]
+
+[package.extras]
+docs = ["Sphinx", "furo"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "itsdangerous"
+version = "2.2.0"
+description = "Safely pass data to untrusted environments and back."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
+]
+
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "pytz"
+version = "2022.7.1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
+ {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
+]
+
+[[package]]
+name = "secure-smtplib"
+version = "0.1.1"
+description = "Secure SMTP subclasses for Python 2"
+optional = false
+python-versions = "*"
+files = [
+ {file = "secure-smtplib-0.1.1.tar.gz", hash = "sha256:3d4903f6612626ccd9b77ef05313cb3c3952a59a4d756cda2377b272fabad8b9"},
+ {file = "secure_smtplib-0.1.1-py2.py3-none-any.whl", hash = "sha256:645ea0a341f306fa3307c8fe1169765c417ff363db9946fa69f3669a9b718069"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "1.4.54"
+description = "Database Abstraction Library"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"},
+ {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+
+[package.extras]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (!=0.4.17)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql", "pymssql"]
+mssql-pyodbc = ["pyodbc", "pyodbc"]
+mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
+mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
+mysql-connector = ["mysql-connector-python", "mysql-connector-python"]
+oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+pymysql = ["pymysql", "pymysql (<1)"]
+sqlcipher = ["sqlcipher3_binary"]
+
+[[package]]
+name = "werkzeug"
+version = "3.1.3"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
+ {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.12"
+content-hash = "91eee9ab02266929bf2a634cb6e9030d5d23cabcfa07ce38743eb8eb64449d65"
diff --git a/jobs/bad-name-notifier/pyproject.toml b/jobs/bad-name-notifier/pyproject.toml
new file mode 100644
index 000000000..a75aaaa5f
--- /dev/null
+++ b/jobs/bad-name-notifier/pyproject.toml
@@ -0,0 +1,20 @@
+[tool.poetry]
+name = "bad-name-notifier"
+version = "0.1.0"
+description = "An app to detect and notify about bad names."
+authors = ["BC Registries and Online Services"]
+license = "Apache-2.0"
+readme = "README.md"
+packages = [
+    { include = "services", from = "src" },  # expose the `services` package from the src/ layout
+    { include = "src" }  # also ship the `src` folder itself (app entry point and config)
+]
+
+[tool.poetry.dependencies]
+python = "^3.12"
+flask = "^3.0.2"
+sqlalchemy = "^1.4.18"
+psycopg2-binary = "^2.9.7"
+secure-smtplib = "^0.1.1"
+pytz = "^2022.4"
+python-dotenv = "^1.0.1"
diff --git a/jobs/bad-name-notifier/run.sh b/jobs/bad-name-notifier/run.sh
new file mode 100755
index 000000000..249590062
--- /dev/null
+++ b/jobs/bad-name-notifier/run.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Activate virtual environment
+source venv/bin/activate
+
+# Export environment variables
+export FLASK_APP=src/app.py
+export FLASK_ENV=development
+
+# Run the Flask app
+python src/app.py
diff --git a/jobs/bad-name-notifier/src/app.py b/jobs/bad-name-notifier/src/app.py
new file mode 100644
index 000000000..e85d6753a
--- /dev/null
+++ b/jobs/bad-name-notifier/src/app.py
@@ -0,0 +1,41 @@
+from src.config import APP_CONFIG
+from flask import Flask, current_app
+from services.database_service import get_bad_names
+from services.email_service import send_email_notification
+
+import logging
+import logging.config
+import os
+
+def create_app(config_name="default"):
+ """Creates and configures the Flask app."""
+
+ app = Flask(__name__) # NOSONAR
+ app.config.from_object(APP_CONFIG[config_name])
+    # NOTE: do not print/log app.config here — it contains DB credentials (NAMEX_DATABASE_URI)
+ return app
+
+def run_task():
+ """Executes the task to query bad names and send an email."""
+ try:
+ # Step 1: Query data
+ bad_names = get_bad_names()
+
+ # Step 2: Send email
+ send_email_notification(bad_names)
+ current_app.logger.info("Notification sent successfully.")
+ except Exception as e:
+ current_app.logger.error(f"An error occurred: {e}")
+
+def setup_logging():
+ # Load the logging configuration
+ logging.config.fileConfig(os.path.join(os.path.dirname(__file__), "logging.conf"))
+ logger = logging.getLogger("api") # Use the `api` logger
+ # Example log message
+ logger.info("Logging is configured and ready!")
+
+if __name__ == "__main__":
+ setup_logging()
+ app = create_app()
+ with app.app_context(): # Ensures Flask app context is available
+ run_task()
diff --git a/jobs/bad-name-notifier/src/config.py b/jobs/bad-name-notifier/src/config.py
new file mode 100644
index 000000000..c5a6cb7da
--- /dev/null
+++ b/jobs/bad-name-notifier/src/config.py
@@ -0,0 +1,66 @@
+import os
+from dotenv import find_dotenv, load_dotenv
+
+# Get the project root directory
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Load the .env file from the project root
+load_dotenv(os.path.join(BASE_DIR, '.env'))
+
+
+class Config:
+ """Base configuration class."""
+
+ # Database Configuration
+ DB_USER = os.getenv("NAMEX_DATABASE_USERNAME", "")
+ DB_PASSWORD = os.getenv("NAMEX_DATABASE_PASSWORD", "")
+ DB_NAME = os.getenv("NAMEX_DATABASE_NAME", "")
+ DB_HOST = os.getenv("NAMEX_DATABASE_HOST", "")
+ DB_PORT = os.getenv("NAMEX_DATABASE_PORT", "5432")
+ NAMEX_DATABASE_URI = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
+
+ # Email Configuration
+ EMAIL_RECIPIENTS = os.getenv("EMAIL_RECIPIENTS", "").split(",")
+ SMTP_SERVER = os.getenv("SMTP_SERVER", "")
+ SMTP_USER = os.getenv("SMTP_USER", "")
+
+ # General Settings
+ DEBUG = False
+ TESTING = False
+
+
+class DevConfig(Config):
+ """Development-specific configuration."""
+ DEBUG = True
+ TESTING = False
+
+
+class TestConfig(Config):
+ """Testing-specific configuration."""
+ DEBUG = True
+ TESTING = True
+
+
+class ProdConfig(Config):
+ """Production-specific configuration."""
+ DEBUG = False
+ TESTING = False
+
+
+# Environment-specific configuration mapping
+APP_CONFIG = {
+ "development": DevConfig,
+ "testing": TestConfig,
+ "production": ProdConfig,
+ "default": ProdConfig,
+}
+
+
+def get_named_config(config_name: str = "default"):
+ """Return the configuration object based on the name.
+
+ :raise: KeyError: if an unknown configuration is requested
+ """
+ if config_name not in APP_CONFIG:
+ raise KeyError(f"Unknown configuration: {config_name}")
+ return APP_CONFIG[config_name]
diff --git a/jobs/bad-name-notifier/src/logging.conf b/jobs/bad-name-notifier/src/logging.conf
new file mode 100644
index 000000000..0806a8a2c
--- /dev/null
+++ b/jobs/bad-name-notifier/src/logging.conf
@@ -0,0 +1,28 @@
+[loggers]
+keys=root,api
+
+[handlers]
+keys=console
+
+[formatters]
+keys=simple
+
+[logger_root]
+level=DEBUG
+handlers=console
+
+[logger_api]
+level=DEBUG
+handlers=console
+qualname=api
+propagate=0
+
+[handler_console]
+class=StreamHandler
+level=DEBUG
+formatter=simple
+args=(sys.stdout,)
+
+[formatter_simple]
+format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s
+datefmt=
diff --git a/jobs/bad-name-notifier/src/services/__init__.py b/jobs/bad-name-notifier/src/services/__init__.py
new file mode 100644
index 000000000..674a7149f
--- /dev/null
+++ b/jobs/bad-name-notifier/src/services/__init__.py
@@ -0,0 +1,6 @@
+# services/__init__.py
+
+from .database_service import get_bad_names
+from .email_service import send_email_notification
+
+__all__ = ["get_bad_names", "send_email_notification"]
diff --git a/jobs/bad-name-notifier/src/services/database_service.py b/jobs/bad-name-notifier/src/services/database_service.py
new file mode 100644
index 000000000..713d3b3a0
--- /dev/null
+++ b/jobs/bad-name-notifier/src/services/database_service.py
@@ -0,0 +1,55 @@
+from flask import current_app
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import sessionmaker
+from src.config import get_named_config
+from .utils import get_yesterday_utc_range
+
+config = get_named_config
+
+def get_bad_names():
+ """Fetches bad names from the database."""
+ # Create the database engine and session
+ engine = create_engine(current_app.config["NAMEX_DATABASE_URI"])
+ Session = sessionmaker(bind=engine)
+ session = Session()
+
+ start_of_yesterday_utc, start_of_today_utc = get_yesterday_utc_range()
+
+ # Print the timestamps
+ current_app.logger.info(f"Start of yesterday (UTC): {start_of_yesterday_utc}")
+ current_app.logger.info(f"Start of today (UTC): {start_of_today_utc}")
+
+ try:
+ # Wrap the query in text()
+ query = text(f"""
+ SELECT DISTINCT r.nr_num, n.choice, n.name
+ FROM requests r
+ JOIN names n ON r.id = n.nr_id
+ JOIN events e on r.id = e.nr_id
+ WHERE n.state = 'NE'
+ AND r.nr_num NOT LIKE 'NR L%'
+ AND NOT EXISTS (
+ SELECT 1
+ FROM names n2
+ WHERE n2.nr_id = r.id
+ AND n2.state IN ('APPROVED', 'CONDITION')
+ )
+ AND EXISTS (
+ SELECT 1
+ FROM generate_series(1, length(n.name)) AS i
+ WHERE ascii(substr(n.name, i, 1)) < 32
+ OR ascii(substr(n.name, i, 1)) > 122
+ )
+ AND e.event_dt >= '{start_of_yesterday_utc}'
+        AND e.event_dt < '{start_of_today_utc}'
+ """)
+ result = session.execute(query).fetchall()
+
+ # Convert result rows to a list of dictionaries
+ keys = ["nr_num", "choice", "name"]
+ formatted_result = [dict(zip(keys, row)) for row in result]
+
+ return formatted_result
+ finally:
+ # Ensure the session is closed
+ session.close()
\ No newline at end of file
diff --git a/jobs/bad-name-notifier/src/services/email_service.py b/jobs/bad-name-notifier/src/services/email_service.py
new file mode 100644
index 000000000..98269d680
--- /dev/null
+++ b/jobs/bad-name-notifier/src/services/email_service.py
@@ -0,0 +1,70 @@
+from email.mime.text import MIMEText
+from flask import current_app
+from .utils import get_yesterday_str
+import smtplib
+
+def load_recipients():
+ """Load recipients dynamically from an environment variable."""
+ recipients = current_app.config["EMAIL_RECIPIENTS"]
+ return recipients if isinstance(recipients, list) else []
+
+def send_email_notification(bad_names):
+ """Sends an email notification with the bad names."""
+ # Dynamically load recipients
+ recipients = load_recipients()
+ current_app.logger.info(f'recipients:{recipients}')
+
+ # Check if recipients list is empty
+ if not recipients:
+ current_app.logger.error("No recipients found in the configuration.")
+ raise ValueError("Email recipients are not defined. Please check the configuration.")
+
+ # Create email content
+ body = generate_report_title() + "\n\n" + generate_report_body(bad_names)
+
+ msg = MIMEText(body)
+ smtp_user = current_app.config["SMTP_USER"]
+ smtp_server = current_app.config["SMTP_SERVER"]
+ msg["Subject"] = "Bad characters in names"
+ msg["From"] = smtp_user
+ msg["To"] = ", ".join(recipients)
+
+ # Send email
+ try:
+ with smtplib.SMTP(smtp_server) as server:
+ server.starttls()
+ server.sendmail(smtp_user, recipients, msg.as_string())
+ current_app.logger.info("Email sent successfully to: %s", ", ".join(recipients))
+ except Exception as e:
+ current_app.logger.error("Failed to send email: %s", e)
+ raise
+
+def generate_report_title():
+ """Generates an email title with yesterday's date."""
+ # Format the date as yyyy-mm-dd
+ yesterday = get_yesterday_str()
+
+ # Construct the email title
+ email_title = f"BAD CHARACTERS FOR {yesterday}"
+
+ return email_title
+
+def generate_report_body(bad_names):
+ """Formats the result into a table with headers and adds a total count at the end."""
+ # Table headers
+ title = f"{'NR Number':<15}{'Choice':<10}Name"
+ separator = "-" * len(title)
+
+ # Format each row
+ lines = [
+ f"{row['nr_num']:<15}{row['choice']:<10}{row['name']}"
+ for row in bad_names
+ ]
+
+ # Add total count at the end
+ total_count = len(bad_names)
+ footer_separator = "-" * len(title) # Line of dashes before the total
+ footer = f"\n{footer_separator}\nTotal bad names: {total_count}"
+
+ # Combine title, separator, rows, and footer into a single formatted string
+ return "\n".join([title, separator] + lines) + footer
diff --git a/jobs/bad-name-notifier/src/services/utils.py b/jobs/bad-name-notifier/src/services/utils.py
new file mode 100644
index 000000000..bfc2eaea4
--- /dev/null
+++ b/jobs/bad-name-notifier/src/services/utils.py
@@ -0,0 +1,22 @@
+from datetime import datetime, timedelta
+import pytz
+
+def get_yesterday_str():
+ # Calculate yesterday's date
+ yesterday = datetime.now() - timedelta(days=1)
+
+ # Format the date as yyyy-mm-dd
+ return yesterday.strftime('%Y-%m-%d')
+
+def get_yesterday_utc_range():
+ pacific = pytz.timezone('America/Los_Angeles')
+
+ # Calculate the start of today and yesterday in Pacific Time
+ start_of_today_pacific = pacific.localize(datetime.now().replace(hour=0, minute=0, second=0, microsecond=0))
+ start_of_yesterday_pacific = start_of_today_pacific - timedelta(days=1)
+
+ # Convert to UTC
+ start_of_today_utc = start_of_today_pacific.astimezone(pytz.utc).strftime('%Y-%m-%d %H:%M:%S')
+ start_of_yesterday_utc = start_of_yesterday_pacific.astimezone(pytz.utc).strftime('%Y-%m-%d %H:%M:%S')
+
+ return start_of_yesterday_utc, start_of_today_utc
\ No newline at end of file
diff --git a/jobs/nr-day-job/Dockerfile b/jobs/nr-day-job/Dockerfile
index b4f7996c6..cd5127d79 100644
--- a/jobs/nr-day-job/Dockerfile
+++ b/jobs/nr-day-job/Dockerfile
@@ -79,7 +79,7 @@ USER web
# The following stage is only for production:
# FROM development_build AS production_build
COPY --chown=web:web . /code
-RUN chmod -R 755 /code
+RUN chmod -R 755 /code/run.sh
EXPOSE 8080
diff --git a/jobs/nr-day-job/config.py b/jobs/nr-day-job/config.py
index f111ad749..8011eaa8f 100644
--- a/jobs/nr-day-job/config.py
+++ b/jobs/nr-day-job/config.py
@@ -66,13 +66,10 @@ class Config(): # pylint: disable=too-few-public-methods
DB_NAME = os.getenv('DATABASE_NAME', '')
DB_HOST = os.getenv('DATABASE_HOST', '')
DB_PORT = os.getenv('DATABASE_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
- user=DB_USER,
- password=DB_PASSWORD,
- host=DB_HOST,
- port=int(DB_PORT),
- name=DB_NAME,
- )
+ if DB_UNIX_SOCKET := os.getenv('DATABASE_UNIX_SOCKET', None):
+ SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}'
+ else:
+ SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
GCP_AUTH_KEY = os.getenv('BUSINESS_GCP_AUTH_KEY', None)
EMAILER_TOPIC = os.getenv('NAMEX_MAILER_TOPIC', '')
diff --git a/jobs/nr-day-job/devops/gcp/clouddeploy.yaml b/jobs/nr-day-job/devops/gcp/clouddeploy.yaml
index af39af1eb..06b393940 100644
--- a/jobs/nr-day-job/devops/gcp/clouddeploy.yaml
+++ b/jobs/nr-day-job/devops/gcp/clouddeploy.yaml
@@ -30,7 +30,7 @@ serialPipeline:
deploy-project-id: "a083gt-dev"
job-name: "namex-nr-day-job-dev"
service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
- cloudsql-instances: ""
+ cloudsql-instances: "a083gt-dev:northamerica-northeast1:namex-db-dev"
run-command: "./run.sh"
- targetId: a083gt-test
profiles: [test]
diff --git a/jobs/nr-day-job/devops/vaults.gcp.env b/jobs/nr-day-job/devops/vaults.gcp.env
index 403cfa602..cf758ef86 100644
--- a/jobs/nr-day-job/devops/vaults.gcp.env
+++ b/jobs/nr-day-job/devops/vaults.gcp.env
@@ -1,8 +1,8 @@
-DATABASE_USERNAME="op://namex/$APP_ENV/postgres-namex/DATABASE_USERNAME"
-DATABASE_PASSWORD="op://namex/$APP_ENV/postgres-namex/DATABASE_PASSWORD"
-DATABASE_NAME="op://namex/$APP_ENV/postgres-namex/DATABASE_NAME"
-DATABASE_HOST="op://namex/$APP_ENV/postgres-namex/DATABASE_HOST"
-DATABASE_PORT="op://namex/$APP_ENV/postgres-namex/DATABASE_PORT"
+DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME"
+DATABASE_PASSWORD="op://database/$APP_ENV/namex-db-gcp/DATABASE_PASSWORD"
+DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME"
+DATABASE_UNIX_SOCKET="op://database/$APP_ENV/namex-db-gcp/DATABASE_UNIX_SOCKET"
+DATABASE_PORT="op://database/$APP_ENV/namex-db-gcp/DATABASE_PORT"
BUSINESS_GCP_AUTH_KEY="op://gcp-queue/$APP_ENV/a083gt/BUSINESS_GCP_AUTH_KEY"
NAMEX_MAILER_TOPIC="op://gcp-queue/$APP_ENV/topics/NAMEX_MAILER_TOPIC"
NAMEX_NR_STATE_TOPIC="op://gcp-queue/$APP_ENV/topics/NAMEX_NR_STATE_TOPIC"
\ No newline at end of file
diff --git a/jobs/nr-day-job/poetry.lock b/jobs/nr-day-job/poetry.lock
index afe13f6eb..994d56bb7 100644
--- a/jobs/nr-day-job/poetry.lock
+++ b/jobs/nr-day-job/poetry.lock
@@ -1782,7 +1782,7 @@ ecdsa = "^0.18.0"
flask = "^3.0.2"
flask-caching = "^1.10.1"
flask-cors = "^4.0.0"
-flask-jwt-oidc = {git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version"}
+flask-jwt-oidc = { git = "https://github.com/seeker25/flask-jwt-oidc.git" }
flask-marshmallow = "^0.14.0"
flask-migrate = "^2.7.0"
flask-moment = "^0.11.0"
@@ -1845,7 +1845,7 @@ zipp = "^3.8.1"
type = "git"
url = "https://github.com/bcgov/namex.git"
reference = "HEAD"
-resolved_reference = "79316e352cd643f71f43e0be3c6e553a7531273e"
+resolved_reference = "5e07bca426b4813b36b75fda4adfbe4fa51bdc88"
subdirectory = "api"
[[package]]
diff --git a/jobs/nr-day-job/pyproject.toml b/jobs/nr-day-job/pyproject.toml
index 5a9f0a4c8..0dc0e6f57 100644
--- a/jobs/nr-day-job/pyproject.toml
+++ b/jobs/nr-day-job/pyproject.toml
@@ -10,7 +10,7 @@ python = "^3.12"
namex = { git = "https://github.com/bcgov/namex.git", subdirectory = "api", rev = "main"}
gcp_queue = { git = "https://github.com/bcgov/namex.git", subdirectory = "services/pubsub" }
# flask-jwt-oidc = { git = "https://github.com/thorwolpert/flask-jwt-oidc.git" }
-flask-jwt-oidc = { git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version" }
+flask-jwt-oidc = { git = "https://github.com/seeker25/flask-jwt-oidc.git" }
sbc-common-components = { git = "https://github.com/bcgov/sbc-common-components.git", subdirectory = "python" }
flask = "^3.0.2"
diff --git a/jobs/sftp-nuans-report/notebook/generate_files.ipynb b/jobs/sftp-nuans-report/notebook/generate_files.ipynb
index a8f69bdb8..d0b989bd4 100644
--- a/jobs/sftp-nuans-report/notebook/generate_files.ipynb
+++ b/jobs/sftp-nuans-report/notebook/generate_files.ipynb
@@ -250,7 +250,7 @@
"\n",
"with open(corp_filename, 'w') as f:\n",
" if not df_corp.empty:\n",
- " df_corp_string = \"\\n\".join(df_corp['formatted_output'].tolist())\n",
+ " df_corp_string = \"\\n\".join(df_corp['formatted_output'].tolist()) + \"\\n\"\n",
" f.write(df_corp_string)\n",
"\n",
"# Filter the DataFrame for 'FIRM'\n",
@@ -259,7 +259,7 @@
"\n",
"with open(firm_filename, 'w') as f:\n",
" if not df_firm.empty:\n",
- " df_firm_string = \"\\n\".join(df_firm['formatted_output'].tolist())\n",
+ " df_firm_string = \"\\n\".join(df_firm['formatted_output'].tolist()) + \"\\n\"\n",
" f.write(df_firm_string)\n",
"\n",
"print(f\"Reports generated: {corp_filename} and {firm_filename}\")"
diff --git a/services/auto-analyze/.devcontainer/Dockerfile b/services/auto-analyze/.devcontainer/Dockerfile
deleted file mode 100644
index d241eac2a..000000000
--- a/services/auto-analyze/.devcontainer/Dockerfile
+++ /dev/null
@@ -1,56 +0,0 @@
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-
-FROM python:3.6.12-buster
-
-# Avoid warnings by switching to noninteractive
-ENV DEBIAN_FRONTEND=noninteractive
-
-# This Dockerfile adds a non-root user with sudo access. Use the "remoteUser"
-# property in devcontainer.json to use it. On Linux, the container user's GID/UIDs
-# will be updated to match your local UID/GID (when using the dockerFile property).
-# See https://aka.ms/vscode-remote/containers/non-root-user for details.
-ARG USERNAME=notebook
-ARG USER_UID=1000
-ARG USER_GID=$USER_UID
-
-# Uncomment the following COPY line and the corresponding lines in the `RUN` command if you wish to
-# include your requirements in the image itself. It is suggested that you only do this if your
-# requirements rarely (if ever) change.
-# COPY requirements.txt /tmp/pip-tmp/
-
-# Configure apt and install packages
-RUN apt-get update \
- && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 \
- #
- # Verify git, process tools, lsb-release (common in install instructions for CLIs) installed
- && apt-get -y install git openssh-client iproute2 procps lsb-release \
- #
- # Install pylint
- && pip --disable-pip-version-check --no-cache-dir install pylint \
- #
- # Update Python environment based on requirements.txt
- # && pip --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
- # && rm -rf /tmp/pip-tmp \
- #
- # Create a non-root user to use if preferred - see https://aka.ms/vscode-remote/containers/non-root-user.
- && groupadd --gid $USER_GID $USERNAME \
- && useradd -s /bin/bash --uid $USER_UID --gid $USER_GID -m $USERNAME \
- # [Optional] Add sudo support for the non-root user
- && apt-get install -y sudo \
- && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME\
- && chmod 0440 /etc/sudoers.d/$USERNAME \
- #
- # Clean up
- && apt-get autoremove -y \
- && apt-get clean -y \
- && rm -rf /var/lib/apt/lists/*
-
-# Switch back to dialog for any ad-hoc use of apt-get
-ENV DEBIAN_FRONTEND=dialog
-
-# USER $USERNAME
-
-
diff --git a/services/auto-analyze/.devcontainer/devcontainer.json b/services/auto-analyze/.devcontainer/devcontainer.json
deleted file mode 100644
index 36e6606ae..000000000
--- a/services/auto-analyze/.devcontainer/devcontainer.json
+++ /dev/null
@@ -1,31 +0,0 @@
-// For format details, see https://aka.ms/vscode-remote/devcontainer.json or this file's README at:
-// https://github.com/microsoft/vscode-dev-containers/tree/v0.101.0/containers/python-3
-{
- "name": "Python 3",
- "context": "..",
- "dockerFile": "Dockerfile",
- // Set *default* container specific settings.json values on container create.
- "settings": {
- "terminal.integrated.shell.linux": "/bin/bash",
- "python.pythonPath": "/usr/local/bin/python",
- "python.linting.enabled": true,
- "python.linting.pylintEnabled": true,
- "python.linting.pylintPath": "/usr/local/bin/pylint"
- },
- // Add the IDs of extensions you want installed when the container is created.
- "extensions": [
- "ms-python.python"
- ]
-
- "appPort": [
- 3000,
- 8888
- ],
- "postCreateCommand": "pip install -r requirements.txt",
- // Use 'forwardPorts' to make a list of ports inside the container available locally.
- // "forwardPorts": [],
- // Use 'postCreateCommand' to run commands after the container is created.
- // "postCreateCommand": "pip install -r requirements.txt",
- // Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root.
- // "remoteUser": "vscode"
-}
\ No newline at end of file
diff --git a/services/auto-analyze/.dockerignore b/services/auto-analyze/.dockerignore
deleted file mode 100644
index e69de29bb..000000000
diff --git a/services/auto-analyze/Dockerfile b/services/auto-analyze/Dockerfile
deleted file mode 100644
index 5e4d518ea..000000000
--- a/services/auto-analyze/Dockerfile
+++ /dev/null
@@ -1,32 +0,0 @@
-FROM python:3.8.6-buster
-
-ARG BUILD_DATE
-ARG VCS_REF
-
-LABEL build_date=$BUILD_DATE
-LABEL vcs_ref=$VCS_REF
-
-USER root
-
-# Create working directory
-RUN mkdir /opt/app-root && chmod 755 /opt/app-root
-WORKDIR /opt/app-root
-
-# Install the requirements
-COPY ./requirements.txt .
-
-RUN pip install --upgrade pip
-RUN pip install --no-cache-dir -r requirements.txt
-
-COPY . .
-
-RUN pip install .
-
-USER 1001
-
-# Set Python path
-ENV PYTHONPATH=/opt/app-root/src
-
-EXPOSE 8080
-
-CMD ["hypercorn", "--bind", "0.0.0.0:8080", "wsgi:app"]
diff --git a/services/auto-analyze/LICENSE b/services/auto-analyze/LICENSE
deleted file mode 100644
index 18b5abc34..000000000
--- a/services/auto-analyze/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright © 2018 Province of British Columbia
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/services/auto-analyze/Makefile b/services/auto-analyze/Makefile
deleted file mode 100644
index 544890b03..000000000
--- a/services/auto-analyze/Makefile
+++ /dev/null
@@ -1,152 +0,0 @@
-.PHONY: license
-.PHONY: setup
-.PHONY: ci cd
-.PHONY: db run
-
-MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST)))
-CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH)))
-
-PROJECT_NAME:=auto_analyze
-DOCKER_NAME:=auto-analyze
-
-#################################################################################
-# COMMANDS -- license #
-#################################################################################
-license: ## Verify source code license headers.
- ./scripts/verify_license_headers.sh $(CURRENT_ABS_DIR)/src $(CURRENT_ABS_DIR)/tests
-
-#################################################################################
-# COMMANDS -- Setup #
-#################################################################################
-setup: clean install install-dev ## Setup the project
-
-clean: clean-build clean-pyc clean-test ## Clean the project
- rm -rf venv/
-
-clean-build: ## Clean build files
- rm -fr build/
- rm -fr dist/
- rm -fr .eggs/
- find . -name '*.egg-info' -exec rm -fr {} +
- find . -name '*.egg' -exec rm -fr {} +
-
-clean-pyc: ## Clean cache files
- find . -name '*.pyc' -exec rm -f {} +
- find . -name '*.pyo' -exec rm -f {} +
- find . -name '*~' -exec rm -f {} +
- find . -name '__pycache__' -exec rm -fr {} +
-
-clean-test: ## clean test files
- find . -name '.pytest_cache' -exec rm -fr {} +
- rm -fr .tox/
- rm -f .coverage
- rm -fr htmlcov/
-
-build-req: clean ## Upgrade requirements
- test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\
- . venv/bin/activate ;\
- pip install --upgrade pip ;\
- pip install -Ur requirements/prod.txt ;\
- pip freeze | sort > requirements.txt ;\
- cat requirements/bcregistry-libraries.txt >> requirements.txt ;\
- pip install -Ur requirements/bcregistry-libraries.txt
-
-install: clean ## Install python virtrual environment
- test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\
- . venv/bin/activate ;\
- pip install --upgrade pip ;\
- pip install -Ur requirements.txt
-
-install-dev: ## Install local application
- . venv/bin/activate ; \
- pip install -Ur requirements/dev.txt; \
- pip install -e .
-
-#################################################################################
-# COMMANDS - CI #
-#################################################################################
-ci: pylint flake8 test ## CI flow
-
-pylint: ## Linting with pylint
- . venv/bin/activate && pylint --rcfile=setup.cfg src/$(PROJECT_NAME)
-
-flake8: ## Linting with flake8
- . venv/bin/activate && flake8 src/$(PROJECT_NAME) tests
-
-lint: pylint flake8 ## run all lint type scripts
-
-test: ## Unit testing
- . venv/bin/activate && pytest
-
-mac-cov: local-test ## Run the coverage report and display in a browser window (mac)
- open -a "Google Chrome" htmlcov/index.html
-
-#################################################################################
-# COMMANDS - CD
-# expects the terminal to be openshift login
-# expects export OPENSHIFT_DOCKER_REGISTRY=""
-# expects export OPENSHIFT_SA_NAME="$(oc whoami)"
-# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)"
-# expects export OPENSHIFT_REPOSITORY=""
-# expects export TAG_NAME="dev/test/prod"
-# expects export OPS_REPOSITORY="" #
-#################################################################################
-cd: ## CD flow
-ifeq ($(TAG_NAME), test)
-cd: update-env
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME)
-else ifeq ($(TAG_NAME), prod)
-cd: update-env
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F)
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME)
-else
-TAG_NAME=dev
-cd: build update-env tag
-endif
-
-build: ## Build the docker container
- docker build . -t $(DOCKER_NAME) \
- --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \
- --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \
-
-build-nc: ## Build the docker container without caching
- docker build --no-cache -t $(DOCKER_NAME) .
-
-REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME)
-push: #build ## Push the docker container to the registry & tag latest
- @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\
- docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\
- docker push $(REGISTRY_IMAGE):latest
-
-VAULTS=`cat devops/vaults.json`
-update-env: ## Update env from 1pass
- oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \
- -m "secret" \
- -e "$(TAG_NAME)" \
- -a "$(DOCKER_NAME)-$(TAG_NAME)" \
- -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \
- -v "$(VAULTS)" \
- -r "true" \
- -f "false"
-
-tag: push ## tag image
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME)
-
-#################################################################################
-# COMMANDS - Local #
-#################################################################################
-run: db ## Run the project in local
- . venv/bin/activate && python -m flask run -p 5000
-
-db: ## Update the local database
- . venv/bin/activate && python -m manage.py db upgrade
-
-#################################################################################
-# Self Documenting Commands #
-#################################################################################
-.PHONY: help
-
-.DEFAULT_GOAL := help
-
-help:
- @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/services/auto-analyze/README.md b/services/auto-analyze/README.md
deleted file mode 100644
index d0161c9d8..000000000
--- a/services/auto-analyze/README.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# Application Name
-
-Auto Analyze Service
-
-## Technology Stack Used
-* Python, Quart
-
-## How to
-Open this in VSCode and use the devcontainer to do development.
-
-### Linting
-``` bash
-make pylint
-make flake8
-```
-
-### Testing
-``` bash
-pytest
-```
-
-## How to Contribute
-
-If you would like to contribute, please see our [CONTRIBUTING](./CONTRIBUTING.md) guidelines.
-
-Please note that this project is released with a [Contributor Code of Conduct](./CODE_OF_CONDUCT.md).
-By participating in this project you agree to abide by its terms.
-
-## License
-
- Copyright 2020 Province of British Columbia
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/services/auto-analyze/devops/vaults.gcp.env b/services/auto-analyze/devops/vaults.gcp.env
deleted file mode 100644
index 6a5a5a65d..000000000
--- a/services/auto-analyze/devops/vaults.gcp.env
+++ /dev/null
@@ -1,13 +0,0 @@
-DATABASE_USERNAME="op://namex/$APP_ENV/postgres-namex/DATABASE_USERNAME"
-DATABASE_PASSWORD="op://namex/$APP_ENV/postgres-namex/DATABASE_PASSWORD"
-DATABASE_NAME="op://namex/$APP_ENV/postgres-namex/DATABASE_NAME"
-DATABASE_HOST="op://namex/$APP_ENV/postgres-namex/DATABASE_HOST"
-DATABASE_PORT="op://namex/$APP_ENV/postgres-namex/DATABASE_PORT"
-JWT_OIDC_AUDIENCE="op://namex/$APP_ENV/jwt/JWT_OIDC_AUDIENCE"
-JWT_OIDC_CLIENT_SECRET="op://namex/$APP_ENV/jwt/JWT_OIDC_CLIENT_SECRET"
-SOLR_SYNONYMS_API_URL="op://API/$APP_ENV/solr-synonyms-api/SOLR_SYNONYMS_API_URL"
-SOLR_SYNONYMS_API_VERSION="op://API/$APP_ENV/solr-synonyms-api/SOLR_SYNONYMS_API_VERSION"
-JWT_OIDC_ISSUER="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_ISSUER"
-JWT_OIDC_JWKS_CACHE_TIMEOUT="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_JWKS_CACHE_TIMEOUT"
-JWT_OIDC_CACHING_ENABLED="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_CACHING_ENABLED"
-JWT_OIDC_WELL_KNOWN_CONFIG="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_WELL_KNOWN_CONFIG"
\ No newline at end of file
diff --git a/services/auto-analyze/requirements.txt b/services/auto-analyze/requirements.txt
deleted file mode 100644
index 02e604dae..000000000
--- a/services/auto-analyze/requirements.txt
+++ /dev/null
@@ -1,44 +0,0 @@
-Flask-SQLAlchemy==2.5.1
-Flask==1.1.2
-Hypercorn==0.11.2
-Jinja2==2.11.3
-MarkupSafe==1.1.1
-Quart==0.10.0
-SQLAlchemy==1.4.11
-Werkzeug==1.0.1
-aiofiles==0.7.0
-blinker==1.4
-certifi==2020.12.5
-chardet==4.0.0
-click==7.1.2
-flask-marshmallow==0.14.0
-greenlet==1.0.0
-h11==0.12.0
-h2==4.0.0
-hpack==4.0.0
-hyperframe==6.0.1
-idna==2.10
-inflect==5.3.0
-itsdangerous==1.1.0
-joblib==1.0.1
-jsonpickle==2.0.0
-lxml==4.6.3
-marshmallow-sqlalchemy==0.24.3
-marshmallow==3.11.1
-nltk==3.6.2
-numpy==1.20.2
-pandas==1.2.4
-priority==1.3.0
-pysolr==3.9.0
-python-dateutil==2.8.1
-pytz==2021.1
-regex==2021.4.4
-requests==2.25.1
-six==1.15.0
-toml==0.10.2
-tqdm==4.60.0
-urllib3==1.26.4
-wsproto==1.0.0
-git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api
-git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client
-git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client
\ No newline at end of file
diff --git a/services/auto-analyze/requirements/bcregistry-libraries.txt b/services/auto-analyze/requirements/bcregistry-libraries.txt
deleted file mode 100644
index f22a0d2b6..000000000
--- a/services/auto-analyze/requirements/bcregistry-libraries.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api
-git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client
-git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client
\ No newline at end of file
diff --git a/services/auto-analyze/requirements/dev.txt b/services/auto-analyze/requirements/dev.txt
deleted file mode 100755
index 0ff09bcff..000000000
--- a/services/auto-analyze/requirements/dev.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-# Everything the developer needs other than production requirements
-
-# Testing
-coverage
-freezegun
-hypothesis<=4.38.1
-pyhamcrest
-pytest
-pytest-mock
-pytest-asyncio
-requests-mock
-
-# Lint and code style
-autopep8
-flake8
-flake8-blind-except
-flake8-debugger
-flake8-docstrings
-flake8-isort
-flake8-quotes
-pep8-naming
-pydocstyle
-pylint
-isort
-
-# docker
-lovely-pytest-docker
-pytest_docker
diff --git a/services/auto-analyze/requirements/prod.txt b/services/auto-analyze/requirements/prod.txt
deleted file mode 100644
index 8b61bbba8..000000000
--- a/services/auto-analyze/requirements/prod.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-hypercorn
-quart
-requests
-nltk
-Flask-SQLAlchemy
-Flask-Marshmallow
-marshmallow
-marshmallow-sqlalchemy
-pandas
-inflect
-werkzeug
-pysolr
-jsonpickle
-lxml
\ No newline at end of file
diff --git a/services/auto-analyze/setup.cfg b/services/auto-analyze/setup.cfg
deleted file mode 100644
index de72fc3bc..000000000
--- a/services/auto-analyze/setup.cfg
+++ /dev/null
@@ -1,125 +0,0 @@
-[metadata]
-name = auto_analyze
-url = https://github.com/thorwolpert/namex
-author = team-le
-author_email = thor@wolpert.ca
-classifiers =
- Development Status :: Beta
- Intended Audience :: Developers / QA
- Topic :: Names Examination
- License :: OSI Approved :: Apache Software License
- Natural Language :: English
- Programming Language :: Python :: 3.8
-license = Apache Software License Version 2.0
-description = A short description of the project
-long_description = file: README.md
-keywords =
-
-[options]
-zip_safe = True
-python_requires = >=3.8
-include_package_data = True
-packages = find:
-
-[options.package_data]
-auto_analyze =
-
-[wheel]
-universal = 1
-
-[bdist_wheel]
-universal = 1
-
-[aliases]
-test = pytest
-
-[flake8]
-exclude = .git,*migrations*
-max-line-length = 120
-docstring-min-length=10
-per-file-ignores =
- */__init__.py:F401
-
-[pycodestyle]
-max_line_length = 120
-ignore = E501
-docstring-min-length=10
-notes=FIXME,XXX # TODO is ignored
-match_dir = src/auto_analyze
-ignored-modules=flask_sqlalchemy
- sqlalchemy
-per-file-ignores =
- */__init__.py:F401
-good-names=
- b,
- d,
- i,
- e,
- f,
- k,
- q,
- u,
- v,
- ar,
- d1,
- d2,
- d3,
- db,
- k1,
- k2,
- rv,
- v1,
- v2,
- logger,
-
-[pylint]
-ignore=migrations,test
-max_line_length=120
-notes=FIXME,XXX,TODO
-ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session
-ignored-classes=scoped_session
-disable=C0301,W0511,W0703,R0801,R0902,R0401
-
-[isort]
-line_length = 120
-indent = 4
-multi_line_output = 3
-lines_after_imports = 2
-include_trailing_comma = True
-
-[tool:pytest]
-minversion = 2.0
-testpaths = tests
-addopts = --verbose
- --strict
- -p no:warnings
-python_files = tests/*/test*.py
-norecursedirs = .git .tox venv* requirements* build
-log_cli = true
-log_cli_level = 1
-filterwarnings =
- ignore::UserWarning
-markers =
- slow
- serial
-
-[coverage:run]
-branch = True
-source =
- src/auto_analyze
- src/config
-omit =
- src/auto_analyze/wsgi.py
-
-[report:run]
-exclude_lines =
- pragma: no cover
- from
- import
- def __repr__
- if self.debug:
- if settings.DEBUG
- raise AssertionError
- raise NotImplementedError
- if 0:
- if __name__ == .__main__.:
diff --git a/services/auto-analyze/setup.py b/services/auto-analyze/setup.py
deleted file mode 100644
index 0c6aa4388..000000000
--- a/services/auto-analyze/setup.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright © 2020 Province of British Columbia.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Installer and setup for this module
-"""
-import ast
-from glob import glob
-from os.path import basename, splitext
-import re
-
-from setuptools import setup, find_packages
-
-_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name
-
-with open('src/auto_analyze/version.py', 'rb') as f:
- version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name
- f.read().decode('utf-8')).group(1)))
-
-
-def read_requirements(filename):
- """
- Get application requirements from
- the requirements.txt file.
- :return: Python requirements
- """
- with open(filename, 'r') as req:
- requirements = req.readlines()
- install_requires = [r.strip() for r in requirements if r.find('git+') != 0]
- return install_requires
-
-
-def read(filepath):
- """
- Read the contents from a file.
- :param str filepath: path to the file to be read
- :return: file contents
- """
- with open(filepath, 'r') as file_handle:
- content = file_handle.read()
- return content
-
-
-REQUIREMENTS = read_requirements('requirements.txt')
-
-setup(
- name="auto_analyze",
- version=version,
- author_email='thor@wolpert.ca',
- packages=find_packages('src'),
- package_dir={'': 'src'},
- py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
- include_package_data=True,
- license=read('LICENSE'),
- long_description=read('README.md'),
- zip_safe=False,
- install_requires=REQUIREMENTS,
- setup_requires=["pytest-runner", ],
- tests_require=["pytest", ],
-)
diff --git a/services/auto-analyze/src/auto_analyze/__init__.py b/services/auto-analyze/src/auto_analyze/__init__.py
deleted file mode 100644
index aedb6ef82..000000000
--- a/services/auto-analyze/src/auto_analyze/__init__.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The service that analyzes an array of names."""
-# This is important as this will add modules purporting to be Flask modules for later use by the extension.
-# Without this, Flask-SQLAlchemy may not work!
-# Thanks!
-import asyncio
-import os
-
-import config # pylint: disable=wrong-import-order; # noqa: I001
-import quart.flask_patch
-from namex import models
-from namex.models import db, ma
-from namex.services.name_request.auto_analyse.protected_name_analysis import ProtectedNameAnalysisService
-from quart import Quart, jsonify, request
-
-
-from .analyzer import auto_analyze
-
-
-# Set config
-QUART_APP = os.getenv('QUART_APP')
-RUN_MODE = os.getenv('FLASK_ENV', 'production')
-
-
-async def create_app(run_mode):
- """Create the app object for configuration and use."""
- try:
- quart_app = Quart(__name__)
- quart_app.logger.debug('APPLICATION CREATED')
- quart_app.config.from_object(config.CONFIGURATION[run_mode])
- db.init_app(quart_app)
- ma.init_app(quart_app)
- except Exception as err:
- quart_app.logger.debug(
- 'Error creating application in auto-analyze service: {0}'.format(repr(err.with_traceback(None))))
- raise
-
- @quart_app.after_request
- def add_version(response): # pylint: disable=unused-variable; linter not understanding framework call
- os.getenv('OPENSHIFT_BUILD_COMMIT', '')
- return response
-
- register_shellcontext(quart_app)
- await quart_app.app_context().push()
- return quart_app
-
-
-def register_shellcontext(quart_app):
- """Register shell context objects."""
-
- def shell_context():
- """Shell context objects."""
- return {
- 'app': quart_app,
- # 'jwt': jwt,
- 'db': db,
- 'models': models
- }
-
- quart_app.shell_context_processor(shell_context)
-
-
-loop = asyncio.get_event_loop()
-app = loop.run_until_complete(create_app(RUN_MODE))
-db.app = app # Just set it, see if it works...
-
-
-@app.route('/', methods=['POST'])
-async def private_service():
- """Return the outcome of this private service call."""
- name_analysis_service = ProtectedNameAnalysisService()
- service = name_analysis_service
- np_svc_prep_data = service.name_processing_service
- np_svc_prep_data.prepare_data()
-
- json_data = await request.get_json()
- list_dist = json_data.get('list_dist')
- list_desc = json_data.get('list_desc')
- list_name = json_data.get('list_name')
- dict_substitution = json_data.get('dict_substitution')
- dict_synonyms = json_data.get('dict_synonyms')
- matches = json_data.get('names')
-
- app.logger.debug('Number of matches: {0}'.format(len(matches)))
-
- result = await asyncio.gather(
- *[auto_analyze(name, list_name, list_dist, list_desc, dict_substitution, dict_synonyms, np_svc_prep_data) for
- name in matches]
- )
- return jsonify(result=result)
-
-
-if __name__ == '__main__':
- app.run(port=7000, host='localhost')
diff --git a/services/auto-analyze/src/auto_analyze/analyzer.py b/services/auto-analyze/src/auto_analyze/analyzer.py
deleted file mode 100644
index 9f1d09554..000000000
--- a/services/auto-analyze/src/auto_analyze/analyzer.py
+++ /dev/null
@@ -1,319 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Analyzes a single name."""
-import itertools
-import logging
-import math
-import re
-from collections import Counter
-
-from namex.services.name_processing.name_processing import NameProcessingService
-from namex.services.name_request.auto_analyse.name_analysis_utils import (
- get_classification,
- get_flat_list,
- remove_double_letters_list_dist_words,
- remove_spaces_list,
- subsequences,
-)
-from namex.services.name_request.auto_analyse.protected_name_analysis import ProtectedNameAnalysisService
-from namex.services.name_request.builders.name_analysis_builder import NameAnalysisBuilder
-from nltk.stem import PorterStemmer
-from swagger_client import SynonymsApi as SynonymService
-
-
-porter = PorterStemmer()
-
-synonym_service = SynonymService()
-name_processing_service = NameProcessingService()
-name_analysis_service = ProtectedNameAnalysisService()
-builder = NameAnalysisBuilder(name_analysis_service)
-
-STEM_W = 0.85
-SUBS_W = 0.65
-OTHER_W_DESC = 3.0
-OTHER_W_DIST = 0.35
-
-EXACT_MATCH = 1.0
-HIGH_SIMILARITY = 0.85
-MEDIUM_SIMILARITY = 0.71
-MINIMUM_SIMILARITY = 0.66
-
-HIGH_CONFLICT_RECORDS = 20
-
-
-# ok deep function
-async def auto_analyze(name: str, # pylint: disable=too-many-locals, too-many-arguments
- list_name: list, list_dist: list,
- list_desc: list, dict_substitution: dict,
- dict_synonyms: dict,
- np_svc_prep_data: name_analysis_service) -> dict:
- """Return a dictionary with name as key and similarity as value, 1.0 is an exact match."""
- logging.getLogger(__name__).debug(
- 'name: %s , list_name %s, list_dist: %s, list_desc: %s, dict_subst: %s, dict_syns: %s',
- name, list_name, list_dist, list_desc, dict_substitution, dict_synonyms)
- syn_svc = synonym_service
- service = name_analysis_service
- np_svc = service.name_processing_service
- wc_svc = service.word_classification_service
- token_svc = service.token_classifier_service
-
- dict_matches_counter = {}
-
- np_svc.set_name(name, np_svc_prep_data)
- stand_alone_words = np_svc_prep_data.get_stand_alone_words()
-
- if np_svc.name_tokens == list_name:
- similarity = EXACT_MATCH
- else:
- match_list = np_svc.name_tokens
- get_classification(service, stand_alone_words, syn_svc, match_list, wc_svc, token_svc, True)
-
- dist_db_substitution_dict = builder.get_substitutions_distinctive(service.get_list_dist())
- service._list_dist_words, match_list, _ = remove_double_letters_list_dist_words(service.get_list_dist(),
- match_list)
-
- desc_tmp_synonym_dict = builder.get_substitutions_descriptive(service.get_list_desc())
- desc_tmp_synonym_dict = remove_extra_value(desc_tmp_synonym_dict, dict_synonyms)
-
- # Update key in desc_db_synonym_dict
- service._dict_desc_words_search_conflicts = stem_key_dictionary( # pylint: disable=protected-access
- desc_tmp_synonym_dict
- )
- service._dict_desc_words_search_conflicts = add_key_values( # pylint: disable=protected-access
- service.get_dict_desc_search_conflicts()
- )
- dict_synonyms = stem_key_dictionary(dict_synonyms)
- dict_synonyms = add_key_values(dict_synonyms)
-
- list_desc, dict_synonyms = remove_descriptive_same_category(dict_synonyms)
-
- service._list_desc_words = list( # pylint: disable=protected-access
- service.get_dict_desc_search_conflicts().keys()
- )
-
- # Check if list_dist needs to be spplitted based on service.get_list_dist()
- list_dist = get_split_compound(list_dist, service.get_list_dist())
- service._list_dist_words = get_split_compound( # pylint: disable=protected-access
- service.get_list_dist(),
- list_dist)
-
- list_dist_stem = [porter.stem(word) for word in list_dist]
- vector1_dist = text_to_vector(list_dist_stem)
-
- vector2_dist, entropy_dist = get_vector(service.get_list_dist(), list_dist,
- dist_db_substitution_dict, True)
-
- if all(value == OTHER_W_DIST for value in vector2_dist.values()):
- vector2_dist, entropy_dist, _ = check_compound_dist(list_dist=list(vector2_dist.keys()),
- list_desc=None,
- original_class_list=list_dist,
- class_subs_dict=dist_db_substitution_dict)
-
- if not vector2_dist:
- match_list_desc = list(service.get_list_desc())
- match_list_dist_desc = service.get_list_dist() + match_list_desc[0:-1]
- vector2_dist, entropy_dist, service._list_desc_words = check_compound_dist(
- list_dist=match_list_dist_desc,
- list_desc=service.get_list_desc(),
- original_class_list=list_dist,
- class_subs_dict=dict_synonyms)
-
- similarity_dist = round(get_similarity(vector1_dist, vector2_dist, entropy_dist), 2)
-
- list_desc_stem = [porter.stem(word) for word in list_desc]
- vector1_desc = text_to_vector(list_desc_stem)
-
- vector2_desc, entropy_desc = get_vector(
- remove_spaces_list(service.get_list_desc()), list_desc,
- service.get_dict_desc_search_conflicts())
- similarity_desc = round(
- get_similarity(vector1_desc, vector2_desc, entropy_desc), 2)
-
- similarity = round((similarity_dist + similarity_desc) / 2, 2)
- logging.getLogger(__name__).debug('similarity: %s', similarity)
-
- if similarity == EXACT_MATCH or (
- similarity >= MINIMUM_SIMILARITY and not is_not_real_conflict(list_name,
- stand_alone_words,
- list_dist,
- dict_synonyms,
- service)):
- dict_matches_counter.update({name: similarity})
-
- return dict_matches_counter
-
-
-def get_vector(conflict_class_list, original_class_list, class_subs_dict, dist=False):
- """Return vector of words (or synonyms) found in original_class_list which are in conflict_class_list."""
- vector = dict()
- entropy = list()
- original_class_list = original_class_list if original_class_list else []
- class_subs_dict = class_subs_dict if class_subs_dict else {}
-
- conflict_class_stem = [porter.stem(name.lower()) for name in conflict_class_list]
-
- for idx, word in enumerate(original_class_list): # pylint: disable=unused-variable
- k = word.lower()
- word_stem = porter.stem(k)
- counter = 1
- if word.lower() in conflict_class_list:
- entropy.append(1)
- elif word_stem in conflict_class_stem:
- entropy.append(STEM_W)
- elif word_stem in get_flat_list(class_subs_dict.values()):
- entropy.append(SUBS_W)
- else:
- counter = OTHER_W_DIST if dist else OTHER_W_DESC
- entropy.append(0.0)
- if counter == 1:
- vector[word_stem] = counter
-
- # Make sure we don't divide by zero!
- entropy_score = sum(entropy) / len(entropy) if len(entropy) > 0 else 0
- return vector, entropy_score
-
-
-def check_compound_dist(list_dist, list_desc, original_class_list, class_subs_dict):
- """Return a vector with distinctive compound items and updated list of descriptives."""
- vector_dist = {}
- entropy_dist = 0.0
- for i in range(2, len(list_dist) + 1):
- compound_space_list = [x for x in subsequences(list_dist, i)] # pylint: disable=unnecessary-comprehension
- compound = [x.replace(' ', '') for x in compound_space_list]
- vector_dist, entropy_dist = get_vector(compound, original_class_list, class_subs_dict)
-
- # Update descriptive list
- if list_desc and entropy_dist > 0.0:
- token_list = []
- for word in compound_space_list:
- token_list.extend(word.split())
- intersection = [x for x in list_desc if x in token_list]
- for word in intersection:
- list_desc.remove(word)
-
- return vector_dist, entropy_dist, list_desc
-
-
-def text_to_vector(list_name):
- """Return a vector."""
- return Counter(list_name)
-
-
-def get_cosine(vec1, vec2):
- """Return cosine similarity between vector 1 and vector 2."""
- intersection = set(vec1.keys()) & set(vec2.keys())
- numerator = sum([vec1[x] * vec2[x] for x in intersection])
-
- sum1 = sum([vec1[x] ** 2 for x in list(vec1.keys())])
- sum2 = sum([vec2[x] ** 2 for x in list(vec2.keys())])
-
- denominator = math.sqrt(sum1) * math.sqrt(sum2)
-
- if not denominator:
- return 0.0
- return float(numerator) / denominator
-
-
-def get_similarity(vector1, vector2, entropy):
- """Return similarity between two vectors which are either both distinctives or descriptives."""
- return get_cosine(vector1, vector2) * entropy
-
-
-def is_not_real_conflict(list_name, stand_alone_words, list_dist, dict_desc, service):
- """Return True if the name is not a real conflict. Otherwise, false if the name is a conflict."""
- list_desc = list(dict_desc.keys())
- if is_standalone_name(list_name, stand_alone_words):
- return stand_alone_additional_dist_desc(list_dist, service.get_list_dist(), list_desc,
- service.get_list_desc())
- return False
-
-
-def is_standalone_name(list_name, stand_alone_words):
- """Return True if standalone name."""
- if any(stand_alone in list_name for stand_alone in stand_alone_words):
- return True
- return False
-
-
-def stand_alone_additional_dist_desc(lst_dist_name1, lst_dist_name2, lst_desc_name1, lst_desc_name2):
- """Return True if there is an additional distinctive or descriptive in the stand-alone name."""
- if lst_dist_name1.__len__() != lst_dist_name2.__len__() or lst_desc_name1.__len__() != lst_desc_name2.__len__():
- return True
-
- return False
-
-
-def remove_extra_value(d1, d2):
- """Return d1 with d2 items removed."""
- for k2, v2 in d2.items(): # pylint: disable=unused-variable
- for k1, v1 in d1.items():
- if len(set(v1) ^ set(v2)) == 1 and k1 not in v2:
- try:
- v1.remove(k1)
- break
- except ValueError:
- pass
- return d1
-
-
-def update_dictionary_key(d1, d2):
- """Update key dictionary in d1 with key in d2."""
- d3 = {}
- skip = False
- for k1, v1 in d1.items():
- for k2, v2 in d2.items():
- if (len(set(v1) ^ set(v2)) == 1 and k1 in v2) or len(set(v1) ^ set(v2)) == 0:
- d3.update({k2: v1})
- skip = True
- break
- if not skip:
- d3.update({k1: v1})
- skip = False
- return d3
-
-
-def remove_descriptive_same_category(dict_desc):
- """Remove descriptive with the same category."""
- dict_desc_unique_category = {key: val for i, (key, val) in enumerate(dict_desc.items())
- if porter.stem(key) not in itertools.chain(*list(dict_desc.values())[:i])}
-
- return list(dict_desc_unique_category.keys()), dict_desc_unique_category
-
-
-def stem_key_dictionary(d1):
- """Stem the dictionary key."""
- dict_stem = {porter.stem(k): v for (k, v) in d1.items()}
-
- return dict_stem
-
-
-def get_split_compound(a_dist, b_dist):
- """Split items in a_dist based on b_dist."""
- str_tokens = ' '.join([str(elem) for elem in b_dist])
- new_dist = list()
- for element in a_dist:
- if re.search(r'\b{0}\b'.format(re.escape(str_tokens.lower())), element.lower()):
- new_dist.extend(b_dist)
- else:
- new_dist.append(element)
-
- return new_dist
-
-
-def add_key_values(d1):
- """Add key in dictionary to values if does not exist."""
- for key, values in d1.items():
- if key not in values:
- values.append(key)
- return d1
diff --git a/services/auto-analyze/src/auto_analyze/version.py b/services/auto-analyze/src/auto_analyze/version.py
deleted file mode 100644
index 25b3b9e09..000000000
--- a/services/auto-analyze/src/auto_analyze/version.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Version of this service in PEP440.
-
-[N!]N(.N)*[{a|b|rc}N][.postN][.devN]
-Epoch segment: N!
-Release segment: N(.N)*
-Pre-release segment: {a|b|rc}N
-Post-release segment: .postN
-Development release segment: .devN
-"""
-
-__version__ = '0.0.1' # pylint: disable=invalid-name
diff --git a/services/auto-analyze/src/config.py b/services/auto-analyze/src/config.py
deleted file mode 100644
index d246ce22e..000000000
--- a/services/auto-analyze/src/config.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""All of the configuration for the service is captured here.
-
-All items are loaded, or have Constants defined here that
-are loaded into the Quartz configuration.
-All modules and lookups get their configuration from the
-Quartz config, rather than reading environment variables directly
-or by accessing this configuration directly.
-"""
-import os
-
-from dotenv import find_dotenv, load_dotenv
-
-
-# This will load all the envars from a .env file located in the project root
-load_dotenv(find_dotenv())
-
-CONFIGURATION = {
- 'development': 'config.DevConfig',
- 'testing': 'config.TestConfig',
- 'production': 'config.Config',
- 'default': 'config.Config'
-}
-
-
-class Config: # pylint: disable=too-few-public-methods
- """Base class configuration that should set reasonable defaults.
-
- Used as the base for all the other configurations.
- """
-
- TESTING = False
- DEBUG = False
-
- PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
- SECRET_KEY = 'a secret'
- SQLALCHEMY_TRACK_MODIFICATIONS = False
- ALEMBIC_INI = 'migrations/alembic.ini'
-
- # POSTGRESQL
- DB_USER = os.getenv('DATABASE_USERNAME', '')
- DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '')
- DB_NAME = os.getenv('DATABASE_NAME', '')
- DB_HOST = os.getenv('DATABASE_HOST', '')
- DB_PORT = os.getenv('DATABASE_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
- user=DB_USER,
- password=DB_PASSWORD,
- host=DB_HOST,
- port=int(DB_PORT),
- name=DB_NAME
- )
-
- SOLR_SYNONYMS_API_URL = f'{os.getenv('SOLR_SYNONYMS_API_URL', None)}{os.getenv('SOLR_SYNONYMS_API_VERSION', None)}'
-
- # JWT_OIDC Settings
- JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG')
- JWT_OIDC_ALGORITHMS = os.getenv('JWT_OIDC_ALGORITHMS')
- JWT_OIDC_JWKS_URI = os.getenv('JWT_OIDC_JWKS_URI')
- JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_ISSUER')
- JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_AUDIENCE')
- JWT_OIDC_CLIENT_SECRET = os.getenv('JWT_OIDC_CLIENT_SECRET')
- JWT_OIDC_CACHING_ENABLED = os.getenv('JWT_OIDC_CACHING_ENABLED')
- # JWT_OIDC_JWKS_CACHE_TIMEOUT = int(os.getenv('JWT_OIDC_JWKS_CACHE_TIMEOUT'), 300)
-
-
-class DevConfig(Config): # pylint: disable=too-few-public-methods
- """Creates the Development Config object."""
-
- DEBUG = True
- TESTING = False
-
-
-class TestConfig(Config): # pylint: disable=too-few-public-methods
- """In support of testing only.
-
- Used by the py.test suite
- """
-
- DEBUG = True
- TESTING = True
-
- # POSTGRESQL
- DB_USER = os.getenv('DATABASE_TEST_USERNAME', '')
- DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', '')
- DB_NAME = os.getenv('DATABASE_TEST_NAME', '')
- DB_HOST = os.getenv('DATABASE_TEST_HOST', '')
- DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432')
- # Set this in your .env to debug SQL Alchemy queries (for local development)
- SQLALCHEMY_ECHO = os.getenv('DEBUG_SQL_QUERIES', None)
- SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
- user=DB_USER,
- password=DB_PASSWORD,
- host=DB_HOST,
- port=int(DB_PORT),
- name=DB_NAME
- )
-
- SOLR_SYNONYMS_API_URL = f'{os.getenv('SOLR_SYNONYMS_API_URL', None)}{os.getenv('SOLR_SYNONYMS_API_VERSION', None)}'
-
- # JWT OIDC settings
- # JWT_OIDC_TEST_MODE will set jwt_manager to use
- JWT_OIDC_TEST_MODE = True
- JWT_OIDC_TEST_AUDIENCE = os.getenv('JWT_OIDC_AUDIENCE')
- JWT_OIDC_TEST_CLIENT_SECRET = os.getenv('JWT_OIDC_CLIENT_SECRET')
- JWT_OIDC_TEST_ISSUER = 'https://sso-dev.pathfinder.gov.bc.ca/auth/realms/sbc'
- JWT_OIDC_TEST_KEYS = {
- 'keys': [
- {
- 'kid': 'flask-jwt-oidc-test-client',
- 'kty': 'RSA',
- 'alg': 'RS256',
- 'use': 'sig',
- 'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', # noqa: E501
- 'e': 'AQAB'
- }
- ]
- }
-
- JWT_OIDC_TEST_PRIVATE_KEY_JWKS = {
- 'keys': [
- {
- 'kid': 'flask-jwt-oidc-test-client',
- 'kty': 'RSA',
- 'alg': 'RS256',
- 'use': 'sig',
- 'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', # noqa: E501
- 'e': 'AQAB',
- 'd': 'C0G3QGI6OQ6tvbCNYGCqq043YI_8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhskURaDwk4-8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh_xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0', # noqa: E501
- 'p': 'APXcusFMQNHjh6KVD_hOUIw87lvK13WkDEeeuqAydai9Ig9JKEAAfV94W6Aftka7tGgE7ulg1vo3eJoLWJ1zvKM',
- 'q': 'AOjX3OnPJnk0ZFUQBwhduCweRi37I6DAdLTnhDvcPTrrNWuKPg9uGwHjzFCJgKd8KBaDQ0X1rZTZLTqi3peT43s',
- 'dp': 'AN9kBoA5o6_Rl9zeqdsIdWFmv4DB5lEqlEnC7HlAP-3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhc',
- 'dq': 'ANtbSY6njfpPploQsF9sU26U0s7MsuLljM1E8uml8bVJE1mNsiu9MgpUvg39jEu9BtM2tDD7Y51AAIEmIQex1nM',
- 'qi': 'XLE5O360x-MhsdFXx8Vwz4304-MJg-oGSJXCK_ZWYOB_FGXFRTfebxCsSYi0YwJo-oNu96bvZCuMplzRI1liZw'
- }
- ]
- }
-
- JWT_OIDC_TEST_PRIVATE_KEY_PEM = """
------BEGIN RSA PRIVATE KEY-----
-MIICXQIBAAKBgQDfn1nKQshOSj8xw44oC2klFWSNLmK3BnHONCJ1bZfq0EQ5gIfg
-tlvB+Px8Ya+VS3OnK7Cdi4iU1fxO9ktN6c6TjmmmFevk8wIwqLthmCSF3r+3+h4e
-ddj7hucMsXWv05QUrCPoL6YUUz7Cgpz7ra24rpAmK5z7lsV+f3BEvXkrUQIDAQAB
-AoGAC0G3QGI6OQ6tvbCNYGCqq043YI/8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhs
-kURaDwk4+8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh/
-xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0CQQD13LrBTEDR44ei
-lQ/4TlCMPO5bytd1pAxHnrqgMnWovSIPSShAAH1feFugH7ZGu7RoBO7pYNb6N3ia
-C1idc7yjAkEA6Nfc6c8meTRkVRAHCF24LB5GLfsjoMB0tOeEO9w9Ous1a4o+D24b
-AePMUImAp3woFoNDRfWtlNktOqLel5PjewJBAN9kBoA5o6/Rl9zeqdsIdWFmv4DB
-5lEqlEnC7HlAP+3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhcCQQDb
-W0mOp436T6ZaELBfbFNulNLOzLLi5YzNRPLppfG1SRNZjbIrvTIKVL4N/YxLvQbT
-NrQw+2OdQACBJiEHsdZzAkBcsTk7frTH4yGx0VfHxXDPjfTj4wmD6gZIlcIr9lZg
-4H8UZcVFN95vEKxJiLRjAmj6g273pu9kK4ymXNEjWWJn
------END RSA PRIVATE KEY-----"""
diff --git a/services/auto-analyze/tests/__init__.py b/services/auto-analyze/tests/__init__.py
deleted file mode 100644
index 6c731676e..000000000
--- a/services/auto-analyze/tests/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""All of the unit, integration and postman test for this service."""
diff --git a/services/auto-analyze/tests/conftest.py b/services/auto-analyze/tests/conftest.py
deleted file mode 100644
index fcb30f769..000000000
--- a/services/auto-analyze/tests/conftest.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Common setup and fixtures for the pytest suite used by this service."""
-import pytest
-
-from auto_analyze import app as _app
-
-
-@pytest.fixture(scope='function')
-async def app():
- """Return a session-wide application configured in TEST mode."""
- return _app
diff --git a/services/auto-analyze/tests/unit/__init__.py b/services/auto-analyze/tests/unit/__init__.py
deleted file mode 100644
index 9780d23ad..000000000
--- a/services/auto-analyze/tests/unit/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""All of the unit tests for this service."""
diff --git a/services/auto-analyze/tests/unit/test_analyzer.py b/services/auto-analyze/tests/unit/test_analyzer.py
deleted file mode 100644
index 920176fde..000000000
--- a/services/auto-analyze/tests/unit/test_analyzer.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test suite for the name analyzer."""
-import pytest
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize('test_name, name, expected', [
- ('passing name', 'the name', True),
- ('failing name', 'failing name!', False),
-])
-async def test_auto_analyzer(test_name, name, expected):
- """Assert that all of the table tests pass for the name and expected outcome."""
- from auto_analyze.analyzer import auto_analyze
-
- r = await auto_analyze(name)
-
- assert r == expected
diff --git a/services/auto-analyze/tests/unit/test_auto_analyzer_api.py b/services/auto-analyze/tests/unit/test_auto_analyzer_api.py
deleted file mode 100644
index 57de30bdc..000000000
--- a/services/auto-analyze/tests/unit/test_auto_analyzer_api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test suite for the name analyzer API."""
-import pytest
-
-
-@pytest.mark.asyncio
-async def test_auto_analyzer_api(app):
- """Assert that the API can successfully recieve and process the name array."""
- data = {'names': ['person', 'man', 'woman', 'camera', 'tv', 'genius']}
- client = app.test_client()
- response = await client.post('/', json=data)
- assert response.status_code == 200
diff --git a/services/auto-analyze/wsgi.py b/services/auto-analyze/wsgi.py
deleted file mode 100644
index c5d46140d..000000000
--- a/services/auto-analyze/wsgi.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright © 2020 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Provides the WSGI entry point for running the application
-"""
-from auto_analyze import app
-
-if __name__ == "__main__":
- app.run()
diff --git a/services/emailer/.env.sample b/services/emailer/.env.sample
index 50956b18c..32359d165 100644
--- a/services/emailer/.env.sample
+++ b/services/emailer/.env.sample
@@ -15,10 +15,16 @@
NAMEX_API_VERSION=
REPORT_API_URL=
REPORT_API_VERSION=
+ LEGAL_API_URL=
+ LEGAL_API_VERSION=
# Letter urls
DECIDE_BUSINESS_URL=
COLIN_URL=
CORP_FORMS_URL=
SOCIETIES_URL=
- DASHBOARD_URL=
\ No newline at end of file
+ DASHBOARD_URL=
+ AUTH_WEB_URL=
+ BUSINESS_REGISTRY_URL=
+
+ NAMEX_LD_SDK_ID=sdk-075200eb-4ff7-4e0e-872a-848585d3d460
diff --git a/services/emailer/config.py b/services/emailer/config.py
index ed36b5615..1c2af23e8 100644
--- a/services/emailer/config.py
+++ b/services/emailer/config.py
@@ -73,7 +73,7 @@ class Config: # pylint: disable=too-few-public-methods
ENVIRONMENT = os.getenv("APP_ENV", "prod")
- LD_SDK_KEY = os.getenv("LD_SDK_KEY", None)
+ NAMEX_LD_SDK_ID = os.getenv("NAMEX_LD_SDK_ID", None)
SENTRY_DSN = os.getenv("SENTRY_DSN", None)
@@ -90,9 +90,12 @@ class Config: # pylint: disable=too-few-public-methods
NOTIFY_API_VERSION = os.getenv("NOTIFY_API_VERSION", "")
NAMEX_API_URL = os.getenv("NAMEX_API_URL", "")
NAMEX_API_VERSION = os.getenv("NAMEX_API_VERSION", "")
+ LEGAL_API_URL = os.getenv("LEGAL_API_URL", "https://legal-api-dev.apps.silver.devops.gov.bc.ca")
+ LEGAL_API_VERSION = os.getenv("LEGAL_API_VERSION", "/api/v1")
NOTIFY_API_URL = f"{NOTIFY_API_URL + NOTIFY_API_VERSION}/notify"
NAMEX_SVC_URL = f"{NAMEX_API_URL + NAMEX_API_VERSION}"
+ ENTITY_SVC_URL= f"{LEGAL_API_URL + LEGAL_API_VERSION}"
REPORT_SVC_URL = f'{os.getenv("REPORT_API_URL", None)}{os.getenv("REPORT_API_VERSION", None)}/reports'
@@ -102,12 +105,18 @@ class Config: # pylint: disable=too-few-public-methods
ACCOUNT_SVC_CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_SECRET")
ACCOUNT_SVC_TIMEOUT = os.getenv("KEYCLOAK_TIMEOUT")
+ ENTITY_SVC_AUTH_URL = os.getenv("KEYCLOAK_AUTH_TOKEN_URL")
+ ENTITY_SERVICE_ACCOUNT_CLIENT_ID = os.getenv("KEYCLOAK_CLIENT_ID")
+ ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_SECRET")
+
NAME_REQUEST_URL = os.getenv("NAME_REQUEST_URL", "")
DECIDE_BUSINESS_URL = os.getenv("DECIDE_BUSINESS_URL", "")
BUSINESS_URL = os.getenv("BUSINESS_URL", "")
COLIN_URL = os.getenv("COLIN_URL", "")
CORP_FORMS_URL = os.getenv("CORP_FORMS_URL", "")
SOCIETIES_URL = os.getenv("SOCIETIES_URL", "")
+ AUTH_WEB_URL = os.getenv("AUTH_WEB_URL", "")
+ BUSINESS_REGISTRY_URL = os.getenv("BUSINESS_REGISTRY_URL", "https://business-registry-dev.web.app/en-CA/")
PAYMENT_SVC_AUTH_URL = os.getenv('KEYCLOAK_AUTH_TOKEN_URL', '')
PAYMENT_SVC_AUTH_CLIENT_ID = os.getenv('KEYCLOAK_CLIENT_ID', '')
diff --git a/services/emailer/devops/gcp/clouddeploy-targets.yaml b/services/emailer/devops/gcp/clouddeploy-targets.yaml
deleted file mode 100644
index b81e27f33..000000000
--- a/services/emailer/devops/gcp/clouddeploy-targets.yaml
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: deploy.cloud.google.com/v1
-kind: Target
-metadata:
- name: dev
-description: Dev Environment
-deployParameters:
- deploy-env: "development"
- deploy-project-id: "a083gt-dev"
- service-name: "namex-emailer-dev"
- container-name: "namex-emailer-dev"
- cloudsql-instances: ""
- app-env: "dev"
- service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
-run:
- location: projects/a083gt-dev/locations/northamerica-northeast1
-executionConfigs:
-- usages: [DEPLOY, RENDER]
- artifactStorage: 'gs://c4hnrd-tools_clouddeploy/history'
----
-
-apiVersion: deploy.cloud.google.com/v1
-kind: Target
-metadata:
- name: test
-description: Test Environment
-deployParameters:
- deploy-env: "development"
- deploy-project-id: "a083gt-test"
- service-name: "namex-emailer-test"
- container-name: "namex-emailer-test"
- cloudsql-instances: ""
- app-env: "test"
- service-account: "sa-api@a083gt-test.iam.gserviceaccount.com"
-run:
- location: projects/a083gt-test/locations/northamerica-northeast1
-executionConfigs:
-- usages: [DEPLOY, RENDER]
- artifactStorage: 'gs://c4hnrd-tools_clouddeploy/history'
----
-
-apiVersion: deploy.cloud.google.com/v1
-kind: Target
-metadata:
- name: sandbox
-description: Sandbox Environment
-requireApproval: true
-deployParameters:
- deploy-env: "production"
- deploy-project-id: "a083gt-tools"
- service-name: "namex-emailer-sandbox"
- container-name: "namex-emailer-sandbox"
- cloudsql-instances: ""
- app-env: "sandbox"
- service-account: "sa-api@a083gt-tools.iam.gserviceaccount.com"
- max-scale: "50"
- container-concurrency: "20"
- container-port: "8080"
- resources-cpu: 4000m
- resources-memory: 8Gi
-run:
- location: projects/a083gt-tools/locations/northamerica-northeast1
-executionConfigs:
-- usages: [DEPLOY, RENDER]
- artifactStorage: 'gs://c4hnrd-tools_clouddeploy/history'
----
-
-apiVersion: deploy.cloud.google.com/v1
-kind: Target
-metadata:
- name: prod
-description: Production Environment
-requireApproval: true
-deployParameters:
- deploy-env: "production"
- deploy-project-id: "a083gt-prod"
- service-name: "namex-emailer-prod"
- container-name: "namex-emailer-prod"
- cloudsql-instances: ""
- app-env: "production"
- service-account: "sa-api@a083gt-prod.iam.gserviceaccount.com"
- max-scale: "50"
- container-concurrency: "20"
- container-port: "8080"
- resources-cpu: 4000m
- resources-memory: 8Gi
-run:
- location: projects/a083gt-prod/locations/northamerica-northeast1
-executionConfigs:
-- usages: [DEPLOY, RENDER]
- artifactStorage: 'gs://c4hnrd-tools_clouddeploy/history'
\ No newline at end of file
diff --git a/services/auto-analyze/devops/gcp/clouddeploy.yaml b/services/emailer/devops/gcp/clouddeploy.yaml
similarity index 72%
rename from services/auto-analyze/devops/gcp/clouddeploy.yaml
rename to services/emailer/devops/gcp/clouddeploy.yaml
index 0ed3d77cf..2e299269f 100644
--- a/services/auto-analyze/devops/gcp/clouddeploy.yaml
+++ b/services/emailer/devops/gcp/clouddeploy.yaml
@@ -15,7 +15,7 @@
apiVersion: deploy.cloud.google.com/v1
kind: DeliveryPipeline
metadata:
- name: namex-auto-analyze-pipeline
+ name: namex-emailer-pipeline
description: Deployment pipeline
serialPipeline:
stages:
@@ -28,10 +28,10 @@ serialPipeline:
- values:
deploy-env: "development"
deploy-project-id: "a083gt-dev"
- service-name: "namex-auto-analyze-dev"
- container-name: "namex-auto-analyze-dev"
- service-account: "mex-apisa-api@a083gt-dev.iam.gserviceaccount.com"
+ service-name: "namex-emailer-dev"
+ container-name: "namex-emailer-dev"
cloudsql-instances: ""
+ service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
- targetId: a083gt-test
profiles: [test]
strategy:
@@ -39,12 +39,12 @@ serialPipeline:
verify: false
deployParameters:
- values:
- deploy-env: "test"
+ deploy-env: "development"
deploy-project-id: "a083gt-test"
- service-name: "namex-auto-analyze-test"
- container-name: "namex-auto-analyze-test"
- service-account: "sa-api@a083gt-test.iam.gserviceaccount.com"
+ service-name: "namex-emailer-test"
+ container-name: "namex-emailer-test"
cloudsql-instances: ""
+ service-account: "sa-api@a083gt-test.iam.gserviceaccount.com"
- targetId: a083gt-sandbox
profiles: [sandbox]
strategy:
@@ -52,12 +52,12 @@ serialPipeline:
verify: false
deployParameters:
- values:
- deploy-env: "sandbox"
- deploy-project-id: "a083gt-integration"
- service-name: "namex-auto-analyze-sandbox"
- container-name: "namex-auto-analyze-sandbox"
- service-account: "sa-api@a083gt-integration.iam.gserviceaccount.com"
+ deploy-env: "production"
+ deploy-project-id: "a083gt-tools"
+ service-name: "namex-emailer-sandbox"
+ container-name: "namex-emailer-sandbox"
cloudsql-instances: ""
+ service-account: "sa-api@a083gt-integration.iam.gserviceaccount.com"
- targetId: a083gt-prod
profiles: [prod]
strategy:
@@ -67,9 +67,11 @@ serialPipeline:
- values:
deploy-env: "production"
deploy-project-id: "a083gt-prod"
- service-name: "namex-auto-analyze-prod"
- container-name: "namex-auto-analyze-prod"
- service-account: "sa-api@a083gt-prod.iam.gserviceaccount.com"
- max-scale: "10"
- container-concurrency: "20"
+ service-name: "namex-emailer-prod"
+ container-name: "namex-emailer-prod"
+ container-concurrency: "60"
+ resources-cpu: "8000m"
+ resources-memory: "4Gi"
cloudsql-instances: ""
+ service-account: "sa-api@a083gt-prod.iam.gserviceaccount.com"
+ max-scale: "10"
\ No newline at end of file
diff --git a/services/emailer/devops/vault.json b/services/emailer/devops/vault.json
index 7ccd49ea9..bf0dc7377 100644
--- a/services/emailer/devops/vault.json
+++ b/services/emailer/devops/vault.json
@@ -12,7 +12,8 @@
"application": [
"notify-api",
"namex-api",
- "report-api"
+ "report-api",
+ "legal-api"
]
},
{
@@ -26,14 +27,24 @@
"vault": "web-url",
"application": [
"name-request",
- "bcregistry"
+ "bcregistry",
+ "auth-web",
+ "business-registry-ui"
]
},
{
"vault": "web-url",
"application": [
"name-request",
- "bcregistry"
+ "bcregistry",
+ "auth-web",
+ "business-registry-ui"
+ ]
+ },
+ {
+ "vault": "launchdarkly",
+ "application": [
+ "namex"
]
}
]
\ No newline at end of file
diff --git a/services/emailer/devops/vaults.gcp.env b/services/emailer/devops/vaults.gcp.env
index 71c003e6a..731b7baeb 100644
--- a/services/emailer/devops/vaults.gcp.env
+++ b/services/emailer/devops/vaults.gcp.env
@@ -17,6 +17,11 @@ COLIN_URL="op://web-url/$APP_ENV/bcregistry/COLIN_URL"
BUSINESS_URL="op://web-url/$APP_ENV/bcregistry/BUSINESS_URL"
DECIDE_BUSINESS_URL="op://web-url/$APP_ENV/bcregistry/DECIDE_BUSINESS_URL"
NAME_REQUEST_URL="op://web-url/$APP_ENV/name-request/NAME_REQUEST_URL"
+AUTH_WEB_URL="op://web-url/$APP_ENV/auth-web/AUTH_WEB_URL"
+BUSINESS_REGISTRY_URL="op://web-url/$APP_ENV/business-registry-ui/BUSINESS_REGISTRY_URL"
REPORT_API_URL="op://API/$APP_ENV/report-api/REPORT_API_URL"
REPORT_API_VERSION="op://API/$APP_ENV/report-api/REPORT_API_VERSION"
+LEGAL_API_URL="op://API/$APP_ENV/legal-api/LEGAL_API_URL"
+LEGAL_API_VERSION="op://API/$APP_ENV/legal-api/LEGAL_API_VERSION"
+NAMEX_LD_SDK_ID="op://launchdarkly/$APP_ENV/namex/NAMEX_LD_SDK_ID"
SENTRY_DSN=""
diff --git a/services/emailer/flags.json b/services/emailer/flags.json
new file mode 100644
index 000000000..fe9e799c1
--- /dev/null
+++ b/services/emailer/flags.json
@@ -0,0 +1,8 @@
+{
+ "flagValues": {
+ "string-flag": "a string value",
+ "bool-flag": true,
+ "integer-flag": 10,
+ "enable-won-emails": false
+ }
+}
diff --git a/services/emailer/poetry.lock b/services/emailer/poetry.lock
index d03fb056c..60a024942 100644
--- a/services/emailer/poetry.lock
+++ b/services/emailer/poetry.lock
@@ -460,27 +460,6 @@ files = [
[package.dependencies]
Flask = ">=0.9"
-[[package]]
-name = "flask-jwt-oidc"
-version = "0.6.0"
-description = "Opinionated flask oidc client"
-optional = false
-python-versions = "^3.9"
-files = []
-develop = false
-
-[package.dependencies]
-cachelib = "^0.13.0"
-Flask = "^3"
-python-jose = "^3.3.0"
-six = "^1.16.0"
-
-[package.source]
-type = "git"
-url = "https://github.com/bolyachevets/flask-jwt-oidc.git"
-reference = "bump-flask-version"
-resolved_reference = "f023e01b537e454dfa569bc45575f1f0680daf49"
-
[[package]]
name = "flask-marshmallow"
version = "0.14.0"
@@ -1489,7 +1468,7 @@ ecdsa = "^0.18.0"
flask = "^3.0.2"
flask-caching = "^1.10.1"
flask-cors = "^4.0.0"
-flask-jwt-oidc = {git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version"}
+flask-jwt-oidc = { git = "https://github.com/seeker25/flask-jwt-oidc.git" }
flask-marshmallow = "^0.14.0"
flask-migrate = "^2.7.0"
flask-moment = "^0.11.0"
@@ -1552,7 +1531,7 @@ zipp = "^3.8.1"
type = "git"
url = "https://github.com/bcgov/namex.git"
reference = "HEAD"
-resolved_reference = "99b2af049435ae3ee41d67babf26f5caa615d055"
+resolved_reference = "810650b8fc1e4c1b83d5ba7347eda4176bf5915e"
subdirectory = "api"
[[package]]
diff --git a/services/emailer/src/namex_emailer/__init__.py b/services/emailer/src/namex_emailer/__init__.py
index c21b6588c..1f4e0ea9f 100644
--- a/services/emailer/src/namex_emailer/__init__.py
+++ b/services/emailer/src/namex_emailer/__init__.py
@@ -47,6 +47,8 @@
from .resources import register_endpoints
from .services import queue
+from namex.services import flags
+
def create_app(environment: Config = Production, **kwargs) -> Flask:
"""Return a configured Flask App using the Factory method."""
@@ -62,6 +64,7 @@ def create_app(environment: Config = Production, **kwargs) -> Flask:
send_default_pii=False,
)
+ flags.init_app(app)
queue.init_app(app)
register_endpoints(app)
diff --git a/services/emailer/src/namex_emailer/email_processors/nr_notification.py b/services/emailer/src/namex_emailer/email_processors/nr_notification.py
index 7d41f548c..435f5ce96 100644
--- a/services/emailer/src/namex_emailer/email_processors/nr_notification.py
+++ b/services/emailer/src/namex_emailer/email_processors/nr_notification.py
@@ -24,9 +24,9 @@
from jinja2 import Template
from simple_cloudevent import SimpleCloudEvent
+from namex.resources.name_requests import ReportResource
from namex_emailer.email_processors import substitute_template_parts
-from namex_emailer.services.helpers import as_legislation_timezone, format_as_report_string, query_nr_number
-
+from namex_emailer.services.helpers import as_legislation_timezone, format_as_report_string, get_magic_link, query_nr_number
class Option(Enum):
"""NR notification option."""
@@ -43,31 +43,6 @@ class Option(Enum):
-def __is_modernized(legal_type):
- modernized_list = ["GP", "DBA", "FR", "CP", "BC"]
- return legal_type in modernized_list
-
-
-def __is_colin(legal_type):
- colin_list = ["CR", "UL", "CC", "XCR", "XUL", "RLC"]
- return legal_type in colin_list
-
-
-def _is_society(legal_type):
- society_list = ["SO", "XSO"]
- return legal_type in society_list
-
-
-def __get_instruction_group(legal_type):
- if __is_modernized(legal_type):
- return "modernized"
- if __is_colin(legal_type):
- return "colin"
- if _is_society(legal_type):
- return "so"
- return ""
-
-
def process(email_info: SimpleCloudEvent, option) -> dict: # pylint: disable-msg=too-many-locals
"""
Build the email for Name Request notification.
@@ -105,12 +80,15 @@ def process(email_info: SimpleCloudEvent, option) -> dict: # pylint: disable-ms
corp_online_url = current_app.config.get("COLIN_URL")
form_page_url = current_app.config.get("CORP_FORMS_URL")
societies_url = current_app.config.get("SOCIETIES_URL")
+ magic_link = get_magic_link(nr_number, nr_data["applicants"]["emailAddress"], nr_data["applicants"]["phoneNumber"])
file_name_suffix = option.upper()
if option == Option.BEFORE_EXPIRY.value:
if "entity_type_cd" in nr_data:
legal_type = nr_data["entity_type_cd"]
- group = __get_instruction_group(legal_type)
+ request_action = nr_data["request_action_cd"]
+ corpNum = nr_data["corpNum"]
+ group = ReportResource._get_instruction_group(legal_type, request_action, corpNum)
if group:
instruction_group = "-" + group
file_name_suffix += instruction_group.upper()
@@ -130,6 +108,7 @@ def process(email_info: SimpleCloudEvent, option) -> dict: # pylint: disable-ms
corp_online_url=corp_online_url,
form_page_url=form_page_url,
societies_url=societies_url,
+ magic_link=magic_link
)
# get recipients
diff --git a/services/emailer/src/namex_emailer/email_processors/nr_result.py b/services/emailer/src/namex_emailer/email_processors/nr_result.py
index edfc783f8..e6759f995 100644
--- a/services/emailer/src/namex_emailer/email_processors/nr_result.py
+++ b/services/emailer/src/namex_emailer/email_processors/nr_result.py
@@ -10,7 +10,7 @@
from simple_cloudevent import SimpleCloudEvent
from datetime import datetime
-from namex_emailer.services.helpers import query_nr_number
+from namex_emailer.services.helpers import get_magic_link, query_nr_number
RESULT_EMAIL_SUBJECT = 'Name Request Results from Corporate Registry'
CONSENT_EMAIL_SUBJECT = 'Consent Received by Corporate Registry'
@@ -35,22 +35,28 @@ def email_consent_letter(email_info: SimpleCloudEvent):
ReportResource._update_entity_and_action_code(nr_model)
report_name = nr_number + ' - ' + CONSENT_EMAIL_SUBJECT
recipient_emails = []
+ recipient_phones = []
applicants = nr_model['applicants']
if isinstance(applicants, dict):
recipient_emails.append(applicants['emailAddress'])
+ recipient_phones.append(applicants['phoneNumber'])
else:
for applicant in applicants:
recipient_emails.append(applicant['emailAddress'])
+ recipient_phones.append(applicant['phoneNumber'])
if not nr_model['expirationDate']:
ReportResource._add_expiry_date(nr_model)
recipients = ','.join(recipient_emails)
template_path = current_app.config.get('REPORT_TEMPLATE_PATH')
file_name = 'consent'
- instruction_group = ReportResource._get_instruction_group(nr_model['entity_type_cd'])
+ legal_type = nr_model['entity_type_cd']
+ request_action = nr_model["request_action_cd"]
+ corpNum = nr_model["corpNum"]
+ instruction_group = ReportResource._get_instruction_group(legal_type, request_action, corpNum)
if instruction_group:
file_name = f"{file_name}-{instruction_group}"
email_template = Path(f'{template_path}/{file_name}.md').read_text()
- email_body = _build_email_body(email_template, nr_model)
+ email_body = _build_email_body(email_template, nr_model, recipient_emails[0], recipient_phones[0])
email = {
'recipients': recipients,
'content': {
@@ -80,17 +86,23 @@ def email_report(email_info: SimpleCloudEvent):
return make_response(jsonify(message=str(report)), status_code)
report_name = nr_number + ' - ' + RESULT_EMAIL_SUBJECT
recipient_emails = []
+ recipient_phones = []
applicants = nr_model['applicants']
if isinstance(applicants, dict):
recipient_emails.append(applicants['emailAddress'])
+ recipient_phones.append(applicants['phoneNumber'])
else:
for applicant in applicants:
recipient_emails.append(applicant['emailAddress'])
+ recipient_phones.append(applicant['phoneNumber'])
recipients = ','.join(recipient_emails)
template_path = current_app.config.get('REPORT_TEMPLATE_PATH')
email_template = Path(f'{template_path}/rejected.md').read_text()
if nr_model['stateCd'] in [State.APPROVED, State.CONDITIONAL]:
- instruction_group = ReportResource._get_instruction_group(nr_model['entity_type_cd'])
+ legal_type = nr_model['entity_type_cd']
+ request_action = nr_model["request_action_cd"]
+ corpNum = nr_model["corpNum"]
+ instruction_group = ReportResource._get_instruction_group(legal_type, request_action, corpNum)
file_name=''
if nr_model['consentFlag'] in ['Y', 'R']:
file_name = 'conditional'
@@ -103,7 +115,7 @@ def email_report(email_info: SimpleCloudEvent):
email_template = Path(f'{template_path}/{file_name}.md').read_text()
- email_body = _build_email_body(email_template, nr_model)
+ email_body = _build_email_body(email_template, nr_model, recipient_emails[0], recipient_phones[0])
email = {
'recipients': recipients,
@@ -128,7 +140,7 @@ def email_report(email_info: SimpleCloudEvent):
return handle_exception(err, 'Error retrieving the report.', 500)
-def _build_email_body(template: str, nr_model):
+def _build_email_body(template: str, nr_model, email, phone):
var_map = {
'{{NAMES_INFORMATION_URL}}': current_app.config.get('NAMES_INFORMATION_URL'),
'{{NAME_REQUEST_URL}}': current_app.config.get('NAME_REQUEST_URL'),
@@ -138,10 +150,11 @@ def _build_email_body(template: str, nr_model):
'{{CORP_ONLINE_URL}}': current_app.config.get('COLIN_URL'),
'{{CORP_FORMS_URL}}': current_app.config.get('CORP_FORMS_URL'),
'{{SOCIETIES_URL}}': current_app.config.get('SOCIETIES_URL'),
- '{{EXPIRATION_DATE}}': nr_model['expirationDate']
+ '{{EXPIRATION_DATE}}': nr_model['expirationDate'],
+ '{{MAGIC_LINK}}': get_magic_link(nr_model['nrNum'], email, phone)
}
for template_string, val in var_map.items():
if isinstance(val, datetime):
val = val.strftime(DATE_FORMAT)
template = template.replace(template_string, val)
- return template
\ No newline at end of file
+ return template
diff --git a/services/emailer/src/namex_emailer/email_templates/NR-BEFORE-EXPIRY-NEW.html b/services/emailer/src/namex_emailer/email_templates/NR-BEFORE-EXPIRY-NEW.html
new file mode 100644
index 000000000..6fafa0eb3
--- /dev/null
+++ b/services/emailer/src/namex_emailer/email_templates/NR-BEFORE-EXPIRY-NEW.html
@@ -0,0 +1,79 @@
+# Your Name Request is Expiring Soon
+
+Your Name Request {{ nr_number }} for **{{ legal_name }}** is due to expire on {{ expiration_date }}.
+
+If your name request expires:
+
+* The approved name will no longer be reserved for completing your application
+* You will need to submit and pay for a new Name Request
+
+Complete your application with this name before it expires. If you are not ready to complete your application yet, you can renew this Name Request to extend the expiry date.
+
+---
+
+# Option 1: Complete your Application
+
+At the moment, there are two systems you can use to complete your application.
+
+**Use BC Registries**
+
+Complete your application using BC Registries and Online Services, if the following statements apply to you:
+
+* This is your first time registering a business in B.C.
+
+* You plan to only use the following basic filings:
+ * Incorporation Application
+ * Change of Address
+ * Change of Director
+ * Business Alterations
+
+* You will be managing your own business information online instead of using a law firm or another third party.
+
+**Follow these steps to complete your application**
+
+If you have a BC Registries Account:
+
+1. Click on this link: [BC Registries Business Dashboard]({{ magic_link }})
+2. If you’re not signed in, log in with your BC Registries Account
+3. Click “Incorporate”
+
+If you don’t have a BC Registries account:
+
+1. [create one here]({{ decide_business_url }}) and then come back to this email
+2. Click on this link: [BC Registries Business Dashboard]({{ magic_link }})
+3. If you’re not signed in, log in with your BC Registries Account
+4. Click “Incorporate”
+
+**BC Corporate Online**
+
+Use [BC Corporate Online]({{corp_online_url}}) to complete your application if you are an existing user or if you are a new user with more complex filing needs.
+
+1. Go to [BC Corporate Online]({{corp_online_url}})
+2. Complete and submit the form along with any required documentation and payment
+
+---
+
+# Option 2: Renew your Name Request
+
+You may renew your Name Request to extend the expiry date by 56 days from the original date of expiry:
+
+1. Visit [BC Registries and Online Services]({{ name_request_url }})
+2. Click on the "Manage My Name Request" tab
+3. Enter the NR number, and applicant's phone number or email address
+4. Click the "Retrieve Name Request" button
+5. Click "Renew Name Request ($30)"
+6. Make a Payment
+
+---
+
+# Your Name Request Details
+
+**Name Request Number:**
+{{ nr_number }}
+
+**Name Request Expiry Date and Time:**
+{{ expiration_date }}
+
+---
+
+[[nr-footer.html]]
diff --git a/services/emailer/src/namex_emailer/email_templates/approved-new.md b/services/emailer/src/namex_emailer/email_templates/approved-new.md
new file mode 100644
index 000000000..91d149b1b
--- /dev/null
+++ b/services/emailer/src/namex_emailer/email_templates/approved-new.md
@@ -0,0 +1,73 @@
+# Results of your Name Request
+
+Your Name Request is **approved**. Follow the steps below to complete your application using this name. If the Name Request expires before the application is completed, a new Name Request will be required.
+
+---
+
+# You're not done yet!
+
+At the moment, there are two systems you can use to complete your application.
+
+---
+
+# Option 1: Use BC Registries
+
+Complete your application using BC Registries and Online Services, if the following statements apply to you:
+
+- This is your first time registering a business in B.C.
+
+- You plan to only use the following basic filings:
+ - Incorporation Application
+ - Change of Address
+ - Change of Director
+ - Business Alterations
+
+- You will be managing your own business information online instead of using a law firm or another third party.
+
+**Follow these steps to complete your application**
+
+If you have a BC Registries Account:
+1. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+2. If you’re not signed in, log in with your BC Registries Account
+3. Click “Incorporate”
+
+If you don’t have a BC Registries account:
+1. [Create an account]({{BUSINESS_URL}}) and then come back to this email
+2. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+3. If you’re not signed in, log in with your BC Registries Account
+4. Click “Incorporate”
+
+---
+
+# Option 2: Use BC Corporate Online
+
+Use [BC Corporate Online]({{CORP_ONLINE_URL}}) to complete your application if you are an existing user or if you are a new user with more complex filing needs.
+
+1. Go to [BC Corporate Online]({{CORP_ONLINE_URL}})
+2. Complete and submit the form along with any required documentation and payment
+
+---
+
+# Your Name Request Details
+
+**Name Request Number:**
+{{NAMEREQUEST_NUMBER}}
+
+**Name Request Expiry Date and Time:**
+{{EXPIRATION_DATE}}
+
+---
+
+# Attached to this Email
+
+The following documents are attached to this email:
+
+* Results of Name Request
+
+---
+
+**Business Registry**
+BC Registries and Online Services
+Toll Free: 1-877-370-1033
+Victoria Office: 250-370-1033
+Email: [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
diff --git a/services/emailer/src/namex_emailer/email_templates/conditional-new.md b/services/emailer/src/namex_emailer/email_templates/conditional-new.md
new file mode 100644
index 000000000..b7e3ebe98
--- /dev/null
+++ b/services/emailer/src/namex_emailer/email_templates/conditional-new.md
@@ -0,0 +1,79 @@
+# Results of your Name Request
+
+Your Name Request is **conditionally approved**. Follow the steps below to complete your application with this name. If the Name Request expires before the application is completed, a new Name Request will be required.
+
+---
+
+# You're not done yet!
+
+At the moment, there are two systems you can use to complete your application.
+
+---
+
+# Option 1: Use BC Registries
+
+Complete your application using BC Registries and Online Services, if the following statements apply to you:
+
+- This is your first time registering a business in B.C.
+
+- You plan to only use the following basic filings:
+ - Incorporation Application
+ - Change of Address
+ - Change of Director
+ - Business Alterations
+
+- You will be managing your own business information online instead of using a law firm or another third party.
+
+**Follow these steps to complete your application**
+
+If you have a BC Registries Account:
+1. Send in your consent letter to [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
+2. Receive confirmation that the consent letter has been accepted
+3. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+4. If you’re not signed in, log in with your BC Registries Account
+5. Click “Incorporate”
+
+If you don’t have a BC Registries account:
+1. Send in your consent letter to [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
+2. Receive confirmation that the consent letter has been accepted
+3. [Create an account]({{BUSINESS_URL}}) and then come back to this email
+4. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+5. If you’re not signed in, log in with your BC Registries Account
+6. Click “Incorporate”
+
+---
+
+# Option 2: Use BC Corporate Online
+
+Use [BC Corporate Online]({{CORP_ONLINE_URL}}) to complete your application if you are an existing user or if you are a new user with more complex filing needs.
+
+1. Send in your consent letter to [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
+2. Receive confirmation that the consent letter has been accepted
+3. Go to [BC Corporate Online]({{CORP_ONLINE_URL}})
+4. Complete and submit the form along with any required documentation and payment
+
+---
+
+# Your Name Request Details
+
+**Name Request Number:**
+{{NAMEREQUEST_NUMBER}}
+
+**Name Request Expiry Date and Time:**
+{{EXPIRATION_DATE}}
+
+---
+
+# Attached to this Email
+
+The following documents are attached to this email:
+
+* Results of Name Request
+
+---
+
+**Business Registry**
+BC Registries and Online Services
+Toll Free: 1-877-370-1033
+Victoria Office: 250-370-1033
+Email: [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
diff --git a/services/emailer/src/namex_emailer/email_templates/consent-new.md b/services/emailer/src/namex_emailer/email_templates/consent-new.md
new file mode 100644
index 000000000..eeaecf2a8
--- /dev/null
+++ b/services/emailer/src/namex_emailer/email_templates/consent-new.md
@@ -0,0 +1,73 @@
+# Results of your Name Request
+
+The consent letter for your Name Request has been received and we have **approved** your name request. Follow the steps below to complete your application using this name. If the Name Request expires before the business is registered, a new Name Request will be required.
+
+---
+
+# You're not done yet!
+
+At the moment, there are two systems you can use to complete your application.
+
+---
+
+# Option 1: Use BC Registries
+
+Complete your application using BC Registries and Online Services, if the following statements apply to you:
+
+- This is your first time registering a business in B.C.
+
+- You plan to only use the following basic filings:
+ - Incorporation Application
+ - Change of Address
+ - Change of Director
+ - Business Alterations
+
+- You will be managing your own business information online instead of using a law firm or another third party.
+
+**Follow these steps to complete your application**
+
+If you have a BC Registries Account:
+1. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+2. If you’re not signed in, log in with your BC Registries Account
+3. Click “Incorporate”
+
+If you don’t have a BC Registries account:
+1. [Create an account]({{BUSINESS_URL}}) and then come back to this email
+2. Click on this link: [BC Registries Business Dashboard]({{MAGIC_LINK}})
+3. If you’re not signed in, log in with your BC Registries Account
+4. Click “Incorporate”
+
+---
+
+# Option 2: Use BC Corporate Online
+
+Use [BC Corporate Online]({{CORP_ONLINE_URL}}) to complete your application if you are an existing user or if you are a new user with more complex filing needs.
+
+1. Go to [BC Corporate Online]({{CORP_ONLINE_URL}})
+2. Complete and submit the form along with any required documentation and payment
+
+---
+
+# Your Name Request Details
+
+**Name Request Number:**
+{{NAMEREQUEST_NUMBER}}
+
+**Name Request Expiry Date and Time:**
+{{EXPIRATION_DATE}}
+
+---
+
+# Attached to this Email
+
+The following documents are attached to this email:
+
+* Results of Name Request
+
+---
+
+**Business Registry**
+BC Registries and Online Services
+Toll Free: 1-877-370-1033
+Victoria Office: 250-370-1033
+Email: [BCRegistries@gov.bc.ca](mailto:BCRegistries@gov.bc.ca)
diff --git a/services/emailer/src/namex_emailer/email_templates/template-parts/name-request/nrDetails.html b/services/emailer/src/namex_emailer/email_templates/template-parts/name-request/nrDetails.html
index 7d5ab6b4e..e247d1520 100644
--- a/services/emailer/src/namex_emailer/email_templates/template-parts/name-request/nrDetails.html
+++ b/services/emailer/src/namex_emailer/email_templates/template-parts/name-request/nrDetails.html
@@ -5,7 +5,7 @@
Business Type:
Request Status:
Request Type:
- Retrieval Time:
+ Retrieved Date and Time:
|
diff --git a/services/emailer/src/namex_emailer/resources/worker.py b/services/emailer/src/namex_emailer/resources/worker.py
index b4cd54c92..5b5a6ba44 100644
--- a/services/emailer/src/namex_emailer/resources/worker.py
+++ b/services/emailer/src/namex_emailer/resources/worker.py
@@ -133,6 +133,7 @@ def process_email(email_msg: SimpleCloudEvent): # pylint: disable=too-many-bran
def send_email(email: dict, token: str):
"""Send the email"""
+ structured_log(request, "INFO", f"Send Email: {email}")
return requests.post(
f'{current_app.config.get("NOTIFY_API_URL", "")}',
json=email,
diff --git a/services/emailer/src/namex_emailer/services/helpers.py b/services/emailer/src/namex_emailer/services/helpers.py
index 4ae233022..69f2af4e4 100644
--- a/services/emailer/src/namex_emailer/services/helpers.py
+++ b/services/emailer/src/namex_emailer/services/helpers.py
@@ -4,6 +4,7 @@
import requests
from flask import current_app
from cachetools import cached, TTLCache
+from urllib.parse import urlencode
@staticmethod
@cached(cache=TTLCache(maxsize=1, ttl=180))
@@ -54,4 +55,17 @@ def query_nr_number(identifier: str):
'Authorization': 'Bearer ' + token
})
- return nr_response
\ No newline at end of file
+ return nr_response
+
+
+@staticmethod
+def get_magic_link(nr_number, email, phone):
+ """Return a magic link."""
+ BUSINESS_REGISTRY_URL = current_app.config.get("BUSINESS_REGISTRY_URL")
+ params = {
+ 'nr': nr_number,
+ 'email': email,
+ 'phone': phone
+ }
+ encoded_params = urlencode(params)
+ return f'{BUSINESS_REGISTRY_URL}incorporateNow/?{encoded_params}'
diff --git a/services/namex-pay/config.py b/services/namex-pay/config.py
index 26dd500fb..b19d913b2 100644
--- a/services/namex-pay/config.py
+++ b/services/namex-pay/config.py
@@ -72,7 +72,10 @@ class Config(): # pylint: disable=too-few-public-methods
DB_NAME = os.getenv('NAMEX_DATABASE_NAME', '')
DB_HOST = os.getenv('NAMEX_DATABASE_HOST', '')
DB_PORT = os.getenv('NAMEX_DATABASE_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
+ if DB_UNIX_SOCKET := os.getenv('NAMEX_DATABASE_UNIX_SOCKET', None):
+ SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}'
+ else:
+ SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
GCP_AUTH_KEY = os.getenv('BUSINESS_GCP_AUTH_KEY', None)
EMAILER_TOPIC = os.getenv('NAMEX_MAILER_TOPIC', '')
diff --git a/services/namex-pay/devops/gcp/clouddeploy.yaml b/services/namex-pay/devops/gcp/clouddeploy.yaml
index 73ebf0aca..5f94e1f44 100644
--- a/services/namex-pay/devops/gcp/clouddeploy.yaml
+++ b/services/namex-pay/devops/gcp/clouddeploy.yaml
@@ -31,7 +31,7 @@ serialPipeline:
service-name: "namex-pay-dev"
container-name: "namex-pay-dev"
service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
- cloudsql-instances: ""
+ cloudsql-instances: "a083gt-dev:northamerica-northeast1:namex-db-dev"
- targetId: a083gt-test
profiles: [test]
strategy:
diff --git a/services/namex-pay/devops/vaults.gcp.env b/services/namex-pay/devops/vaults.gcp.env
index cd14be9b7..1e59f0ba4 100644
--- a/services/namex-pay/devops/vaults.gcp.env
+++ b/services/namex-pay/devops/vaults.gcp.env
@@ -1,10 +1,10 @@
SENTRY_ENABLE="op://sentry/$APP_ENV/examination/SENTRY_ENABLE"
SENTRY_DSN="op://sentry/$APP_ENV/examination/SENTRY_DSN"
-NAMEX_DATABASE_HOST="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_HOST"
-NAMEX_DATABASE_PORT="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_PORT"
-NAMEX_DATABASE_NAME="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_NAME"
-NAMEX_DATABASE_USERNAME="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_USERNAME"
-NAMEX_DATABASE_PASSWORD="op://database/$APP_ENV/namex-db/NAMEX_DATABASE_PASSWORD"
+NAMEX_DATABASE_UNIX_SOCKET="op://database/$APP_ENV/namex-db-gcp/DATABASE_UNIX_SOCKET"
+NAMEX_DATABASE_PORT="op://database/$APP_ENV/namex-db-gcp/DATABASE_PORT"
+NAMEX_DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME"
+NAMEX_DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME"
+NAMEX_DATABASE_PASSWORD="op://database/$APP_ENV/namex-db-gcp/DATABASE_PASSWORD"
PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL"
PAY_API_VERSION="op://API/$APP_ENV/pay-api/PAY_API_VERSION"
AUDIENCE="op://gcp-queue/$APP_ENV/base/AUDIENCE"
diff --git a/services/namex-pay/poetry.lock b/services/namex-pay/poetry.lock
index 91992e09e..dd7253e32 100644
--- a/services/namex-pay/poetry.lock
+++ b/services/namex-pay/poetry.lock
@@ -621,7 +621,7 @@ Flask = ">=0.9"
[[package]]
name = "flask-jwt-oidc"
-version = "0.6.0"
+version = "0.7.0"
description = "Opinionated flask oidc client"
optional = false
python-versions = "^3.9"
@@ -629,16 +629,16 @@ files = []
develop = false
[package.dependencies]
-cachelib = "^0.13.0"
-Flask = "^3"
+cachelib = "0.*"
+Flask = ">=2"
python-jose = "^3.3.0"
six = "^1.16.0"
[package.source]
type = "git"
-url = "https://github.com/bolyachevets/flask-jwt-oidc.git"
-reference = "bump-flask-version"
-resolved_reference = "f023e01b537e454dfa569bc45575f1f0680daf49"
+url = "https://github.com/seeker25/flask-jwt-oidc.git"
+reference = "HEAD"
+resolved_reference = "563f01ef6453eb0ea1cc0a2d71c6665350c853ff"
[[package]]
name = "flask-marshmallow"
@@ -1845,7 +1845,7 @@ ecdsa = "^0.18.0"
flask = "^3.0.2"
flask-caching = "^1.10.1"
flask-cors = "^4.0.0"
-flask-jwt-oidc = {git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version"}
+flask-jwt-oidc = {git = "https://github.com/seeker25/flask-jwt-oidc.git"}
flask-marshmallow = "^0.14.0"
flask-migrate = "^2.7.0"
flask-moment = "^0.11.0"
@@ -1908,7 +1908,7 @@ zipp = "^3.8.1"
type = "git"
url = "https://github.com/bcgov/namex.git"
reference = "HEAD"
-resolved_reference = "79316e352cd643f71f43e0be3c6e553a7531273e"
+resolved_reference = "58238aaf5facc5877a003b1fed4f72a087580f2c"
subdirectory = "api"
[[package]]
diff --git a/services/solr-names-updater/devops/gcp/clouddeploy.yaml b/services/solr-names-updater/devops/gcp/clouddeploy.yaml
index c06b3ac13..42710efbb 100644
--- a/services/solr-names-updater/devops/gcp/clouddeploy.yaml
+++ b/services/solr-names-updater/devops/gcp/clouddeploy.yaml
@@ -31,7 +31,7 @@ serialPipeline:
service-name: "namex-solr-names-updater-dev"
container-name: "namex-solr-names-updater-dev"
service-account: "sa-api@a083gt-dev.iam.gserviceaccount.com"
- cloudsql-instances: ""
+ cloudsql-instances: "a083gt-dev:northamerica-northeast1:namex-db-dev"
- targetId: a083gt-test
profiles: [test]
strategy:
diff --git a/services/solr-names-updater/devops/vaults.gcp.env b/services/solr-names-updater/devops/vaults.gcp.env
index e68b69572..16f23df7e 100644
--- a/services/solr-names-updater/devops/vaults.gcp.env
+++ b/services/solr-names-updater/devops/vaults.gcp.env
@@ -1,8 +1,8 @@
-DATABASE_USERNAME="op://namex/$APP_ENV/postgres-namex/DATABASE_USERNAME"
-DATABASE_PASSWORD="op://namex/$APP_ENV/postgres-namex/DATABASE_PASSWORD"
-DATABASE_PORT="op://namex/$APP_ENV/postgres-namex/DATABASE_PORT"
-DATABASE_NAME="op://namex/$APP_ENV/postgres-namex/DATABASE_NAME"
-DATABASE_HOST="op://namex/$APP_ENV/postgres-namex/DATABASE_HOST"
+DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME"
+DATABASE_PASSWORD="op://database/$APP_ENV/namex-db-gcp/DATABASE_PASSWORD"
+DATABASE_PORT="op://database/$APP_ENV/namex-db-gcp/DATABASE_PORT"
+DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME"
+DATABASE_UNIX_SOCKET="op://database/$APP_ENV/namex-db-gcp/DATABASE_UNIX_SOCKET"
SOLR_FEEDER_API_URL="op://namex/$APP_ENV/solr-updater/SOLR_FEEDER_API_URL"
JWT_OIDC_AUDIENCE="op://namex/$APP_ENV/jwt/JWT_OIDC_AUDIENCE"
JWT_OIDC_CLIENT_SECRET="op://namex/$APP_ENV/jwt/JWT_OIDC_CLIENT_SECRET"
diff --git a/services/solr-names-updater/poetry.lock b/services/solr-names-updater/poetry.lock
index 0b415a66a..d0494fcfe 100644
--- a/services/solr-names-updater/poetry.lock
+++ b/services/solr-names-updater/poetry.lock
@@ -602,7 +602,7 @@ Flask = ">=0.9"
[[package]]
name = "flask-jwt-oidc"
-version = "0.6.0"
+version = "0.7.0"
description = "Opinionated flask oidc client"
optional = false
python-versions = "^3.9"
@@ -610,16 +610,16 @@ files = []
develop = false
[package.dependencies]
-cachelib = "^0.13.0"
-Flask = "^3"
+cachelib = "0.*"
+Flask = ">=2"
python-jose = "^3.3.0"
six = "^1.16.0"
[package.source]
type = "git"
-url = "https://github.com/bolyachevets/flask-jwt-oidc.git"
-reference = "bump-flask-version"
-resolved_reference = "f023e01b537e454dfa569bc45575f1f0680daf49"
+url = "https://github.com/seeker25/flask-jwt-oidc.git"
+reference = "HEAD"
+resolved_reference = "563f01ef6453eb0ea1cc0a2d71c6665350c853ff"
[[package]]
name = "flask-marshmallow"
@@ -1782,7 +1782,7 @@ ecdsa = "^0.18.0"
flask = "^3.0.2"
flask-caching = "^1.10.1"
flask-cors = "^4.0.0"
-flask-jwt-oidc = {git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version"}
+flask-jwt-oidc = {git = "https://github.com/seeker25/flask-jwt-oidc.git"}
flask-marshmallow = "^0.14.0"
flask-migrate = "^2.7.0"
flask-moment = "^0.11.0"
@@ -1845,7 +1845,7 @@ zipp = "^3.8.1"
type = "git"
url = "https://github.com/bcgov/namex.git"
reference = "HEAD"
-resolved_reference = "79316e352cd643f71f43e0be3c6e553a7531273e"
+resolved_reference = "5e3d013d788ec027147fac6ee3be8df10cacc37d"
subdirectory = "api"
[[package]]
diff --git a/services/solr-names-updater/src/solr_names_updater/resources/worker.py b/services/solr-names-updater/src/solr_names_updater/resources/worker.py
index 224e5a8d6..12717cbc4 100644
--- a/services/solr-names-updater/src/solr_names_updater/resources/worker.py
+++ b/services/solr-names-updater/src/solr_names_updater/resources/worker.py
@@ -62,6 +62,10 @@ def worker():
structured_log(request, message=f'Begin process_nr_state_change for nr_event_msg: {ce}')
process_names_event_message(ce, current_app)
structured_log(request, message=f'Completed process_nr_state_change for nr_event_msg: {ce}')
+ elif is_processable_firm(ce):
+ structured_log(request, message=f'Begin process_nr_state_change for firm, nr_event_msg: {ce}')
+ process_names_event_message_firm(ce, current_app)
+ structured_log(request, message=f'Completed process_nr_state_change for firm, nr_event_msg: {ce}')
else:
# Skip processing of message as it isn't a message type this queue listener processes
structured_log(request, message=f'Skipping processing of nr event message as message type is not supported: {ce}')
@@ -103,6 +107,18 @@ def is_processable(msg: dict):
return False
+def is_processable_firm(msg: dict):
+    """Determine if message is processable and a firm."""
+ if msg and is_names_event_msg_type(msg) \
+ and (nr_num := msg.data
+ .get('request', {})
+ .get('nrNum', None)) \
+ and (nr := RequestDAO.find_by_nr(nr_num)) \
+ and nr.entity_type_cd in ('FR', 'GP'):
+ return True
+
+ return False
+
def process_names_event_message(msg: dict, flask_app: Flask):
"""Update solr accordingly based on incoming nr state changes."""
@@ -127,3 +143,25 @@ def process_names_event_message(msg: dict, flask_app: Flask):
else:
structured_log(f'skipping - no matching state change message: {msg}')
+
+def process_names_event_message_firm(msg: dict, flask_app: Flask):
+ """Update solr for the firm accordingly based on incoming nr state changes."""
+ if not flask_app or not msg:
+ raise Exception('Flask App or msg not available.')
+
+ structured_log( f'entering processing of nr event msg for firm: {msg}')
+
+ request_state_change = msg.data.get('request', None)
+
+ if request_state_change:
+ new_state = request_state_change.get('newState')
+ if new_state in ('APPROVED', 'CONDITIONAL'):
+ process_names_add(request_state_change)
+ elif new_state in ('CANCELLED', 'RESET'):
+ process_names_delete(request_state_change)
+ else:
+ structured_log(f'no names processing required for request state change message: {msg}')
+
+ else:
+ structured_log(f'skipping - no matching state change message: {msg}')
+
diff --git a/solr/README.md b/solr/README.md
new file mode 100644
index 000000000..5051a7264
--- /dev/null
+++ b/solr/README.md
@@ -0,0 +1,1950 @@
+# Namex SOLR Implementation
+
+## Search Core Config
+- luceneMatchVersion
+```
+ 6.6.3
+```
+Specifies the Lucene version compatibility
+Critical for ensuring consistent behavior across components
+
+- Data Directory [dataDir]
+```
+ ${solr.data.dir:}
+```
+**Required: No**
+
+Defines the location for index data storage
+Uses system property substitution with fallback to default
+
+Used to specify an alternate directory to hold all index data other than the default ./data under the Solr home. If replication is in use, this should match the replication configuration.
+
+Think of this like choosing where and how to store books in a library: dataDir is like choosing which building to store your books in.
+
+### Directory Factory [directoryFactory]
+```
+
+```
+
+Configures how Solr manages index storage
+Default: NRTCachingDirectoryFactory (is memory-based and optimized for Near Real Time searches)
+
+Alternatives include:
+ - StandardDirectoryFactory:
+This is Solr's default directory factory. Think of it as a smart manager that automatically chooses the best directory implementation based on your operating system and available options. It typically selects between MMapDirectory and NIOFSDirectory, making it a safe and reliable choice for most use cases. When you start Solr without specifying a directory factory, this is what you get.
+ - MMapDirectoryFactory:
+This factory uses memory-mapped files (mmap) to access the index. Imagine your index files being directly mapped into your application's memory space - when you read from that memory address, you're actually reading from the file. This approach can provide excellent performance, especially for large indices, because it leverages the operating system's virtual memory management.
+For example, when you have a 50GB index, instead of reading it chunk by chunk into memory, mmap makes it appear as if the entire index is in memory, even though physically it's still on disk. This is particularly effective on 64-bit systems with large amounts of RAM.
+ - NIOFSDirectoryFactory:
+This implementation uses Java's New I/O (NIO) package for file operations. Think of it as a modern, channel-based approach to file access. It's particularly good at handling many concurrent operations because it can use the operating system's native I/O operations. This makes it especially suitable for scenarios where you have many concurrent users searching your Solr index.
+NIOFSDirectoryFactory is often the best choice when you need reliable performance across different operating systems and don't want to deal with memory-mapping complexities.
+ - SimpleFSDirectoryFactory:
+This is the most basic implementation, using simple file system operations. Imagine it as a straightforward, no-frills approach to file access. While it might not offer the performance optimizations of other implementations, it's very reliable and can be useful in situations where you want predictable behavior without any complex memory management or operating system-specific optimizations.
+ - RAMDirectoryFactory (memory-based, non-persistent):
+This is a special case - it keeps the entire index in memory without any persistence to disk. Think of it as creating a temporary, super-fast index that disappears when Solr shuts down. It's like having a high-speed, temporary workspace.
+
+solr.StandardDirectoryFactory is filesystem based and tries to pick the best implementation for the current JVM and platform.
+
+solr.RAMDirectoryFactory is memory based, not persistent, and doesn't work with replication.
+
+### codecFactory
+```
+
+```
+**Required: No**
+
+codecFactory in Solr is essentially responsible for controlling how data is physically written to and read from the Lucene index
+
+It handles:
+ - Data Compression
+Determines how different field types are compressed
+Manages the tradeoff between storage size and retrieval speed
+Applies different compression strategies based on field characteristics
+ - Field Encoding
+Controls how various data types are encoded in the index
+Handles special cases like docValues encoding
+Manages posting list formats and term dictionary encoding
+ - Index Format
+Defines the physical structure of index files
+Controls how term vectors are stored
+Manages how stored fields are written to disk
+
+For implementation class, options include:
+ - class="solr.SchemaCodecFactory"
+ - class="solr.SimpleTextCodecFactory"
+ Creates human-readable index files
+ Useful for debugging and understanding index structure
+ Not recommended for production due to large size/poor performance
+ - class="solr.PreferredCodecFactory"
+ Allows explicit specification of codec name
+ Useful when you want to force a specific Lucene codec version
+ ```
+
+ Lucene95
+
+ ```
+ - Create own custom codec class
+
+For compressionMode, options include "BEST_SPEED" or "BEST_COMPRESSION"
+
+### indexConfig
+Control how Lucene/Solr handles index creation and management. Here are the key components from the configuration:
+ - Memory and Buffer Settings
+ - ramBufferSizeMB: Controls how much RAM Lucene can use for buffering documents before flushing to disk
+ - maxBufferedDocs: Sets a limit on number of documents that can be buffered before forcing a flush
+ - Either limit can trigger a flush when reached
+ - Lock Management
+ - lockType: Specifies the LockFactory implementation to use for index locking. Options include:
+ - native: Uses OS native file locking (default for Solr 3.6+)
+ - single: For read-only indexes or single-process scenarios
+ - simple: Uses a plain file for locking
+ - Merge Management:
+ - Merge Policy: Controls how index segments are merged. Default is TieredMergePolicy since Solr/Lucene 3.3.
+ ```
+
+ 10
+ 10
+ 0.1
+
+ ```
+ Parameters include:
+ - maxMergeAtOnce: Controls the maximum number of segments to merge at the same time
+ - segmentsPerTier: Defines how many segments are allowed in each tier
+ - noCFSRatio: Controls which segments should use the compound file format
+ - Merge Scheduler: Controls how merges are performed. Can be concurrent (background threads) or serial
+ - Deletion Policy: Configurable through deletionPolicy. Controls how old commit points are removed. Parameters include:
+ - maxCommitsToKeep: Number of commit points to retain
+ - maxOptimizedCommitsToKeep: Number of optimized commit points to keep
+ - maxCommitAge: Age-based deletion of commit points
+ - infoStream: Optional debugging feature that can write detailed indexing information to a specified file
+
+### updateHandler
+ - UpdateLog
+ ```
+
+ ${solr.ulog.dir:}
+ ${solr.ulog.numVersionBuckets:65536}
+
+ ```
+ Enables transaction logging for:
+ - Real-time get operations
+ - Durability guarantees
+ - SolrCloud replica recovery
+
+ **dir**: Specifies where transaction logs are stored
+ **numVersionBuckets**: Controls version tracking buckets (default: 65536). Higher values reduce synchronization overhead during high-volume indexing. Each bucket requires 8 bytes of heap space.
+ - AutoCommit
+ ```
+
+ ${solr.autoCommit.maxTime:15000}
+ false
+
+ ```
+ Performs `hard` commits automatically based on:
+ - `maxTime`: Time since last document addition (default: 15000ms)
+ - `openSearcher`: If false, flushes to storage without opening new searcher
+
+ Hard commits ensure data durability but are resource-intensive. Alternative: Use `commitWithin` when adding documents
+ - autoSoftCommit
+ ```
+
+ ${solr.autoSoftCommit.maxTime:-1}
+
+ ```
+ Performs `soft` commits and it:
+ - Makes changes visible to searches
+ - Doesn't ensure data is synced to disk
+ - is faster than hard commits
+ - is better for near-realtime search scenarios
+
+ -1 value disables soft auto-commits
+ - Event Listeners
+ - `postCommit`: Fires after every commit
+ - `postOptimize`: Fires after optimize commands
+
+### indexReaderFactory
+IndexReaderFactory allows:
+ - Allows customization of how Solr reads its index
+ - Provides flexibility to implement alternative IndexReader behaviors
+
+Default implementation uses disk-based index reading
+
+```
+
+ Some Value
+
+```
+
+Note:
+ - Experimental Feature Status
+ - May conflict with ReplicationHandler (SOLR-1366)
+ - ReplicationHandler assumes disk-resident index
+ - Custom implementations might break replication functionality
+
+### query
+ - maxBooleanClauses
+ ```
+ 2048
+ ```
+ Sets maximum number of clauses in Boolean queries
+Global Lucene property affecting all SolrCores
+Note: Last initialized SolrCore determines the value
+ - Cache
+ - class implementation
+ - LRUCache: Based on synchronized LinkedHashMap
+ - FastLRUCache: Based on ConcurrentHashMap
+ Better for high hit ratios (>75%)
+ Faster gets, slower puts in single-threaded operations
+ - size: Maximum entries in cache
+ - initialSize: Initial capacity
+ - autowarmCount: Items to prepopulate from old cache
+ - maxRamMB: Maximum RAM usage (optional)
+ - filterCache
+ ```
+
+ ```
+ Caches document sets matching queries
+ Used for filter operations
+ - queryResultCache
+ ```
+
+ ```
+ Stores ordered lists of document IDs
+ Based on query, sort, and document range
+ - documentCache
+ ```
+
+ ```
+ Caches Lucene Document objects
+ Stores field values
+
+- enableLazyFieldLoading ` true`
+ Loads unrequested stored fields only when needed
+ Improves performance for large text fields
+
+- queryResult
+ ```
+ 20
+ 200
+ ```
+ Controls caching behavior for query results and optimizes for pagination scenarios
+
+ queryResultWindowSize: When a search is requested, a superset of the requested number of document ids are collected. For example, if a search for a particular query requests matching documents 10 through 19, and queryWindowSize is 50, then documents 0 through 49 will be collected and cached. Any further requests in that range can be satisfied via the cache.
+
+ queryResultMaxDocsCached: Maximum number of documents to cache for any entry in the queryResultCache
+
+- Event Listeners
+ ```
+
+
+ ```
+ QuerySenderListener takes an array of NamedList and executes a local query request for each NamedList in sequence
+
+ newSearcher: Fired when new searcher is prepared
+firstSearcher: Fired for initial searcher
+
+- useColdSearcher
+ ```
+ false
+ ```
+ Controls behavior when no warmed searcher is available
+ - false: Requests block until searcher is warmed
+ - true: Uses unwarmed searcher immediately
+
+### requestDispatcher
+ - requestParsers
+ ```
+
+ ```
+ - enableRemoteStreaming: Allows use of stream.file and stream.url parameters. Note: Requires authentication when enabled
+Enables fetching remote files
+ - multipartUploadLimitInKB: Maximum size for multipart file uploads
+ - formdataUploadLimitInKB: Maximum size for POST form data. Handles application/x-www-form-urlencoded data.
+ - addHttpRequestToContext: Controls HttpServletRequest inclusion. Adds request object to SolrQueryRequest context.
+ - httpCaching
+ ```
+
+ ```
+ never304="true": Disables HTTP 304 (Not Modified) responses. Prevents Solr from sending caching-related headers
+
+ never304="false" for automatic cache header generation
+ - cacheControl
+ ```
+
+ max-age=30, public
+
+ ```
+ Sets Cache-Control header
+ Can specify max-age and visibility
+ Works even with never304="true"
+ - Dynamic Caching Headers
+ ```
+
+ max-age=30, public
+
+ ```
+ - lastModifiedFrom:
+ - "openTime": Based on current Searcher opening time
+ - "dirLastMod": Based on physical index modification time
+ - etagSeed: Forces different ETag values
+ - Cache Validation Options:
+ - Generates Last-Modified headers
+ - Handles If-Modified-Since requests
+ - Manages ETag validation
+ - Responds to If-None-Match requests
+
+### Request Handlers
+ - Default Search Handler [/search]
+ ```
+
+
+ explicit
+ 10
+ json
+ true
+
+
+ ```
+ - echoParams: Shows which parameters were used in the response
+ - rows: Returns 10 results by default
+ - wt: Returns results in JSON format
+ - indent: Pretty prints the JSON response
+
+ The following sections are commented out in code.
+ ```
+
+ ```
+ This section would append additional parameters to every query. These parameters cannot be overridden by clients.
+ ```
+
+ ```
+ This section would define fixed parameters that cannot be changed by clients.
+ ```
+
+ ```
+ This section allows customization of the search components chain, either replacing or extending the default components.
+ - Data Import Handler [/dataimport]
+ ```
+
+
+ solr-data-config.xml
+
+
+ ```
+ Creates an endpoint at /dataimport that handles data import operations. `solr.DataImportHandler` is Solr's built-in handler for managing data imports.
+
+ Data import configuration is stored in a file named solr-data-config.xml. This external configuration file defines:
+ - Data sources (databases, files, etc.)
+ - Entity relationships
+ - Field mappings
+ - Import queries or specifications
+
+ Operations available through this handler:
+ - full-import: Complete data import
+ - delta-import: Incremental update
+ - status: Check import status
+ - reload-config: Reload configuration
+ - abort: Stop running import
+ - Query handler [/query]
+ ```
+
+
+ explicit
+ json
+ true
+
+
+ ```
+ Creates an endpoint at /query
+ Uses Solr's standard SearchHandler class for processing queries
+
+ - echoParams: Set to "explicit" to show which parameters were used in the response
+ - wt (writer type): Set to "json" to return results in JSON format
+ - indent: Set to "true" to format the JSON response with proper indentation for readability
+
+ Difference from `/select` is that no default rows parameter (will use Solr's system default)
+
+### Browse Handler [/browse]
+ ```
+
+
+ explicit
+
+
+ ```
+ Creates an endpoint at /browse
+ Uses standard solr.SearchHandler class
+ `useParams` references parameter sets defined in `initParams` in the configuration:
+- query: Might contain common query parameters
+- facets: Would define faceting behavior
+- velocity: Parameters for Velocity template rendering
+- browse: Browse-specific parameters
+
+ #### initParams
+ ```
+
+
+ _text_
+
+
+ ```
+ - Path specification
+ ``
+ - /update/**: All update handlers (the ** wildcard means all sub-paths)
+ - /query: Query handler
+ - /select: Select handler
+ - /tvrh: Term Vector Request Handler
+ - /elevate: Query elevation handler
+ - /spell: Spell checking handler
+ - /browse: Browse handler
+ - Default Param
+ ```
+
+ _text_
+
+ ```
+ df (default field) is set to `_text_`
+ This means when no specific field is specified in a query, Solr will search in the _text_ field
+
+ It means:
+ - Provides a consistent default search field across multiple handlers
+ - Makes maintenance easier since you only need to change the default field in one place
+ - The `_text_` field typically contains content from multiple fields
+
+### Update handler [/update]
+```
+
+
+ true
+ ignored_
+ _text_
+
+
+```
+Endpoint: /update/extract
+startup="lazy": Handler loads only when first requested (saves resources)
+Uses Solr's ExtractingRequestHandler class for Tika-based content extraction
+
+This handler is particularly useful for:
+ - Indexing documents
+ - Search applications
+ - Knowledge bases
+
+Attributes include:
+ - lowernames: Converts all field names to lowercase
+ - fmap.meta: Maps metadata fields to prefix `ignored_` (ignores them)
+ - fmap.content: Maps extracted content to the `text` field
+
+### Spell Check Component
+```
+
+ text_general
+
+ default
+ _text_
+ solr.DirectSolrSpellChecker
+ internal
+ 0.5
+ 2
+ 1
+ 5
+ 4
+ 0.01
+
+
+```
+
+Attributes include:
+ - query: Basic query processing
+ - facet: Faceted search functionality
+ - mlt: More Like This feature
+ - highlight: Result highlighting
+ - stats: Statistical functions
+ - debug: Debugging information
+ - accuracy: 0.5 minimum accuracy for suggestions
+ - maxEdits: Maximum 2 character edits allowed
+ - minPrefix: Requires 1 character prefix match
+ - maxInspections: Checks up to 5 terms per result
+ - minQueryLength: Terms must be at least 4 characters
+ - maxQueryFrequency: Term must appear in less than 1% of docs
+ - classname: solr.DirectSolrSpellChecker or solr.WordBreakSolrSpellChecker
+
+SpellCheckComponent can be hooked into the request handler that handles normal user queries so that a separate request is not needed to get suggestions.
+
+For example:
+```
+
+
+ default
+ on
+ true
+ 10
+ 5
+ 5
+ true
+ true
+ 10
+ 5
+
+
+ spellcheck
+
+
+```
+
+### Term Vector Component
+```
+
+
+
+ true
+
+
+ tvComponent
+
+
+```
+
+``
+Defines a component named "tvComponent"
+Uses Solr's TermVectorComponent class
+Provides access to term vector information from the index
+
+``
+Creates a /tvrh endpoint
+Uses standard SearchHandler
+Lazy loading enabled (loads only when first used)
+
+Attribute `tv` enables term vector retrieval by default.
+
+This can be used for:
+- Analyzing term frequencies
+- Examining document vectors
+- Text analysis
+- Feature extraction
+
+### Terms Component
+```
+
+
+
+
+ true
+ false
+
+
+ terms
+
+
+```
+
+``
+Defines a component named "terms"
+Uses Solr's TermsComponent class
+Used for accessing term information and statistics
+
+``
+Creates a /terms endpoint
+Uses standard SearchHandler
+Lazy loading enabled (loads only when requested)
+
+`terms`: Terms component is enabled by default
+`distrib`: Distributed search is disabled by default
+
+### Query Elevation Component
+```
+
+ string
+ elevate.xml
+
+
+
+
+ explicit
+
+
+ elevator
+
+
+```
+
+```
+
+ string
+ elevate.xml
+
+```
+Uses QueryElevationComponent class
+Queries analyzed as 'string' type
+Configuration stored in 'elevate.xml'
+
+``
+Creates an /elevate endpoint
+Uses standard SearchHandler
+Lazy loading enabled
+
+`echoParams`: Shows which parameters were used in the response
+
+This enables you to configure the top results for a given query regardless of the normal lucene scoring.
+
+### Highlighting Component
+
+```
+
+
+
+
+ 100
+
+
+
+
+
+ 70
+ 0.5
+ [-\w ,/\n\"']{20,200}
+
+
+
+
+
+ ]]>
+ ]]>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ,,
+ ,,
+ ,,
+ ,,
+ ,]]>
+ ]]>
+
+
+
+
+
+ 10
+ .,!?
+
+
+
+
+
+ WORD
+ en
+ US
+
+
+
+
+```
+
+**Default fragmenter**
+```
+
+
+ 100
+
+
+```
+Creates fixed-size fragments (100 characters)
+For basic highlighting needs
+
+**Regex Fragmenter**
+```
+
+
+ 70
+ 0.5
+ [-\w ,/\n\"']{20,200}
+
+
+```
+Uses regular expressions for intelligent fragmentation
+Fragment size: 70 characters
+Allows 50% size variation (slop)
+Pattern matches words, punctuation, and whitespace
+For sentence-based highlighting
+
+**HTML Formatter**
+```
+
+
+ ]]>
+ ]]>
+
+
+```
+Default HTML formatting
+Wraps highlighted terms in tags
+Customizable pre/post tags
+
+**fragListBuilder**
+```
+
+
+
+```
+Simple: Basic fragment collection
+Single: Creates one fragment per field
+Weighted (Default): Uses scoring to select best fragments
+
+**Default Fragment Builder**
+``
+ Orders fragments by score
+ Optional separator character for multi-valued fields
+
+**Colored Fragment Builder**
+```
+
+
+ ,,
+ ,,
+ ,,
+ ,,
+ ,]]>
+ ]]>
+
+
+```
+Provides multiple background colors
+Useful for distinguishing different matches
+Includes 10 predefined colors
+
+**Simple Boundary Scanner**
+```
+
+
+ 10
+ .,!?
+
+
+```
+Scans up to 10 characters for boundaries
+Uses punctuation and whitespace as boundaries
+
+**Break Iterator Scanner**
+```
+
+
+ WORD
+ en
+ US
+
+
+```
+Language-aware boundary detection
+Supports WORD, CHARACTER, LINE, and SENTENCE boundaries
+Locale-specific processing
+Multi-language content
+
+### Suggest Component
+```
+
+
+ name
+ name_suggest
+ BlendedInfixLookupFactory
+ position_linear
+ weight
+ text_stemmed
+ false
+ false
+ false
+
+
+
+
+
+ json
+ true
+ true
+ name
+ 10
+
+
+ suggest
+
+
+```
+This component is used for:
+- Autocomplete functionality
+- Search suggestions
+- Type-ahead features
+- Weighted suggestions
+- Partial word matching
+
+Component Attributes include:
+ - `field`: Uses the field "name_suggest" for suggestions
+ - `lookupImpl`: Uses BlendedInfixLookupFactory for partial matching
+ - `blenderType`: Linear position-based blending
+ - `weightFieldValue`: Weights suggestions using "weight" field
+ - `suggestAnalyzerFieldType`: Uses text_stemmed analyzer
+ - Highlighting disabled
+ - Dictionary not rebuilt automatically
+
+Handler Attributes include:
+ - `wt`: Returns JSON format
+ - `indent`: Indented output
+ - `suggest`: Suggestion enabled
+ - `suggest.dictionary`: name dictionary
+ - `suggest.count`: Returns top 10 suggestions
+
+### Update Request Processor Chain
+```
+
+
+
+
+ [^\w-\.]
+ _
+
+
+
+
+
+
+ yyyy-MM-dd'T'HH:mm:ss.SSSZ
+ yyyy-MM-dd'T'HH:mm:ss,SSSZ
+ yyyy-MM-dd'T'HH:mm:ss.SSS
+ yyyy-MM-dd'T'HH:mm:ss,SSS
+ yyyy-MM-dd'T'HH:mm:ssZ
+ yyyy-MM-dd'T'HH:mm:ss
+ yyyy-MM-dd'T'HH:mmZ
+ yyyy-MM-dd'T'HH:mm
+ yyyy-MM-dd HH:mm:ss.SSSZ
+ yyyy-MM-dd HH:mm:ss,SSSZ
+ yyyy-MM-dd HH:mm:ss.SSS
+ yyyy-MM-dd HH:mm:ss,SSS
+ yyyy-MM-dd HH:mm:ssZ
+ yyyy-MM-dd HH:mm:ss
+ yyyy-MM-dd HH:mmZ
+ yyyy-MM-dd HH:mm
+ yyyy-MM-dd
+
+
+
+ strings
+
+ java.lang.Boolean
+ booleans
+
+
+ java.util.Date
+ tdates
+
+
+ java.lang.Long
+ java.lang.Integer
+ tlongs
+
+
+ java.lang.Number
+ tdoubles
+
+
+
+
+
+
+
+```
+
+**UUID Processor**
+``
+Generates unique IDs for documents if none provided
+
+**Remove Blank Field Processor**
+``
+Removes null values, empty strings, and blank spaces. Reduces index size.
+
+**Field Name Mutating Processor**
+```
+
+ [^\w-\.]
+ _
+
+```
+Replaces invalid characters with underscores
+Ensures field name compatibility
+Maintains naming consistency
+Prevents indexing errors
+
+Configuration:
+ - pattern: Matches non-word characters except hyphens and dots
+ - replacement: Uses underscore as replacement character
+
+**Parse Boolean Field Processor**
+``
+Identifies boolean-like values
+Converts to proper boolean type
+Handles various boolean representations
+Ensures type consistency
+
+**Parse Long Field Processor**
+``
+Identifies numeric values
+Converts to long type
+Handles integer overflow
+Maintains precision
+
+**Parse Double Field Processor**
+``
+Identifies decimal values
+Converts to double type
+Handles scientific notation
+Maintains precision
+
+**Parse Date Field Processor**
+```
+
+
+ yyyy-MM-dd'T'HH:mm:ss.SSSZ
+ yyyy-MM-dd'T'HH:mm:ss,SSSZ
+ yyyy-MM-dd'T'HH:mm:ss.SSS
+ yyyy-MM-dd'T'HH:mm:ss,SSS
+ yyyy-MM-dd'T'HH:mm:ssZ
+ yyyy-MM-dd'T'HH:mm:ss
+ yyyy-MM-dd'T'HH:mmZ
+ yyyy-MM-dd'T'HH:mm
+ yyyy-MM-dd HH:mm:ss.SSSZ
+ yyyy-MM-dd HH:mm:ss,SSSZ
+ yyyy-MM-dd HH:mm:ss.SSS
+ yyyy-MM-dd HH:mm:ss,SSS
+ yyyy-MM-dd HH:mm:ssZ
+ yyyy-MM-dd HH:mm:ss
+ yyyy-MM-dd HH:mmZ
+ yyyy-MM-dd HH:mm
+ yyyy-MM-dd
+
+
+```
+Supports multiple date formats
+Handles timezone conversions
+Processes millisecond precision
+Standardizes date representation
+
+**Add Schema Fields Processor**
+```
+
+ strings
+
+
+
+
+```
+Adds new fields automatically
+Maps Java types to Solr types
+Handles type inference
+Maintains schema consistency
+
+Type Mappings:
+ - Boolean Mapping
+ ```
+
+ java.lang.Boolean
+ booleans
+
+ ```
+ - Date Mapping
+ ```
+
+ java.util.Date
+ tdates
+
+ ```
+ - Integer Mapping
+ ```
+
+ java.lang.Long
+ java.lang.Integer
+ tlongs
+
+ ```
+ - Number Mapping
+ ```
+
+ java.lang.Number
+ tdoubles
+
+ ```
+
+**Log Update Processor**
+``
+Records update operations
+Provides debugging information
+Tracks document changes
+Assists in troubleshooting
+
+**Distributed Update Processor**
+``
+Manages shard distribution
+Handles replication
+Ensures consistency
+Manages cluster updates
+
+**Run Update Processor**
+``
+Commits changes
+Finalizes updates
+Ensures persistence
+Triggers index updates
+
+### Query Response Writer
+```
+
+
+
+ ${velocity.template.base.dir:}
+ ${velocity.solr.resource.loader.enabled:true}
+ ${velocity.params.resource.loader.enabled:false}
+
+
+
+ 5
+
+```
+
+**JSON Response Writer**
+Default JSON format writer
+No additional configuration needed
+Returns results in JSON format
+
+**Velocity Response Writer**
+Template-based response formatting
+Lazy loading
+Configurable template directory
+Resource loader settings
+Parameter-based template loading control
+
+**XSLT Response Writer**
+Transforms XML output using XSLT
+Uses templates from conf/xslt directory
+5-second cache lifetime for XSLT files
+Allows custom XML transformations
+
+
+# Search Core `solr-data-config.xml`
+
+### LEAR Data Source
+```
+
+```
+Uses a custom JDBC data source implementation
+References a JNDI datasource named `jdbc/lear`
+Primary source for certain business types
+
+### COLIN Data Source
+```
+
+```
+References a JNDI datasource named `jdbc/colin`
+
+### LEAR Entity
+```
+SELECT
+ identifier,
+ legal_name as name,
+ legal_type as legalType,
+ state as status,
+ tax_id as bn
+FROM businesses
+WHERE legal_type in ('BEN', 'CP', 'SP', 'GP')
+```
+Field mappings
+Handles Benefit Company, Cooperative, Sole Proprietorship, General Partnership
+
+### COLIN Entity
+```
+SELECT
+ c.corp_num as identifier,
+ c.corp_typ_cd as legalType,
+ c.bn_15 as bn,
+ CASE cs.state_typ_cd
+ when 'ACT' then 'ACTIVE'
+ when 'HIS' then 'HISTORICAL'
+ when 'HLD' then 'LIQUIDATION'
+ else cs.state_typ_cd
+ END as status,
+ cn.corp_nme as name
+FROM corporation c
+ join corp_state cs on cs.corp_num = c.corp_num
+ join corp_name cn on cn.corp_num = c.corp_num
+WHERE c.corp_typ_cd not in ('BEN','CP','GP','SP')
+ and cs.end_event_id is null
+ and cn.end_event_id is null
+ and cn.corp_name_typ_cd in ('CO', 'NB')
+```
+Joins corporation, corp_state, corp_name tables
+Status mappings
+Field mappings
+Filters - excludes business types handled by LEAR, includes only active records and legal type
+
+# Search Core Schema Config
+
+### Fields Definition
+#### Base Fields
+```
+
+
+
+
+
+```
+identifier: Unique key field (text_basic)
+name: Business name (text_basic)
+legalType: Legal entity type (string)
+status: Business status (string)
+bn: Business number (text_basic)
+
+#### Suggestion and Selection Fields
+```
+
+
+
+
+
+
+
+
+
+```
+name_suggest: Used for providing name suggestions in search (phrase_basic, Stored: Yes, Indexed: Yes)
+name_select: Used for advanced selection queries with stemming (text_stemmed, Stored: Yes, Indexed: Yes)
+name_single_term: Supports single-term queries using n-gram analysis (text_ngram, Stored: Yes, Indexed: Yes)
+name_stem_agro: Applies aggressive stemming to names for broader matching (text_stemmed_agro, Stored: Yes, Indexed: Yes)
+name_synonym: Supports synonym expansion during queries (text_synonym, Stored: Yes, Indexed: Yes)
+identifier_select: Enables identifier selection with n-gram analysis (phrase_ngram, Stored: Yes, Indexed: Yes)
+bn_select: Supports selection queries on the bn field (phrase_ngram, Stored: Yes, Indexed: Yes)
+weight: A floating-point field used for boosting or ranking (float, Stored: Yes, Indexed: Yes)
+
+#### Unique Key Field
+`identifier`
+Specifies the identifier field as the unique key for each document in the index. This ensures that each document is uniquely identifiable.
+
+#### Field Types
+##### Custom
+**Text**
+```
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+**text_basic**
+Basic text field that tokenizes text into lowercase tokens without further analysis
+Class: solr.TextField
+Analyzer: Uses a lowercase tokenizer.
+
+**text_stemmed**
+Text field with minimal stemming, suitable for English text
+Class: solr.TextField
+Analyzer: Applies whitespace tokenization, standard filtering, lowercasing, word delimiter graph filtering, and minimal English stemming.
+
+**text_stemmed_agro**
+Provides more aggressive stemming for broader matching
+Class: solr.TextField
+Analyzer: Similar to text_stemmed but uses the Porter stemming algorithm for aggressive stemming.
+
+**text_ngram**
+Supports partial and wildcard matching through n-grams
+Class: solr.TextField
+Analyzer:
+ - Index-Time: Applies n-gram filtering with minGramSize 1 and maxGramSize 15.
+ - Query-Time: Does not apply n-gram filtering.
+
+**text_synonym**
+Handles synonyms during query time to improve search results
+Class: solr.TextField
+Analyzer:
+ - Index-Time: Applies stemming and removes duplicates.
+ - Query-Time: Adds synonym expansion from a JDBC data source.
+
+**phrase_basic**
+Suitable for exact phrase matching
+Class: solr.TextField
+Analyzer: Uses keyword tokenizer and lowercases the entire input as a single token.
+
+**phrase_ngram**
+Enhances phrase matching with n-gram analysis for partial matches
+Class: solr.TextField
+Analyzer:
+ - Index-Time: Applies n-gram filtering with minGramSize 3 and maxGramSize 15.
+ - Query-Time: Does not apply n-gram filtering.
+
+**Numeric**
+```
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+Point-Based Numeric Fields (Efficient for numeric searches)
+ - pint: Integer
+ - pfloat: Float
+ - plong: Long
+ - pdouble: Double
+ - pints, pfloats, plongs, pdoubles: Multi-valued versions
+
+Trie-Based Numeric Fields (Traditional numeric fields)
+ - int, float, long, double
+ - ints, floats, longs, doubles: Multi-valued versions
+
+Attributes:
+ - docValues: Enabled for efficient sorting and faceting.
+ - precisionStep: Controls indexing precision; 0 disables multi-precision indexing.
+
+**Date**
+```
+
+
+
+
+
+
+```
+Point-Based Date Fields
+Efficient date fields for range queries
+ - pdate
+ - pdates: Multi-valued version
+
+Trie-Based Date Fields
+Traditional date fields with precision step for range queries
+ - date
+ - dates: Multi-valued version
+
+#### Copy Fields
+```
+
+
+
+
+
+
+
+```
+Copy fields are used to copy the contents of one field to another at index time. This is often used to index a field in multiple ways or to create composite fields. It enhances search capabilities by indexing data differently without altering the original data.
+
+#### Analyzers and Filters
+```
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+
+**Index-Time Analyzer**: Processes data when documents are added to the index.
+**Query-Time Analyzer**: Processes search queries.
+
+**solr.LowerCaseTokenizerFactory**: Converts text to lowercase and tokenizes it.
+**solr.WhitespaceTokenizerFactory**: Splits text based on whitespace.
+**solr.KeywordTokenizerFactory**: Treats the entire input as a single token.
+
+#### Filters
+Filters modify tokens produced by tokenizers.
+ - solr.StandardFilterFactory
+ Normalizes tokens (removes punctuation, etc).
+ - solr.LowerCaseFilterFactory
+ Converts tokens to lowercase.
+ - solr.WordDelimiterGraphFilterFactory
+ ``
+ Splits words into subwords and performs various transformations.
+ Attributes:
+ - catenateAll="1": Concatenates all subwords.
+ - splitOnNumerics="0": Does not split tokens on numerics.
+ - generateWordParts="0" and generateNumberParts="0": Prevents generation of word and number parts.
+ - solr.FlattenGraphFilterFactory
+ Simplifies the token graph, useful when dealing with synonyms or other filters that produce multiple tokens.
+ - Stemming Filters
+ - solr.EnglishMinimalStemFilterFactory: Applies minimal stemming for English.
+ - solr.PorterStemFilterFactory: Applies Porter stemming algorithm for aggressive stemming.
+ - solr.SnowballPorterFilterFactory: Uses Snowball stemming for the specified language.
+ - solr.NGramFilterFactory
+ - Generates n-grams (substrings) from tokens.
+ Attributes:
+ - minGramSize and maxGramSize: Define the size range of n-grams.
+ - Synonym Filters
+ ```
+
+ ```
+ com.s24.search.solr.analysis.jdbc.JdbcSynonymFilterFactory
+ Fetches synonyms from a JDBC data source.
+ Attributes:
+ - dataSource: JDBC data source name.
+ - sql: SQL query to retrieve synonyms.
+ - ignoreMissingDatabase: Ignores errors if the database is missing.
+ - ignoreCase: Case-insensitive matching.
+ - expand: Whether to expand synonyms.
+ - solr.RemoveDuplicatesTokenFilterFactory
+ Removes duplicate tokens from the token stream.
+
+## Trademark Core
+```
+
+ text/plain; charset=UTF-8
+
+```
+Includes additional content-type settings - JSON responses are written as plain text
+
+**`solr-data-config.xml`**
+```
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+Uses FileDataSource to read XML files.
+
+Has a two-level entity structure:
+ - processor entity: Uses FileListEntityProcessor to find XML files
+ - trademark entity: Uses XPathEntityProcessor to extract trademark data
+
+Reads from XML files in /opt/solr/trademarks_data/latest
+
+Simpler field structure:
+ - application_number
+ - category
+ - status
+ - name
+ - description
+
+**`managed-schema`**
+It includes extensive multi-language support.
+
+```
+
+
+
+
+```
+
+```
+
+
+
+```
+
+ - text_name_copy: Simple whitespace tokenization with lowercase
+ - text_name_singular_synonyms: Complex analysis chain for trademark names with stemming and synonyms
+ - text_name_compressed: Special field type that removes company designations and normalizes text
+
+Complex name compression processing
+```
+
+
+
+
+
+
+ ...
+
+
+```
+
+Name Field Configuration
+```
+
+```
+Uses `termVectors="true"` to enable more advanced text analysis.
+
+Unique Copy Field Directives
+```
+
+
+
+```
+
+## possible.conflicts Core
+
+**Query Parser Configuration**
+```
+
+
+
+```
+Provides framework for custom query parsing implementations
+Allows registration of multiple parser plugins
+Can be used with defType parameter or LocalParams
+Enables customization of query syntax interpretation
+
+**Function Parser Configuration**
+```
+
+
+
+```
+Enables custom function query implementations
+Allows registration of multiple ValueSourceParsers
+Can be used with the "func" QParser
+Supports custom scoring and ranking functions
+
+**Document Transformers Configuration**
+```
+
+
+
+
+
+
+
+
+
+```
+Enables document transformation during query response
+Supports integration with external data sources
+Allows dynamic value augmentation
+Provides boosting and editorial marking capabilities
+
+**Select RequestHandler Configuration**
+```
+
+
+ explicit
+ 10
+ ...
+
+
+```
+
+**`solr-data-config.xml`**
+```
+
+
+
+```
+`bc_registries` for corporate conflicts
+`bc_registries_names` for name conflicts
+Clear separation of concerns
+ - Name: corp-conflicts-ds (identifies the corporate conflicts data source)
+ Type: Uses com.s24.search.solr.analysis.jdbc.DataImportJdbcDataSource
+ DataSource: Points to jdbc/bcrs_corps JNDI resource
+ Purpose: Handles corporate registry conflicts
+ - Name: name-conflicts-ds (identifies the name conflicts data source)
+ Type: Uses same DataImport JDBC data source type
+ DataSource: Points to jdbc/bcrs_names JNDI resource
+ Purpose: Handles business name conflicts
+
+```
+
+
+
+```
+Uses solr_dataimport_conflicts_vw views
+Encapsulates conflict detection logic
+Consistent naming across schemas
+ - Name: corp-conflicts
+ DataSource: References corp-conflicts-ds
+ Primary Key: Uses id field
+ Query: Selects from bc_registries.solr_dataimport_conflicts_vw view
+ Schema: Uses bc_registries schema
+ - Name: name-conflicts
+ DataSource: References name-conflicts-ds
+ Primary Key: Uses id field
+ Query: Selects from bc_registries_names.solr_dataimport_conflicts_vw view
+ Schema: Uses bc_registries_names schema
+
+**`managed-schema`**
+
+Unique Required Fields
+```
+
+
+
+```
+
+Special Purpose Fields
+```
+
+
+
+```
+
+Name Analysis Fields
+```
+
+
+
+
+```
+
+ - `text_name_copy`
+ Uses WhitespaceTokenizer
+ Simple lowercase processing
+ No synonym expansion
+
+ - `text_name_singular_synonyms`
+ Includes snowball stemming
+ Synonym handling at query time
+ Duplicate removal
+
+ - `text_name_compressed`
+ Removes business designations
+ Strips whitespace and non-alpha characters
+ KeywordTokenizer for whole phrase matching
+
+ - `name_no_synonyms`
+ Complex character filtering
+ British Columbia abbreviation handling
+ No synonym expansion
+
+Phonetic Analysis Fields
+```
+
+
+```
+
+ - `cobrs_phonetic`
+ Custom phonetic algorithm
+ Number word conversion
+ Special character handling
+ British Columbia abbreviation normalization
+
+ - `dblmetaphone_name`
+ Double Metaphone algorithm
+ Special character handling
+ British Columbia abbreviation normalization
+
+Special Processing Fields
+```
+
+
+
+```
+ - `text_name_exact_match`
+ Removes business designations
+ Removes special characters
+ Compressed format for exact matching
+
+
+ - `txt_starts_with`
+ Complex stopword removal
+ Special character handling
+ British Columbia abbreviation normalization
+
+
+ - `contains_exact_phrase`
+ Minimal processing
+ Preserves word boundaries
+ Lowercase conversion
+
+Currency Symbol Handling
+```
+
+
+```
+
+British Columbia Abbreviations
+```
+
+```
+
+Token filter
+```
+
+```
+
+Complex Word Handling
+```
+
+```
+
+Special Processing
+```
+
+```
+
+## Names Core
+
+**`solr-data-config.xml`**
+```
+
+
+
+
+
+
+```
+
+Uses a custom JDBC data source implementation com.s24.search.solr.analysis.jdbc.DataImportJdbcDataSource
+References a JDBC data source named "jdbc/bcrs_names"
+This appears to be connecting to a BC Registries names database
+```
+
+```
+
+Defines a single entity named "name_instance"
+Uses "id" as the primary key field
+Imports data from a view named solr_dataimport_names_vw in the bc_registries_names schema
+The view likely contains pre-formatted data ready for Solr indexing
+```
+
+
+
+```
+
+**`managed-schema`**
+Core Business Name Fields
+```
+
+
+
+
+
+
+
+
+
+
+```
+
+Specialized Name Analysis Fields
+```
+
+
+
+
+
+```
+
+Name-Specific Field Types
+```
+
+
+
+
+
+
+
+
+
+
+```
+
+```
+
+
+
+
+
+
+
+
+
+
+
+```
+
+```
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+
+```
+
+
+
+
+
+
+
+
+
+```
+
+Copy field
+```
+
+
+
+
+```
\ No newline at end of file
|