diff --git a/.github/workflows/text-followup-handler.yml b/.github/workflows/text-followup-handler.yml new file mode 100644 index 000000000..802df79bf --- /dev/null +++ b/.github/workflows/text-followup-handler.yml @@ -0,0 +1,82 @@ +name: Text Followup Handler +on: + push: + branches: [ main ] + tags: [ "handler-text-followup-[0-9]+.[0-9]+.[0-9]+" ] + paths: [ "handlers/text-followup-handler/**" ] + pull_request: + branches: [ main ] + paths: [ "handlers/text-followup-handler/**" ] + workflow_run: + workflows: [ "Schemas (Trigger)" ] + types: + - completed + workflow_dispatch: +env: + REGISTRY: ghcr.io + IMAGE_NAME: shared-reality-lab/image-handler-text-followup +jobs: + lint: + name: PEP 8 style check. + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install flake8 + run: pip install flake8 + - name: Check with flake8 + run: python -m flake8 ./handlers/text-followup-handler --show-source + build-and-push-image: + name: Build and Push to Registry + needs: lint + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + submodules: true + - name: Log into GHCR + uses: docker/login-action@v2 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Get Correct Tags + run: | + if [[ ${{ github.ref }} =~ ^refs/tags/handler-text-followup-[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "TAGGED=true" >> $GITHUB_ENV + else + echo "TAGGED=false" >> $GITHUB_ENV + fi + - name: Get timestamp + run: echo "timestamp=$(date -u +'%Y-%m-%dT%H.%M')" >> $GITHUB_ENV + - name: Extract metadata + id: meta + uses: docker/metadata-action@v4 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + flavor: | + latest=${{ env.TAGGED }} + tags: | + type=match,enable=${{ env.TAGGED }},priority=300,pattern=handler-text-followup-(\d+.\d+.\d+),group=1 + 
type=raw,priority=200,value=unstable + type=raw,priority=100,value=${{ env.timestamp }} + labels: | + org.opencontainers.image.title=IMAGE Handler Text Followup + org.opencontainers.image.description=Handler to create responses for followup queries. + org.opencontainers.image.authors=IMAGE Project + org.opencontainers.image.licenses=AGPL-3.0-or-later + maintainer=IMAGE Project + - name: Build and push + uses: docker/build-push-action@v3 + with: + context: . + file: ./handlers/text-followup-handler/Dockerfile + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/text-followup.yml b/.github/workflows/text-followup.yml new file mode 100644 index 000000000..42d7899e3 --- /dev/null +++ b/.github/workflows/text-followup.yml @@ -0,0 +1,83 @@ +name: Text Followup +on: + push: + branches: [ main, text-followup ] + tags: [ "preprocessor-text-followup-[0-9]+.[0-9]+.[0-9]+" ] + paths: [ "preprocessors/text-followup/**" ] + pull_request: + branches: [ main ] + paths: [ "preprocessors/text-followup/**" ] + workflow_run: + workflows: [ "Schemas (Trigger)" ] + types: + - completed + workflow_dispatch: +env: + REGISTRY: ghcr.io + IMAGE_NAME: shared-reality-lab/image-preprocessor-text-followup +jobs: + lint: + name: PEP 8 style check.
+ runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install flake8 + run: pip install flake8 + - name: Check with flake8 + run: python -m flake8 ./preprocessors/text-followup --show-source + build-and-push-image: + name: Build and Push to Registry + needs: lint + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + submodules: true + - name: Log into GHCR + uses: docker/login-action@v2 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Get Correct Tags + run: | + if [[ ${{ github.ref }} =~ ^refs/tags/preprocessor-text-followup-[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "TAGGED=true" >> $GITHUB_ENV + else + echo "TAGGED=false" >> $GITHUB_ENV + fi + - name: Get timestamp + run: echo "timestamp=$(date -u +'%Y-%m-%dT%H.%M')" >> $GITHUB_ENV + - name: Extract metadata + id: meta + uses: docker/metadata-action@v4 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + flavor: | + latest=${{ env.TAGGED }} + tags: | + type=match,enable=${{ env.TAGGED }},priority=300,pattern=preprocessor-text-followup-(\d+.\d+.\d+),group=1 + type=raw,priority=200,value=unstable + type=raw,priority=100,value=${{ env.timestamp }} + labels: | + org.opencontainers.image.title=IMAGE Preprocessor Text Followup + org.opencontainers.image.description=Responds to followup queries from user. + org.opencontainers.image.authors=IMAGE Project + org.opencontainers.image.documentation=https://github.com/Shared-Reality-Lab/IMAGE-server/tree/main/preprocessors/text-followup/README.md + org.opencontainers.image.licenses=AGPL-3.0-or-later + maintainer=IMAGE Project + - name: Build and push + uses: docker/build-push-action@v3 + with: + context: . 
+ file: ./preprocessors/text-followup/Dockerfile + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.gitignore b/.gitignore index 97d813856..2e12dbe79 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,13 @@ config/chart.env config/apis-and-selection.env config/vision_api_key.json config/ollama.env +config/healthcheck.env +config/image-summary.env +config/imagelogs.env +config/make_request.env +config/pp_daily_test.env +config/restoreunstable.env +config/sendimagereq.env +config/image_daily_cron.env *.sh docker-compose.override.yml diff --git a/build.yml b/build.yml index 7e42697ee..0cefe8c96 100644 --- a/build.yml +++ b/build.yml @@ -96,6 +96,11 @@ services: context: . dockerfile: ./preprocessors/graphic-caption/Dockerfile image: "graphic-caption:latest" + text-followup: + build: + context: . + dockerfile: ./preprocessors/text-followup/Dockerfile + image: "text-followup:latest" emotion-recognition: build: context: . @@ -263,6 +268,11 @@ services: context: . dockerfile: ./handlers/svg-action-recognition/Dockerfile image: "svg-action-recognition:latest" + text-followup-handler: + build: + context: . 
+ dockerfile: ./handlers/text-followup-handler/Dockerfile + image: "text-followup-handler:latest" # Supercollider on Fedora 34 supercollider-base: build: diff --git a/docker-compose.yml b/docker-compose.yml index 2dd461dc5..2708db28e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,8 +21,8 @@ services: profiles: [production, test, default] image: 'memcached:1.6' restart: unless-stopped - # cache memory size - 4GB - command: -m 4096 + # cache memory size - 4GB, maximum item size - 5MB + command: -m 4096 -I 5m espnet-tts: profiles: [production, test, default] @@ -104,6 +104,17 @@ services: env_file: ./config/ollama.env + text-followup: + profiles: [test, default] + image: ghcr.io/shared-reality-lab/image-preprocessor-text-followup:${REGISTRY_TAG} + restart: unless-stopped + labels: + ca.mcgill.a11y.image.preprocessor: 1 + ca.mcgill.a11y.image.port: 5000 + ca.mcgill.a11y.image.cacheTimeout: 0 + ca.mcgill.a11y.image.route: "followup" + env_file: + ./config/ollama.env graphic-tagger: profiles: [production, test, default] @@ -291,6 +302,14 @@ services: labels: ca.mcgill.a11y.image.handler: enable + text-followup-handler: + profiles: [test, default] + image: ghcr.io/shared-reality-lab/image-handler-text-followup:${REGISTRY_TAG} + restart: unless-stopped + labels: + ca.mcgill.a11y.image.handler: enable + ca.mcgill.a11y.image.route: "followup" + # end - common services # start - unicorn exclusive services diff --git a/handlers/autour-handler/Dockerfile b/handlers/autour-handler/Dockerfile index e84bd7628..b266f47bc 100644 --- a/handlers/autour-handler/Dockerfile +++ b/handlers/autour-handler/Dockerfile @@ -13,6 +13,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ COPY --from=builder --chown=node:node /usr/src/app/dist ./dist @@ -22,4 +24,7 @@ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s 
--timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." ] diff --git a/handlers/autour-handler/src/server.ts b/handlers/autour-handler/src/server.ts index 8ff0ad0b9..3d05acd54 100644 --- a/handlers/autour-handler/src/server.ts +++ b/handlers/autour-handler/src/server.ts @@ -55,7 +55,10 @@ app.post("/handler", async (req, res) => { } const autourData = preprocessors["ca.mcgill.a11y.image.preprocessor.autour"]; - if (autourData["places"].length === 0) { + + // Filter places we will not use before checking for non-zero length. + const places = autourData["places"].filter((p: { "cat": number }) => !filterCategories.includes(p["cat"])); + if (places.length === 0) { console.warn("No places detected despite running."); const response = { "request_uuid": req.body["request_uuid"], @@ -89,9 +92,8 @@ app.post("/handler", async (req, res) => { return; } - // Sort and filter POIs - // Do this before since TTS is time consuming - const places = autourData["places"].filter((p: { "cat": number }) => !filterCategories.includes(p["cat"])); + // Sort and filter POIs by distance. 
+ // Do this before TTS since it is time consuming const source = new LatLon(autourData["lat"], autourData["lon"]); for (const place of places) { const dest = new LatLon(place["ll"][0], place["ll"][1]); @@ -136,7 +138,7 @@ app.post("/handler", async (req, res) => { const translateSegments = []; // Combine map description with data to translate translateSegments.push(description); translateSegments.push(...segments); - + const translated:string[] = await fetch( "http://multilang-support/service/translate", { "method": "POST", "headers": { @@ -159,7 +161,6 @@ app.post("/handler", async (req, res) => { for(let i = 1; i < translated.length; i++) { segments[i - 1] = translated[i]; } - } catch (e) { console.error(e); console.debug(`Cannot translate to ${targetLanguage}`); @@ -170,7 +171,7 @@ app.post("/handler", async (req, res) => { return; } } - + // Forming Response let ttsResponse; try { @@ -331,6 +332,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log(`Started server on port ${port}`); }); diff --git a/handlers/hello-handler/Dockerfile b/handlers/hello-handler/Dockerfile index 8460f4cc1..681c718d2 100644 --- a/handlers/hello-handler/Dockerfile +++ b/handlers/hello-handler/Dockerfile @@ -1,5 +1,7 @@ FROM node:alpine +RUN apk add --no-cache curl + WORKDIR /usr/src/app # Apparently splittig this up is good for layers @@ -17,4 +19,7 @@ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "dist/server.js" ] diff --git a/handlers/hello-handler/src/server.ts b/handlers/hello-handler/src/server.ts index 70eaaac49..a1fdf630b 100644 --- a/handlers/hello-handler/src/server.ts +++ b/handlers/hello-handler/src/server.ts @@ -94,6 +94,10 @@ app.post("/handler", async (req, res) => { } 
}); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log(`Started server on port ${port}`); }); diff --git a/handlers/hello-svg-handler/Dockerfile b/handlers/hello-svg-handler/Dockerfile index f7b8cd505..e73e6e56b 100644 --- a/handlers/hello-svg-handler/Dockerfile +++ b/handlers/hello-svg-handler/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/hello-svg-handler/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "hello_svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] diff --git a/handlers/hello-svg-handler/hello_svg.py b/handlers/hello-svg-handler/hello_svg.py index ba4ec8c97..45c6d4bfe 100644 --- a/handlers/hello-svg-handler/hello_svg.py +++ b/handlers/hello-svg-handler/hello_svg.py @@ -21,6 +21,8 @@ import logging import time import drawSvg as draw +from datetime import datetime + app = Flask(__name__) @@ -114,5 +116,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=False) diff --git a/handlers/high-charts/Dockerfile b/handlers/high-charts/Dockerfile index b40433c64..b93815754 100644 --- a/handlers/high-charts/Dockerfile +++ b/handlers/high-charts/Dockerfile @@ -9,6 +9,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/dist ./dist COPY --from=builder 
--chown=node:node /usr/src/app/node_modules ./node_modules @@ -17,4 +19,7 @@ COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." ] diff --git a/handlers/high-charts/src/server.ts b/handlers/high-charts/src/server.ts index 82bc5bc22..33511c663 100644 --- a/handlers/high-charts/src/server.ts +++ b/handlers/high-charts/src/server.ts @@ -257,6 +257,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log("Started server on port " + port); }); diff --git a/handlers/map-tactile-svg/Dockerfile b/handlers/map-tactile-svg/Dockerfile index ca0a7ed5f..91f2e6f45 100644 --- a/handlers/map-tactile-svg/Dockerfile +++ b/handlers/map-tactile-svg/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.11 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/map-tactile-svg/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "map-svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] diff --git a/handlers/map-tactile-svg/map-svg.py b/handlers/map-tactile-svg/map-svg.py index a3c46bc7d..2282d1c87 100644 --- a/handlers/map-tactile-svg/map-svg.py +++ b/handlers/map-tactile-svg/map-svg.py @@ -21,6 +21,7 @@ import logging import time import drawSvg as draw +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -467,5 +468,16 @@ def notNoneorBlank(x): return False +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify 
if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/motd/Dockerfile b/handlers/motd/Dockerfile index 8cf2268d9..b39376e0e 100644 --- a/handlers/motd/Dockerfile +++ b/handlers/motd/Dockerfile @@ -8,6 +8,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/dist ./dist COPY --from=builder --chown=node:node /usr/src/app/node_modules ./node_modules @@ -16,4 +18,7 @@ COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." ] diff --git a/handlers/motd/src/server.ts b/handlers/motd/src/server.ts index caf5c1ce2..2489271cb 100644 --- a/handlers/motd/src/server.ts +++ b/handlers/motd/src/server.ts @@ -86,6 +86,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log("Started server on port " + port); }); diff --git a/handlers/ocr-handler/Dockerfile b/handlers/ocr-handler/Dockerfile index 367e2500f..7bc845ee4 100644 --- a/handlers/ocr-handler/Dockerfile +++ b/handlers/ocr-handler/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.9.5-alpine -RUN adduser --disabled-password python +RUN apk add --no-cache curl && adduser --disabled-password python + WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -18,4 +19,7 @@ EXPOSE 80 ENV FLASK_APP=server.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "gunicorn", "server:app", "-b", 
"0.0.0.0:80", "--capture-output", "--log-level=debug" ] diff --git a/handlers/ocr-handler/server.py b/handlers/ocr-handler/server.py index 395bf4b1f..075f9b91a 100644 --- a/handlers/ocr-handler/server.py +++ b/handlers/ocr-handler/server.py @@ -20,6 +20,7 @@ import logging import jsonschema from flask import Flask, request, jsonify +from datetime import datetime app = Flask(__name__) @@ -189,5 +190,16 @@ def get_article(word): return 'a ' +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=80, debug=True) diff --git a/handlers/osm-streets-handler/Dockerfile b/handlers/osm-streets-handler/Dockerfile index f7c5cbc43..8e32fb99f 100644 --- a/handlers/osm-streets-handler/Dockerfile +++ b/handlers/osm-streets-handler/Dockerfile @@ -9,6 +9,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/dist ./dist COPY --from=builder --chown=node:node /usr/src/app/node_modules ./node_modules @@ -17,4 +19,7 @@ COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." 
] diff --git a/handlers/osm-streets-handler/src/server.ts b/handlers/osm-streets-handler/src/server.ts index 608edc69c..c0f906d59 100644 --- a/handlers/osm-streets-handler/src/server.ts +++ b/handlers/osm-streets-handler/src/server.ts @@ -215,6 +215,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log("Started server on port " + port); }); diff --git a/handlers/photo-audio-handler/Dockerfile b/handlers/photo-audio-handler/Dockerfile index 7a02c1a9e..73d864e2a 100644 --- a/handlers/photo-audio-handler/Dockerfile +++ b/handlers/photo-audio-handler/Dockerfile @@ -9,6 +9,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/dist ./dist COPY --from=builder --chown=node:node /usr/src/app/node_modules ./node_modules @@ -17,4 +19,7 @@ COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." 
] diff --git a/handlers/photo-audio-handler/package-lock.json b/handlers/photo-audio-handler/package-lock.json index aa1612d3e..4a124978e 100644 --- a/handlers/photo-audio-handler/package-lock.json +++ b/handlers/photo-audio-handler/package-lock.json @@ -195,20 +195,47 @@ } }, "node_modules/@serialport/bindings-cpp": { - "version": "10.8.0", - "resolved": "https://registry.npmjs.org/@serialport/bindings-cpp/-/bindings-cpp-10.8.0.tgz", - "integrity": "sha512-OMQNJz5kJblbmZN5UgJXLwi2XNtVLxSKmq5VyWuXQVsUIJD4l9UGHnLPqM5LD9u3HPZgDI5w7iYN7gxkQNZJUw==", + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@serialport/bindings-cpp/-/bindings-cpp-12.0.1.tgz", + "integrity": "sha512-r2XOwY2dDvbW7dKqSPIk2gzsr6M6Qpe9+/Ngs94fNaNlcTRCV02PfaoDmRgcubpNVVcLATlxSxPTIDw12dbKOg==", "hasInstallScript": true, "optional": true, "dependencies": { "@serialport/bindings-interface": "1.2.2", - "@serialport/parser-readline": "^10.2.1", - "debug": "^4.3.2", - "node-addon-api": "^5.0.0", - "node-gyp-build": "^4.3.0" + "@serialport/parser-readline": "11.0.0", + "debug": "4.3.4", + "node-addon-api": "7.0.0", + "node-gyp-build": "4.6.0" }, "engines": { - "node": ">=12.17.0 <13.0 || >=14.0.0" + "node": ">=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/serialport/donate" + } + }, + "node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-delimiter/-/parser-delimiter-11.0.0.tgz", + "integrity": "sha512-aZLJhlRTjSmEwllLG7S4J8s8ctRAS0cbvCpO87smLvl3e4BgzbVgF6Z6zaJd3Aji2uSiYgfedCdNc4L6W+1E2g==", + "optional": true, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://opencollective.com/serialport/donate" + } + }, + "node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-readline/-/parser-readline-11.0.0.tgz", + "integrity": 
"sha512-rRAivhRkT3YO28WjmmG4FQX6L+KMb5/ikhyylRfzWPw0nSXy97+u07peS9CbHqaNvJkMhH1locp2H36aGMOEIA==", + "optional": true, + "dependencies": { + "@serialport/parser-delimiter": "11.0.0" + }, + "engines": { + "node": ">=12.0.0" }, "funding": { "url": "https://opencollective.com/serialport/donate" @@ -224,9 +251,9 @@ } }, "node_modules/@serialport/parser-byte-length": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-byte-length/-/parser-byte-length-10.5.0.tgz", - "integrity": "sha512-eHhr4lHKboq1OagyaXAqkemQ1XyoqbLQC8XJbvccm95o476TmEdW5d7AElwZV28kWprPW68ZXdGF2VXCkJgS2w==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-byte-length/-/parser-byte-length-12.0.0.tgz", + "integrity": "sha512-0ei0txFAj+s6FTiCJFBJ1T2hpKkX8Md0Pu6dqMrYoirjPskDLJRgZGLqoy3/lnU1bkvHpnJO+9oJ3PB9v8rNlg==", "optional": true, "engines": { "node": ">=12.0.0" @@ -236,9 +263,9 @@ } }, "node_modules/@serialport/parser-cctalk": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-cctalk/-/parser-cctalk-10.5.0.tgz", - "integrity": "sha512-Iwsdr03xmCKAiibLSr7b3w6ZUTBNiS+PwbDQXdKU/clutXjuoex83XvsOtYVcNZmwJlVNhAUbkG+FJzWwIa4DA==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-cctalk/-/parser-cctalk-12.0.0.tgz", + "integrity": "sha512-0PfLzO9t2X5ufKuBO34DQKLXrCCqS9xz2D0pfuaLNeTkyGUBv426zxoMf3rsMRodDOZNbFblu3Ae84MOQXjnZw==", "optional": true, "engines": { "node": ">=12.0.0" @@ -248,9 +275,9 @@ } }, "node_modules/@serialport/parser-delimiter": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-delimiter/-/parser-delimiter-10.5.0.tgz", - "integrity": "sha512-/uR/yT3jmrcwnl2FJU/2ySvwgo5+XpksDUR4NF/nwTS5i3CcuKS+FKi/tLzy1k8F+rCx5JzpiK+koqPqOUWArA==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-delimiter/-/parser-delimiter-12.0.0.tgz", + "integrity": 
"sha512-gu26tVt5lQoybhorLTPsH2j2LnX3AOP2x/34+DUSTNaUTzu2fBXw+isVjQJpUBFWu6aeQRZw5bJol5X9Gxjblw==", "optional": true, "engines": { "node": ">=12.0.0" @@ -260,9 +287,9 @@ } }, "node_modules/@serialport/parser-inter-byte-timeout": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-inter-byte-timeout/-/parser-inter-byte-timeout-10.5.0.tgz", - "integrity": "sha512-WPvVlSx98HmmUF9jjK6y9mMp3Wnv6JQA0cUxLeZBgS74TibOuYG3fuUxUWGJALgAXotOYMxfXSezJ/vSnQrkhQ==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-inter-byte-timeout/-/parser-inter-byte-timeout-12.0.0.tgz", + "integrity": "sha512-GnCh8K0NAESfhCuXAt+FfBRz1Cf9CzIgXfp7SdMgXwrtuUnCC/yuRTUFWRvuzhYKoAo1TL0hhUo77SFHUH1T/w==", "optional": true, "engines": { "node": ">=12.0.0" @@ -272,21 +299,21 @@ } }, "node_modules/@serialport/parser-packet-length": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-packet-length/-/parser-packet-length-10.5.0.tgz", - "integrity": "sha512-jkpC/8w4/gUBRa2Teyn7URv1D7T//0lGj27/4u9AojpDVXsR6dtdcTG7b7dNirXDlOrSLvvN7aS5/GNaRlEByw==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-packet-length/-/parser-packet-length-12.0.0.tgz", + "integrity": "sha512-p1hiCRqvGHHLCN/8ZiPUY/G0zrxd7gtZs251n+cfNTn+87rwcdUeu9Dps3Aadx30/sOGGFL6brIRGK4l/t7MuQ==", "optional": true, "engines": { "node": ">=8.6.0" } }, "node_modules/@serialport/parser-readline": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-readline/-/parser-readline-10.5.0.tgz", - "integrity": "sha512-0aXJknodcl94W9zSjvU+sLdXiyEG2rqjQmvBWZCr8wJZjWEtv3RgrnYiWq4i2OTOyC8C/oPK8ZjpBjQptRsoJQ==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-readline/-/parser-readline-12.0.0.tgz", + "integrity": "sha512-O7cywCWC8PiOMvo/gglEBfAkLjp/SENEML46BXDykfKP5mTPM46XMaX1L0waWU6DXJpBgjaL7+yX6VriVPbN4w==", "optional": true, "dependencies": { - 
"@serialport/parser-delimiter": "10.5.0" + "@serialport/parser-delimiter": "12.0.0" }, "engines": { "node": ">=12.0.0" @@ -296,9 +323,9 @@ } }, "node_modules/@serialport/parser-ready": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-ready/-/parser-ready-10.5.0.tgz", - "integrity": "sha512-QIf65LTvUoxqWWHBpgYOL+soldLIIyD1bwuWelukem2yDZVWwEjR288cLQ558BgYxH4U+jLAQahhqoyN1I7BaA==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-ready/-/parser-ready-12.0.0.tgz", + "integrity": "sha512-ygDwj3O4SDpZlbrRUraoXIoIqb8sM7aMKryGjYTIF0JRnKeB1ys8+wIp0RFMdFbO62YriUDextHB5Um5cKFSWg==", "optional": true, "engines": { "node": ">=12.0.0" @@ -308,9 +335,9 @@ } }, "node_modules/@serialport/parser-regex": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-regex/-/parser-regex-10.5.0.tgz", - "integrity": "sha512-9jnr9+PCxRoLjtGs7uxwsFqvho+rxuJlW6ZWSB7oqfzshEZWXtTJgJRgac/RuLft4hRlrmRz5XU40i3uoL4HKw==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-regex/-/parser-regex-12.0.0.tgz", + "integrity": "sha512-dCAVh4P/pZrLcPv9NJ2mvPRBg64L5jXuiRxIlyxxdZGH4WubwXVXY/kBTihQmiAMPxbT3yshSX8f2+feqWsxqA==", "optional": true, "engines": { "node": ">=12.0.0" @@ -320,9 +347,9 @@ } }, "node_modules/@serialport/parser-slip-encoder": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/parser-slip-encoder/-/parser-slip-encoder-10.5.0.tgz", - "integrity": "sha512-wP8m+uXQdkWSa//3n+VvfjLthlabwd9NiG6kegf0fYweLWio8j4pJRL7t9eTh2Lbc7zdxuO0r8ducFzO0m8CQw==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-slip-encoder/-/parser-slip-encoder-12.0.0.tgz", + "integrity": "sha512-0APxDGR9YvJXTRfY+uRGhzOhTpU5akSH183RUcwzN7QXh8/1jwFsFLCu0grmAUfi+fItCkR+Xr1TcNJLR13VNA==", "optional": true, "engines": { "node": ">=12.0.0" @@ -332,9 +359,9 @@ } }, "node_modules/@serialport/parser-spacepacket": { - "version": "10.5.0", - 
"resolved": "https://registry.npmjs.org/@serialport/parser-spacepacket/-/parser-spacepacket-10.5.0.tgz", - "integrity": "sha512-BEZ/HAEMwOd8xfuJSeI/823IR/jtnThovh7ils90rXD4DPL1ZmrP4abAIEktwe42RobZjIPfA4PaVfyO0Fjfhg==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/parser-spacepacket/-/parser-spacepacket-12.0.0.tgz", + "integrity": "sha512-dozONxhPC/78pntuxpz/NOtVps8qIc/UZzdc/LuPvVsqCoJXiRxOg6ZtCP/W58iibJDKPZPAWPGYeZt9DJxI+Q==", "optional": true, "engines": { "node": ">=12.0.0" @@ -344,13 +371,13 @@ } }, "node_modules/@serialport/stream": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/@serialport/stream/-/stream-10.5.0.tgz", - "integrity": "sha512-gbcUdvq9Kyv2HsnywS7QjnEB28g+6OGB5Z8TLP7X+UPpoMIWoUsoQIq5Kt0ZTgMoWn3JGM2lqwTsSHF+1qhniA==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@serialport/stream/-/stream-12.0.0.tgz", + "integrity": "sha512-9On64rhzuqKdOQyiYLYv2lQOh3TZU/D3+IWCR5gk0alPel2nwpp4YwDEGiUBfrQZEdQ6xww0PWkzqth4wqwX3Q==", "optional": true, "dependencies": { "@serialport/bindings-interface": "1.2.2", - "debug": "^4.3.2" + "debug": "4.3.4" }, "engines": { "node": ">=12.0.0" @@ -762,9 +789,9 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -774,7 +801,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -827,16 +854,25 @@ "node": ">= 0.8" } }, - "node_modules/call-bind": { - 
"version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", "dependencies": { - "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -914,9 +950,9 @@ } }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "engines": { "node": ">= 0.6" } @@ -927,9 +963,9 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + 
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "dependencies": { "path-key": "^3.1.0", @@ -963,22 +999,6 @@ "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -1020,26 +1040,36 @@ "node": ">=6.0.0" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "engines": { "node": ">= 0.8" } }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", "engines": { "node": ">= 0.4" } @@ -1052,6 +1082,17 @@ "node": ">= 0.4" } }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -1284,36 +1325,36 @@ } }, "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + 
"cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -1322,6 +1363,10 @@ }, "engines": { "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/express/node_modules/debug": { @@ -1416,12 +1461,12 @@ } }, "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "dependencies": { "debug": "2.6.9", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", @@ -1511,15 +1556,20 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": 
"sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -1528,6 +1578,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -1596,11 +1658,11 @@ } }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -1621,32 +1683,10 @@ "node": ">=8" } }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "dependencies": { - 
"es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "engines": { "node": ">= 0.4" }, @@ -1877,6 +1917,14 @@ "node": ">=10" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -1886,9 +1934,12 @@ } }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge2": { "version": "1.4.1", @@ -1908,12 +1959,12 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -1989,9 +2040,9 @@ } }, "node_modules/node-addon-api": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", - "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.0.0.tgz", + "integrity": "sha512-vgbBJTS4m5/KkE16t5Ly0WW9hz46swAstv0hYYwMtbG7AznRhNyfLRe8HZAiWIpcHzoO7HxhLuBQj9rJ/Ho0ZA==", "optional": true }, "node_modules/node-gyp-build": { @@ -2006,9 +2057,12 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -2051,17 +2105,17 @@ } }, "node_modules/osc": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/osc/-/osc-2.4.4.tgz", - 
"integrity": "sha512-YJr2bUCQMc9BIaq1LXgqYpt5Ii7wNy2n0e0BkQiCSziMNrrsYHhH5OlExNBgCrQsum60EgXZ32lFsvR4aUf+ew==", + "version": "2.4.5", + "resolved": "https://registry.npmjs.org/osc/-/osc-2.4.5.tgz", + "integrity": "sha512-Nc4/qcl+vA/CMxiKS1xrYgzjfnyB3W94gZnrkn3eTzihlndbEml6+wj1YQNKBI4r+qrw3obCcEoU7SVH/OxpxA==", "dependencies": { "long": "4.0.0", "slip": "1.0.2", "wolfy87-eventemitter": "5.2.9", - "ws": "8.13.0" + "ws": "8.18.0" }, "optionalDependencies": { - "serialport": "10.5.0" + "serialport": "12.0.0" } }, "node_modules/p-limit": { @@ -2142,9 +2196,9 @@ } }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" }, "node_modules/path-type": { "version": "4.0.0", @@ -2205,11 +2259,11 @@ } }, "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -2365,9 +2419,9 @@ } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + 
"integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -2400,69 +2454,61 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/serialport": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/serialport/-/serialport-10.5.0.tgz", - "integrity": "sha512-7OYLDsu5i6bbv3lU81pGy076xe0JwpK6b49G6RjNvGibstUqQkI+I3/X491yBGtf4gaqUdOgoU1/5KZ/XxL4dw==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/serialport/-/serialport-12.0.0.tgz", + "integrity": "sha512-AmH3D9hHPFmnF/oq/rvigfiAouAKyK/TjnrkwZRYSFZxNggJxwvbAbfYrLeuvq7ktUdhuHdVdSjj852Z55R+uA==", "optional": true, "dependencies": { "@serialport/binding-mock": "10.2.2", - "@serialport/bindings-cpp": "10.8.0", - "@serialport/parser-byte-length": "10.5.0", - "@serialport/parser-cctalk": "10.5.0", - "@serialport/parser-delimiter": "10.5.0", - "@serialport/parser-inter-byte-timeout": "10.5.0", - "@serialport/parser-packet-length": "10.5.0", - "@serialport/parser-readline": "10.5.0", - "@serialport/parser-ready": "10.5.0", - "@serialport/parser-regex": "10.5.0", - "@serialport/parser-slip-encoder": "10.5.0", - "@serialport/parser-spacepacket": "10.5.0", - "@serialport/stream": "10.5.0", - "debug": "^4.3.3" - }, - "engines": { - 
"node": ">=12.0.0" + "@serialport/bindings-cpp": "12.0.1", + "@serialport/parser-byte-length": "12.0.0", + "@serialport/parser-cctalk": "12.0.0", + "@serialport/parser-delimiter": "12.0.0", + "@serialport/parser-inter-byte-timeout": "12.0.0", + "@serialport/parser-packet-length": "12.0.0", + "@serialport/parser-readline": "12.0.0", + "@serialport/parser-ready": "12.0.0", + "@serialport/parser-regex": "12.0.0", + "@serialport/parser-slip-encoder": "12.0.0", + "@serialport/parser-spacepacket": "12.0.0", + "@serialport/stream": "12.0.0", + "debug": "4.3.4" + }, + "engines": { + "node": ">=16.0.0" }, "funding": { "url": "https://opencollective.com/serialport/donate" } }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "dependencies": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" }, "engines": { "node": ">= 0.8.0" } }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ 
-2490,14 +2536,65 @@ } }, "node_modules/side-channel": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": 
"sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -2727,9 +2824,9 @@ "dev": true }, "node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "engines": { "node": ">=10.0.0" }, diff --git a/handlers/photo-audio-handler/src/server.ts b/handlers/photo-audio-handler/src/server.ts index f087bc8a7..2ca7544cf 100644 --- a/handlers/photo-audio-handler/src/server.ts +++ b/handlers/photo-audio-handler/src/server.ts @@ -62,21 +62,24 @@ app.post("/handler", async (req, res) => { const objDet = preprocessors["ca.mcgill.a11y.image.preprocessor.objectDetection"]; const objGroup = preprocessors["ca.mcgill.a11y.image.preprocessor.grouping"]; const action = preprocessors["ca.mcgill.a11y.image.preprocessor.actionRecognition"]; - const collageDetector = preprocessors["ca.mcgill.a11y.image.preprocessor.collageDetector"] + const collageDetector = preprocessors["ca.mcgill.a11y.image.preprocessor.collageDetector"]; + const graphicCaption = preprocessors["ca.mcgill.a11y.image.preprocessor.graphic-caption"]; const targetLanguage = req.body["language"]; // Ignore secondCat since it isn't useful on its own - if (!(semseg && semseg?.segments) && !(objDet && objDet?.objects) && !objGroup) { + if (!(semseg && semseg?.segments) && !(objDet && objDet?.objects) && !objGroup && !graphicCaption) { console.debug("No usable 
preprocessor data! Can't render."); const response = utils.generateEmptyResponse(req.body["request_uuid"]); res.json(response); return; } // Filter objects - utils.filterObjectsBySize(objDet, objGroup); + if (objDet) { + utils.filterObjectsBySize(objDet, objGroup); + } - if (semseg?.segments.length === 0 && objDet?.objects.length === 0) { - console.debug("No segments or objects detected! Can't render."); + if (semseg?.segments.length === 0 && objDet?.objects.length === 0 && !graphicCaption) { + console.debug("No segments, objects or caption detected! Can't render."); const response = utils.generateEmptyResponse(req.body["request_uuid"]); res.json(response); return; @@ -101,6 +104,14 @@ app.post("/handler", async (req, res) => { } ttsData.push({"value": utils.generateIntro(secondCat), "type": "text"}); + + if (graphicCaption && graphicCaption["caption"]) { + ttsData.push(...utils.generateCaption(graphicCaption)); + if ((semseg && semseg["segments"].length > 0) || (objDet && objGroup && objDet["objects"].length > 0)) { + ttsData.push({"value": "It also", "type": "text"}); + } + } + if (semseg && semseg["segments"].length > 0) { // Use all segments returned for now. // Filtering may be helpful later. 
@@ -109,7 +120,7 @@ app.post("/handler", async (req, res) => { ttsData.push({"value": "It also", "type": "text"}); } } - + if (objDet && objGroup && objDet["objects"].length > 0) { ttsData.push(...utils.generateObjDet(objDet, objGroup, action)); } @@ -134,7 +145,7 @@ app.post("/handler", async (req, res) => { targetLanguage ); console.debug("Mapping translated values to ttsData") - + for (let i = 0; i < ttsData.length; i++) { ttsData[i]["value"] = translatedValues[i]; } @@ -162,7 +173,7 @@ app.post("/handler", async (req, res) => { } else { console.debug("Skipped text rendering."); } - + // Construct SimpleAudio (if requested) if (hasSimple || hasSegment) { @@ -274,7 +285,7 @@ app.post("/handler", async (req, res) => { } } - // Translate renderings' description before sending response + // Translate renderings' description before sending response if (targetLanguage !== "en") { try { console.debug("Translating renderings description to " + targetLanguage); @@ -305,6 +316,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log("Started server on port " + port); }); diff --git a/handlers/photo-audio-handler/src/utils.ts b/handlers/photo-audio-handler/src/utils.ts index ddf9ec2d7..764d973ca 100644 --- a/handlers/photo-audio-handler/src/utils.ts +++ b/handlers/photo-audio-handler/src/utils.ts @@ -90,31 +90,44 @@ export function generateSemSeg(semSeg: { "segments": Record[] } } export function filterObjectsBySize(objDet: ObjDet, objGroup: ObjGroup) { - const groupsToDelete: number[] = []; - for (const group of objGroup["grouped"]) { - const objs = objDet["objects"].filter((x: { "ID": number }) => group["IDs"].includes(x["ID"])) as Obj[]; - const totalArea = objs.map(a => a.area).reduce((a, b) => a+ b, 0); - if (totalArea < MIN_OBJ_AREA) { - groupsToDelete.push(objGroup["grouped"].indexOf(group)); + if 
(objGroup) { + const groupsToDelete: number[] = []; + + for (const group of objGroup["grouped"]) { + const objs = objDet["objects"].filter((x: { "ID": number }) => group["IDs"].includes(x["ID"])) as Obj[]; + const totalArea = objs.map(a => a.area).reduce((a, b) => a+ b, 0); + if (totalArea < MIN_OBJ_AREA) { + groupsToDelete.push(objGroup["grouped"].indexOf(group)); + } } - } - for (const idx of groupsToDelete) { - for (const objId of objGroup["grouped"][idx]["IDs"]) { - objDet["objects"].splice(objDet["objects"].findIndex(obj => obj["ID"] === objId), 1); + for (const idx of groupsToDelete) { + for (const objId of objGroup["grouped"][idx]["IDs"]) { + objDet["objects"].splice(objDet["objects"].findIndex(obj => obj["ID"] === objId), 1); + } + delete objGroup["grouped"][idx]; } - delete objGroup["grouped"][idx]; - } - const ungroupedToDelete: number[] = []; - for (const idx of objGroup["ungrouped"]) { - const obj = objDet["objects"].find((x: { "ID": number }) => x["ID"] === idx); - if (obj && obj.area < MIN_OBJ_AREA) { - ungroupedToDelete.push(idx); + const ungroupedToDelete: number[] = []; + for (const idx of objGroup["ungrouped"]) { + const obj = objDet["objects"].find((x: { "ID": number }) => x["ID"] === idx); + if (obj && obj.area < MIN_OBJ_AREA) { + ungroupedToDelete.push(idx); + } + } + for (const idx of ungroupedToDelete) { + objDet["objects"].splice(objDet["objects"].findIndex(obj => obj["ID"] === idx), 1); + objGroup["ungrouped"].splice(objGroup["ungrouped"].indexOf(idx), 1); + } + } else { + const objectsToDelete: number[] = []; + for (const idx in objDet["objects"]) { + if (objDet["objects"][idx].area < MIN_OBJ_AREA) { + objectsToDelete.push(objDet["objects"][idx]["ID"]); + } + } + for (const idx of objectsToDelete) { + objDet["objects"].splice(objDet["objects"].findIndex(obj => obj["ID"] === idx), 1); } - } - for (const idx of ungroupedToDelete) { - objDet["objects"].splice(objDet["objects"].findIndex(obj => obj["ID"] === idx), 1); - 
objGroup["ungrouped"].splice(objGroup["ungrouped"].indexOf(idx), 1); } } @@ -247,6 +260,17 @@ export function generateObjDet(objDet: ObjDet, objGroup: ObjGroup, actionRec: Ac return objects; } +export function generateCaption(capObj: {caption: string}): TTSSegment[] { + return [{ + "type": "text", + "value": "has the following description:" + }, { + "type": "text", + "value": capObj["caption"], + "label": "Generated caption" + }]; +} + /** * Get translation from multilang-support service * @param inputSegment array of text to be translated diff --git a/handlers/photo-audio-haptics-handler/Dockerfile b/handlers/photo-audio-haptics-handler/Dockerfile index 338e671e0..2eb80cf1a 100644 --- a/handlers/photo-audio-haptics-handler/Dockerfile +++ b/handlers/photo-audio-haptics-handler/Dockerfile @@ -9,6 +9,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ COPY --from=builder --chown=node:node /usr/src/app/dist ./dist @@ -17,4 +19,7 @@ COPY --from=builder --chown=node:node /usr/src/app/node_modules ./node_modules ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." 
] diff --git a/handlers/photo-audio-haptics-handler/src/server.ts b/handlers/photo-audio-haptics-handler/src/server.ts index 98fc8ecaa..64a0130e7 100644 --- a/handlers/photo-audio-haptics-handler/src/server.ts +++ b/handlers/photo-audio-haptics-handler/src/server.ts @@ -265,6 +265,10 @@ app.post("/handler", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + // Run the server app.listen(port, () => { console.log(`Started server on port ${port}`); diff --git a/handlers/photo-tactile-svg/Dockerfile b/handlers/photo-tactile-svg/Dockerfile index 620ab7b51..e2d09a15a 100644 --- a/handlers/photo-tactile-svg/Dockerfile +++ b/handlers/photo-tactile-svg/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/photo-tactile-svg/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "tactile_svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/handlers/photo-tactile-svg/tactile_svg.py b/handlers/photo-tactile-svg/tactile_svg.py index c6b31d148..f859e9733 100644 --- a/handlers/photo-tactile-svg/tactile_svg.py +++ b/handlers/photo-tactile-svg/tactile_svg.py @@ -22,6 +22,7 @@ import time import drawSvg as draw import inflect +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -313,5 +314,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, 
debug=True) diff --git a/handlers/svg-action-recognition/Dockerfile b/handlers/svg-action-recognition/Dockerfile index 249d768a0..151e8100e 100644 --- a/handlers/svg-action-recognition/Dockerfile +++ b/handlers/svg-action-recognition/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/svg-action-recognition/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "ar_svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/handlers/svg-action-recognition/ar_svg.py b/handlers/svg-action-recognition/ar_svg.py index 89e290184..4769d68be 100644 --- a/handlers/svg-action-recognition/ar_svg.py +++ b/handlers/svg-action-recognition/ar_svg.py @@ -21,6 +21,7 @@ import logging import time import drawSvg as draw +from datetime import datetime app = Flask(__name__) @@ -237,5 +238,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/svg-depth-map/Dockerfile b/handlers/svg-depth-map/Dockerfile index 0b0cc1a05..1eab0bb9f 100644 --- a/handlers/svg-depth-map/Dockerfile +++ b/handlers/svg-depth-map/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/svg-depth-map/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health 
|| exit 1 + CMD ["gunicorn", "svg-depth-map:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] diff --git a/handlers/svg-depth-map/svg-depth-map.py b/handlers/svg-depth-map/svg-depth-map.py index 7c5e612a0..3ffb23b27 100644 --- a/handlers/svg-depth-map/svg-depth-map.py +++ b/handlers/svg-depth-map/svg-depth-map.py @@ -22,6 +22,7 @@ import time import svgwrite import base64 +from datetime import datetime app = Flask(__name__) @@ -189,5 +190,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/svg-object-detection/Dockerfile b/handlers/svg-object-detection/Dockerfile index aebb626de..d82f2b7ef 100644 --- a/handlers/svg-object-detection/Dockerfile +++ b/handlers/svg-object-detection/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/svg-object-detection/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "od_svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/handlers/svg-object-detection/od_svg.py b/handlers/svg-object-detection/od_svg.py index 6025219ce..bc0a686cc 100644 --- a/handlers/svg-object-detection/od_svg.py +++ b/handlers/svg-object-detection/od_svg.py @@ -21,6 +21,7 @@ import logging import time import drawSvg as draw +from datetime import datetime app = Flask(__name__) @@ -213,5 +214,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to 
verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/svg-open-street-map/Dockerfile b/handlers/svg-open-street-map/Dockerfile index e4ab7b6e8..c3c23944d 100644 --- a/handlers/svg-open-street-map/Dockerfile +++ b/handlers/svg-open-street-map/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.11 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/svg-open-street-map/ /usr/src/app EXPOSE 5000 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD ["gunicorn", "osm-svg:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/handlers/svg-open-street-map/osm-svg.py b/handlers/svg-open-street-map/osm-svg.py index 75d23baec..e2ffce66c 100644 --- a/handlers/svg-open-street-map/osm-svg.py +++ b/handlers/svg-open-street-map/osm-svg.py @@ -21,6 +21,8 @@ import logging import time import drawSvg as draw +from datetime import datetime + app = Flask(__name__) # Configure the logging settings @@ -345,5 +347,16 @@ def return_stroke_width(street_type): return stroke_width +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=5000, debug=True) diff --git a/handlers/svg-semantic-seg/Dockerfile b/handlers/svg-semantic-seg/Dockerfile index 0452da465..81b0f3b08 100644 --- a/handlers/svg-semantic-seg/Dockerfile +++ b/handlers/svg-semantic-seg/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10 -RUN apt-get install libcairo2 +RUN apt-get install libcairo2 curl 
RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -15,4 +15,7 @@ COPY /handlers/svg-semantic-seg/ /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "sem_seg_svg:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/handlers/svg-semantic-seg/sem_seg_svg.py b/handlers/svg-semantic-seg/sem_seg_svg.py index 62bf47812..5764acf13 100644 --- a/handlers/svg-semantic-seg/sem_seg_svg.py +++ b/handlers/svg-semantic-seg/sem_seg_svg.py @@ -22,6 +22,7 @@ import time import drawSvg as draw import random +from datetime import datetime app = Flask(__name__) @@ -197,5 +198,16 @@ def handle(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/text-followup-handler/Dockerfile b/handlers/text-followup-handler/Dockerfile new file mode 100644 index 000000000..ac89d1300 --- /dev/null +++ b/handlers/text-followup-handler/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.13 + +RUN apt-get install libcairo2 + +RUN adduser --disabled-password python +WORKDIR /usr/src/app +ENV PATH="/home/python/.local/bin:${PATH}" + +RUN pip install --upgrade pip +COPY /handlers/text-followup-handler/requirements.txt /usr/src/app/requirements.txt +RUN pip install -r requirements.txt + +COPY /schemas /usr/src/app/schemas +COPY /handlers/text-followup-handler/ /usr/src/app + +EXPOSE 80 +USER python +CMD ["gunicorn", "followup:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug" ] diff --git a/handlers/text-followup-handler/README.md b/handlers/text-followup-handler/README.md new file mode 100644 index 000000000..1eada8015 --- 
/dev/null +++ b/handlers/text-followup-handler/README.md @@ -0,0 +1,12 @@ +# Text Followup Handler + +Alpha quality: Insufficiently refined to be tested by end-users. + +![license: AGPL](https://camo.githubusercontent.com/b53b1136762ea55ee6a2d641c9f8283b8335a79b3cb95cbab5a988e678e269b8/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6c6963656e73652d4147504c2d73756363657373) [GitHub Container Registry Package](https://github.com/Shared-Reality-Lab/IMAGE-server/pkgs/container/image-handler-text-followup) + +## What is this? + +This is a [handler](https://github.com/Shared-Reality-Lab/IMAGE-server/wiki/2.-Handlers,-Preprocessors-and-Services#handlers=) component that returns a text-only response to a followup query posed by the user. + +Data from text-followup preprocessor are used to generate the response. +Only the brief response from the preprocessor is included in the handler response. \ No newline at end of file diff --git a/handlers/text-followup-handler/followup.py b/handlers/text-followup-handler/followup.py new file mode 100644 index 000000000..0ed181b62 --- /dev/null +++ b/handlers/text-followup-handler/followup.py @@ -0,0 +1,185 @@ +# Copyright (c) 2025 IMAGE Project, Shared Reality Lab, McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# You should have received a copy of the GNU Affero General Public License +# and our Additional Terms along with this program. +# If not, see +# . 
+ +from flask import Flask, jsonify, request +import json +import jsonschema +from jsonschema.exceptions import ValidationError +import logging +import time + +app = Flask(__name__) +logging.basicConfig(level=logging.DEBUG) + + +@app.route("/handler", methods=["POST"]) +def handle(): + logging.debug("Received request") + # Load necessary schema files + with open("./schemas/definitions.json") as f: + definitions_schema = json.load(f) + with open("./schemas/request.schema.json") as f: + request_schema = json.load(f) + with open("./schemas/handler-response.schema.json") as f: + response_schema = json.load(f) + with open("./schemas/renderers/text.schema.json") as f: + renderer_schema = json.load(f) + + store = { + definitions_schema["$id"]: definitions_schema, + request_schema["$id"]: request_schema, + response_schema["$id"]: response_schema, + renderer_schema["$id"]: renderer_schema + } + resolver = jsonschema.RefResolver.from_schema( + request_schema, store=store + ) + # Get and validate request contents + contents = request.get_json() + try: + logging.debug("Validating request schema") + validator = jsonschema.Draft7Validator( + request_schema, resolver=resolver + ) + validator.validate(contents) + except ValidationError as e: + logging.error(e) + return jsonify("Invalid request received!"), 400 + + preprocessors = contents['preprocessors'] + + logging.debug("Checking whether renderer is supported") + if ("ca.mcgill.a11y.image.renderer.Text" not in contents["renderers"]): + logging.debug("Text Renderer is not supported") + response = { + "request_uuid": contents["request_uuid"], + "timestamp": int(time.time()), + "renderings": [] + } + try: + validator = jsonschema.Draft7Validator( + response_schema, resolver=resolver) + validator.validate(response) + except jsonschema.exceptions.ValidationError as error: + logging.error(error) + return jsonify("Invalid Preprocessor JSON format"), 500 + logging.debug("Sending response") + return response + + # Throws error when 
text-followup preprocessor is not found + logging.debug("Checking for text-followup " + "preprocessor responses") + if not ("ca.mcgill.a11y.image.preprocessor.text-followup" + in preprocessors): + logging.debug("Text-followup preprocessor not found") + response = { + "request_uuid": contents["request_uuid"], + "timestamp": int(time.time()), + "renderings": [] + } + try: + validator = jsonschema.Draft7Validator( + response_schema, resolver=resolver) + validator.validate(response) + except jsonschema.exceptions.ValidationError as error: + logging.error(error) + return jsonify("Invalid Preprocessor JSON format"), 500 + logging.debug("Sending response") + return response + + # Checking for graphic and dimensions + logging.debug("Checking whether graphic and" + " dimensions are available") + if "graphic" in contents and "dimensions" in contents: + # If an existing graphic exists, often it is + # best to use that for convenience. + # see the following for SVG coordinate info: + # developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Positions + logging.debug("Graphic has dimensions defined") + else: + logging.debug("Graphic and/or dimensions are not defined") + response = { + "request_uuid": contents["request_uuid"], + "timestamp": int(time.time()), + "renderings": [] + } + try: + validator = jsonschema.Draft7Validator( + response_schema, resolver=resolver) + validator.validate(response) + except jsonschema.exceptions.ValidationError as error: + logging.error(error) + return jsonify("Invalid Preprocessor JSON format"), 500 + logging.debug("Sending response") + return response + # Checking whether this is an actual follow up query + logging.debug("Checking whether this is " + "an actual follow up query") + if "followup" not in contents: + logging.debug("Follow-up query is not defined") + response = { + "request_uuid": contents["request_uuid"], + "timestamp": int(time.time()), + "renderings": [] + } + try: + validator = jsonschema.Draft7Validator( + response_schema, 
resolver=resolver) + validator.validate(response) + except jsonschema.exceptions.ValidationError as error: + logging.error(error) + return jsonify("Invalid Preprocessor JSON format"), 500 + logging.debug("Sending response") + return response + + data = {"text": (preprocessors["ca.mcgill.a11y.image.preprocessor.text" + "-followup"]["response_brief"])} + + rendering = { + "type_id": "ca.mcgill.a11y.image.renderer.Text", + "description": "Response to a follow-up query", + "data": data + } + + try: + validator = jsonschema.Draft7Validator( + renderer_schema, resolver=resolver + ) + validator.validate(data) + except ValidationError as e: + logging.error(e) + logging.debug("Failed to validate the response renderer!") + return jsonify("Failed to validate the response renderer"), 500 + response = { + "request_uuid": contents["request_uuid"], + "timestamp": int(time.time()), + "renderings": [rendering] + } + try: + validator = jsonschema.Draft7Validator( + response_schema, resolver=resolver + ) + validator.validate(response) + except ValidationError as e: + logging.debug("Failed to generate a valid response") + logging.error(e) + return jsonify("Failed to generate a valid response"), 500 + logging.debug("Sending response") + return response + + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=80, debug=True) diff --git a/handlers/text-followup-handler/requirements.txt b/handlers/text-followup-handler/requirements.txt new file mode 100644 index 000000000..83ff7cdf5 --- /dev/null +++ b/handlers/text-followup-handler/requirements.txt @@ -0,0 +1,3 @@ +jsonschema==4.23.0 +Flask==3.0.3 +gunicorn \ No newline at end of file diff --git a/orchestrator/Dockerfile b/orchestrator/Dockerfile index 21c0f1cd1..fad1888c8 100644 --- a/orchestrator/Dockerfile +++ b/orchestrator/Dockerfile @@ -10,7 +10,7 @@ RUN npm run build && npm prune --production FROM node:alpine AS final -RUN apk add memcached supercronic +RUN apk add memcached supercronic curl # Set up for logging WORKDIR 
/var/log/IMAGE @@ -29,4 +29,7 @@ ENV NODE_ENV=production EXPOSE 8080 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:8080/health || exit 1 + CMD supercronic -quiet clean-cron & node dist/server.js diff --git a/orchestrator/README.md b/orchestrator/README.md index 1f2a1f58a..321826e48 100644 --- a/orchestrator/README.md +++ b/orchestrator/README.md @@ -85,7 +85,7 @@ This docker group ID changes from system to system and needs to be checked manua IMAGE uses Memcached as in-memory data store. Cache is implemented using [MemJS](https://www.npmjs.com/package/memjs). Following is the confugration to enable Cache for preprocessors: -- Cache size is configured in the docker-compose in the commad attribute under memcached service `command: -m 4096` implies cache size of 4GB. +- Cache size and max item size is configured in the docker-compose in the command attribute under memcached service `command: -m 4096 -I 5m` implies cache size of 4GB and maximum item size of 5MB. - Cache timeout is configured at the preprocessor level, with the label `ca.mcgill.a11y.image.cacheTimeout` . Label value is the timeout value in seconds. Timeout value of 0 indicates that Cache is disabled for a preprocessor. Missing `ca.mcgill.a11y.image.cacheTimeout` label on the preprocessor will default to timeout value of 0. 
diff --git a/orchestrator/src/server.ts b/orchestrator/src/server.ts index 5efe98c5a..7f6f067c6 100644 --- a/orchestrator/src/server.ts +++ b/orchestrator/src/server.ts @@ -167,23 +167,38 @@ async function runPreprocessorsParallel(data: Record, preproces data["preprocessors"] = {}; } let currentPriorityGroup: number | undefined = undefined; - let promises: Promise[] = []; // array to hold promises for preprocessor executions within the current priority group + const queue: (string | number)[][] = []; //Microservice queue for preprocessors and handlers + + + //function that dequeues everything in the queue at once, executes them and waits for them to finish processing + const processQueue = async (): Promise => { + try { + await Promise.all(queue.map(preprocessor => executePreprocessor(preprocessor, data))); + } catch (error) { + console.error(`One or more of the promises failed at priority group ${currentPriorityGroup}.`, error); + } + finally { //empty the queue + queue.length = 0; + } + }; for (const preprocessor of preprocessors) { - // check if priority group changes - if so, wait for the current promises to finish + //If the priority group changes, process the queue and move to the next group if (preprocessor[2] !== currentPriorityGroup) { - if (promises.length > 0) { - await Promise.all(promises); // wait for all preprocessors in the current group - promises = []; // reset promises for the new priority group + if (queue.length > 0) { + await processQueue(); //Process everything in the queue } currentPriorityGroup = Number(preprocessor[2]); console.debug(`Now on priority group ${currentPriorityGroup}`); } - // add the execution of the current preprocessor to the promises array - promises.push(executePreprocessor(preprocessor, data)); + + //Add the preprocessor to the queue + queue.push(preprocessor); } - if (promises.length > 0) { - await Promise.all(promises); // wait for remaining promises + + //Process any remaining items in the queue + if (queue.length > 
0) { + await processQueue(); } return data; @@ -462,6 +477,11 @@ app.get("/authenticate/:uuid/:check", async (req, res) => { } }); +// Healthcheck endpoint +app.get('/health', (req, res) => { + res.status(200).json({ status: 'healthy', timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log(`Started server on port ${port}`); }); diff --git a/preprocessors/action-recognition/Dockerfile b/preprocessors/action-recognition/Dockerfile index c466bae65..65f541f2b 100644 --- a/preprocessors/action-recognition/Dockerfile +++ b/preprocessors/action-recognition/Dockerfile @@ -7,7 +7,7 @@ ENV PYTHONUNBUFFERED=TRUE ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && \ - apt-get install --no-install-recommends wget -y && \ + apt-get install --no-install-recommends wget curl -y && \ rm -rf /var/lib/apt/lists/* && \ adduser --disabled-password python @@ -26,4 +26,7 @@ COPY --chown=python:python /preprocessors/action-recognition /app EXPOSE 5000 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + ENTRYPOINT [ "gunicorn", "action-recognition:app", "-b", "0.0.0.0:5000","--capture-output" ] \ No newline at end of file diff --git a/preprocessors/action-recognition/action-recognition.py b/preprocessors/action-recognition/action-recognition.py index 37d27ff73..29c27bab8 100644 --- a/preprocessors/action-recognition/action-recognition.py +++ b/preprocessors/action-recognition/action-recognition.py @@ -27,6 +27,7 @@ import torch from PIL import Image from io import BytesIO +from datetime import datetime from utils import detect, Classifier @@ -188,5 +189,16 @@ def run(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, 
debug=True) diff --git a/preprocessors/autour/Dockerfile b/preprocessors/autour/Dockerfile index b92c271b6..5c55891e9 100644 --- a/preprocessors/autour/Dockerfile +++ b/preprocessors/autour/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -15,4 +16,7 @@ EXPOSE 5000 ENV FLASK_APP=autour.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "autour:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/autour/autour.py b/preprocessors/autour/autour.py index 9473d5792..ac5518f52 100644 --- a/preprocessors/autour/autour.py +++ b/preprocessors/autour/autour.py @@ -21,6 +21,7 @@ import jsonschema import requests from flask import Flask, request, jsonify +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -236,5 +237,16 @@ def check_google_response(place_response): return True +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/celebrity-detector/Dockerfile b/preprocessors/celebrity-detector/Dockerfile index 9f1299fdb..ea467b85b 100644 --- a/preprocessors/celebrity-detector/Dockerfile +++ b/preprocessors/celebrity-detector/Dockerfile @@ -4,7 +4,7 @@ RUN adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" -RUN apt-get update && apt-get install -y python3-opencv wget && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y python3-opencv wget curl && rm -rf /var/lib/apt/lists/* COPY 
/preprocessors/celebrity-detector/requirements.txt /app/requirements.txt @@ -17,4 +17,7 @@ COPY /preprocessors/celebrity-detector /app EXPOSE 5000 ENV FLASK_APP=celebrity-detector.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "celebrity-detector:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/celebrity-detector/celebrity-detector.py b/preprocessors/celebrity-detector/celebrity-detector.py index 629ceb297..4d04242b5 100644 --- a/preprocessors/celebrity-detector/celebrity-detector.py +++ b/preprocessors/celebrity-detector/celebrity-detector.py @@ -27,6 +27,7 @@ from flask import Flask, request, jsonify import cv2 import numpy as np +from datetime import datetime app = Flask(__name__) @@ -219,5 +220,16 @@ def categorise(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5001, debug=True) diff --git a/preprocessors/clothes-detector/Dockerfile b/preprocessors/clothes-detector/Dockerfile index 170315854..ae8c19633 100644 --- a/preprocessors/clothes-detector/Dockerfile +++ b/preprocessors/clothes-detector/Dockerfile @@ -15,8 +15,7 @@ ENV PATH="/home/python/.local/bin:${PATH}" #install system dependencies in one layer RUN apt-get update && \ - apt-get install -y \ - python3-opencv wget build-essential \ + apt-get install -y curl python3-opencv wget build-essential \ libfreetype6-dev libpng-dev libharfbuzz-dev \ libcairo2-dev python3-dev p7zip p7zip-full \ unace zip unzip && \ diff --git a/preprocessors/clothes-detector/clothes.py b/preprocessors/clothes-detector/clothes.py index 63cdde022..b6777681e 100644 --- a/preprocessors/clothes-detector/clothes.py +++ 
b/preprocessors/clothes-detector/clothes.py @@ -34,6 +34,7 @@ from colorthief import ColorThief from yolo.utils.utils import load_classes from predictors.YOLOv3 import YOLOv3Predictor +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.NOTSET) @@ -195,5 +196,16 @@ def categorise(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5001, debug=True) diff --git a/preprocessors/collage-detector/Dockerfile b/preprocessors/collage-detector/Dockerfile index aba2367b7..36a66603d 100644 --- a/preprocessors/collage-detector/Dockerfile +++ b/preprocessors/collage-detector/Dockerfile @@ -4,7 +4,7 @@ RUN adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" -RUN apt-get update && apt-get install -y python3-opencv wget && apt-get clean && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y python3-opencv wget curl && apt-get clean && rm -rf /var/lib/apt/lists/* COPY /preprocessors/collage-detector/requirements.txt /app/requirements.txt RUN pip3 install --upgrade pip && \ @@ -16,4 +16,7 @@ COPY /preprocessors/collage-detector/ /app EXPOSE 5000 ENV FLASK_APP=detect.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "detect:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/preprocessors/collage-detector/detect.py b/preprocessors/collage-detector/detect.py index 1e0c3280e..01e1d650f 100644 --- a/preprocessors/collage-detector/detect.py +++ b/preprocessors/collage-detector/detect.py @@ -23,6 +23,7 @@ import jsonschema import logging import time +from datetime import datetime app = 
Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -96,6 +97,17 @@ def detect_collage(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) detect_collage() diff --git a/preprocessors/content-categoriser/Dockerfile b/preprocessors/content-categoriser/Dockerfile index 1553b63d1..25038fc60 100644 --- a/preprocessors/content-categoriser/Dockerfile +++ b/preprocessors/content-categoriser/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -15,4 +16,7 @@ COPY /preprocessors/content-categoriser/ /app EXPOSE 5000 ENV FLASK_APP=categoriser.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "categoriser:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/content-categoriser/categoriser.py b/preprocessors/content-categoriser/categoriser.py index b63aa6258..cf0d2f207 100644 --- a/preprocessors/content-categoriser/categoriser.py +++ b/preprocessors/content-categoriser/categoriser.py @@ -21,6 +21,7 @@ import jsonschema import logging import os +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -90,13 +91,17 @@ def categorise(): "Which of the following categories best " \ "describes this image, selecting from this enum: " possible_categories = "photograph, chart, text, other" + # override with prompt from environment variable only if it exists + prompt = os.getenv('CONTENT_CATEGORISER_PROMPT_OVERRIDE', prompt) + prompt += "[" + 
possible_categories + "]" + logging.debug("prompt: " + prompt) request_data = { "model": ollama_model, - "prompt": prompt + "[" + possible_categories + "]", + "prompt": prompt, "images": [graphic_b64], "stream": "false", - # TODO: figure out if "format": json, should actually work + "format": "json", "temperature": 0.0, "keep_alive": -1 # keep model loaded in memory indefinitely } @@ -174,6 +179,17 @@ def categorise(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) categorise() diff --git a/preprocessors/depth-map-gen/Dockerfile b/preprocessors/depth-map-gen/Dockerfile index a557cf206..1c9cb300f 100644 --- a/preprocessors/depth-map-gen/Dockerfile +++ b/preprocessors/depth-map-gen/Dockerfile @@ -4,7 +4,7 @@ ARG CUDNN="9" FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-runtime #system dependencies -RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-opencv wget git && rm -rf /var/lib/apt/lists/* +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-opencv wget git curl && rm -rf /var/lib/apt/lists/* #user/environment setup RUN adduser --disabled-password python @@ -28,6 +28,8 @@ RUN git clone https://github.com/aim-uofa/AdelaiDepth && \ COPY /preprocessors/depth-map-gen/ /app EXPOSE 5000 - USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "depth-map-generator:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/depth-map-gen/depth-map-generator.py b/preprocessors/depth-map-gen/depth-map-generator.py index bc92ddbbf..df709af1d 100644 --- a/preprocessors/depth-map-gen/depth-map-generator.py 
+++ b/preprocessors/depth-map-gen/depth-map-generator.py @@ -30,6 +30,7 @@ import logging import base64 from lib.multi_depth_model_woauxi import RelDepthModel +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -195,6 +196,17 @@ def depthgenerator(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == '__main__': app.run(host='0.0.0.0', port=5000, debug=True) depthgenerator() diff --git a/preprocessors/graphic-caption/Dockerfile b/preprocessors/graphic-caption/Dockerfile index f4751bab9..bfab05fc6 100644 --- a/preprocessors/graphic-caption/Dockerfile +++ b/preprocessors/graphic-caption/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -14,4 +15,7 @@ COPY /preprocessors/graphic-caption/ /app EXPOSE 5000 ENV FLASK_APP=caption.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "caption:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/graphic-caption/caption.py b/preprocessors/graphic-caption/caption.py index 75d248960..dcd6b77b5 100644 --- a/preprocessors/graphic-caption/caption.py +++ b/preprocessors/graphic-caption/caption.py @@ -21,6 +21,7 @@ import jsonschema import logging import os +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -82,13 +83,16 @@ def categorise(): logging.debug("OLLAMA_API_KEY looks properly formatted: " + api_key[:3] + "[redacted]") else: - logging.warn("OLLAMA_API_KEY usually starts with sk-, " - "but this one starts with: " 
+ api_key[:3]) + logging.warning("OLLAMA_API_KEY usually starts with sk-, " + "but this one starts with: " + api_key[:3]) prompt = "I am blind, so I cannot see this image. " \ "Tell me the most important aspects of it, including " \ "style, content, and the most significant aspect of the image." \ "Answer with maximum one sentence. " + prompt = os.getenv('GRAPHIC_CAPTION_PROMPT_OVERRIDE', prompt) + logging.debug("prompt: " + prompt) + request_data = { "model": ollama_model, "prompt": prompt, @@ -149,6 +153,17 @@ def categorise(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) categorise() diff --git a/preprocessors/graphic-tagger/Dockerfile b/preprocessors/graphic-tagger/Dockerfile index 673485e8f..c4ba0500f 100644 --- a/preprocessors/graphic-tagger/Dockerfile +++ b/preprocessors/graphic-tagger/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" ENV FLASK_APP=azure_api.py @@ -14,4 +15,7 @@ COPY /preprocessors/graphic-tagger /app EXPOSE 5000 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "azure_api:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/graphic-tagger/azure_api.py b/preprocessors/graphic-tagger/azure_api.py index e987628be..7fdb86219 100644 --- a/preprocessors/graphic-tagger/azure_api.py +++ b/preprocessors/graphic-tagger/azure_api.py @@ -26,6 +26,7 @@ import base64 import os from flask import Flask, request, jsonify +from datetime import datetime app = Flask(__name__) 
logging.basicConfig(level=logging.DEBUG) @@ -191,5 +192,16 @@ def categorise(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/grouping/Dockerfile b/preprocessors/grouping/Dockerfile index 8108d089c..c27846376 100644 --- a/preprocessors/grouping/Dockerfile +++ b/preprocessors/grouping/Dockerfile @@ -2,7 +2,6 @@ FROM python:3.11-alpine3.20 RUN apk add --no-cache curl && \ adduser --disabled-password python - WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" diff --git a/preprocessors/grouping/grouping.py b/preprocessors/grouping/grouping.py index cb1ea01ba..a1f2c9c91 100644 --- a/preprocessors/grouping/grouping.py +++ b/preprocessors/grouping/grouping.py @@ -22,6 +22,7 @@ import collections from math import sqrt from operator import itemgetter +from datetime import datetime app = Flask(__name__) @@ -136,12 +137,15 @@ def readImage(): return response -@app.route('/health', methods=['GET']) +@app.route("/health", methods=["GET"]) def health(): """ - health check endpoint to verify if the service is up. + Health check endpoint to verify if the service is running """ - return jsonify({"status": "healthy"}), 200 + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 if __name__ == "__main__": diff --git a/preprocessors/hello-preprocessor/Dockerfile b/preprocessors/hello-preprocessor/Dockerfile index 5b69bdedf..a07f8edd1 100644 --- a/preprocessors/hello-preprocessor/Dockerfile +++ b/preprocessors/hello-preprocessor/Dockerfile @@ -1,5 +1,7 @@ FROM node:alpine +RUN apk add --no-cache curl + WORKDIR /usr/src/app # Apparently splittig this up is good for layers @@ -11,8 +13,9 @@ COPY /preprocessors/hello-preprocessor/ . 
RUN npm run build ENV NODE_ENV=production - EXPOSE 8080 - USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:8080/health || exit 1 + CMD [ "node", "dist/server.js" ] diff --git a/preprocessors/hello-preprocessor/src/server.ts b/preprocessors/hello-preprocessor/src/server.ts index 5e40788cd..2103a0c95 100644 --- a/preprocessors/hello-preprocessor/src/server.ts +++ b/preprocessors/hello-preprocessor/src/server.ts @@ -53,6 +53,11 @@ app.post("/preprocessor", (req, res) => { } }); +// Healthcheck endpoint +app.get('/health', (req, res) => { + res.status(200).json({ status: 'healthy', timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log(`Started server on port ${port}`); }); diff --git a/preprocessors/line-charts/Dockerfile b/preprocessors/line-charts/Dockerfile index 7ae3128fb..33502e65a 100644 --- a/preprocessors/line-charts/Dockerfile +++ b/preprocessors/line-charts/Dockerfile @@ -1,6 +1,8 @@ FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python + WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" ENV FLASK_APP=charts.py @@ -14,6 +16,8 @@ COPY /schemas /app/schemas COPY /preprocessors/line-charts/ /app EXPOSE 5000 - USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "charts:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/line-charts/charts.py b/preprocessors/line-charts/charts.py index 082d0d80c..e0d26c561 100644 --- a/preprocessors/line-charts/charts.py +++ b/preprocessors/line-charts/charts.py @@ -23,6 +23,7 @@ from charts_utils import getLowerPointsOnLeft, getHigherPointsOnLeft from charts_utils import getLowerPointsOnRight, getHigherPointsOnRight +from datetime import datetime app = Flask(__name__) 
logging.basicConfig(level=logging.DEBUG) @@ -129,5 +130,16 @@ def get_chart_info(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/mmsemseg/segment.py b/preprocessors/mmsemseg/segment.py index 2f272f647..8290193c0 100644 --- a/preprocessors/mmsemseg/segment.py +++ b/preprocessors/mmsemseg/segment.py @@ -34,6 +34,7 @@ from time import time import logging +from datetime import datetime # configuration and checkpoint files BEIT_CONFIG = "/app/config/upernet_beit-base_8x2_640x640_160k_ade20k.py" @@ -241,12 +242,15 @@ def segment(): return response -@app.route('/health', methods=['GET']) +@app.route("/health", methods=["GET"]) def health(): """ - health check endpoint to verify if the service is up. + Health check endpoint to verify if the service is running """ - return jsonify({"status": "healthy"}), 200 + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 if __name__ == "__main__": diff --git a/preprocessors/ner/Dockerfile b/preprocessors/ner/Dockerfile index 29b4f7404..7413b4b54 100644 --- a/preprocessors/ner/Dockerfile +++ b/preprocessors/ner/Dockerfile @@ -8,6 +8,7 @@ RUN apt-get update && \ python3-pip \ gcc \ git \ + curl \ && apt-get clean && \ rm -rf /var/lib/apt/lists/* @@ -26,4 +27,7 @@ EXPOSE 5000 ENV FLASK_APP=ner.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "ner:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/ner/ner.py b/preprocessors/ner/ner.py index 3683b4614..b04cc2bef 100644 --- a/preprocessors/ner/ner.py +++ b/preprocessors/ner/ner.py @@ -24,7 +24,7 @@ 
import jsonschema from bs4 import BeautifulSoup from flask import Flask, request, jsonify - +from datetime import datetime import nltk import clipscore @@ -255,6 +255,17 @@ def main(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == '__main__': app.run(host='0.0.0.0', port=5000, debug=True) main() diff --git a/preprocessors/nominatim/Dockerfile b/preprocessors/nominatim/Dockerfile index ed296ba73..e8ac519df 100644 --- a/preprocessors/nominatim/Dockerfile +++ b/preprocessors/nominatim/Dockerfile @@ -9,6 +9,8 @@ RUN npm run build && npm prune --production FROM node:alpine as final +RUN apk add --no-cache curl + WORKDIR /usr/src/app COPY --from=builder --chown=node:node /usr/src/app/dist ./dist COPY --from=builder --chown=node:node /usr/src/app/node_modules ./node_modules @@ -17,4 +19,7 @@ COPY --from=builder --chown=node:node /usr/src/app/package*.json ./ ENV NODE_ENV=production EXPOSE 80 USER node + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "node", "." 
] diff --git a/preprocessors/nominatim/src/server.ts b/preprocessors/nominatim/src/server.ts index ff3b4618a..435ca5f1e 100644 --- a/preprocessors/nominatim/src/server.ts +++ b/preprocessors/nominatim/src/server.ts @@ -16,8 +16,7 @@ */ import Ajv from "ajv"; -import express from "express"; - +import express, { Request, Response } from "express"; import querySchemaJSON from "./schemas/request.schema.json"; import preprocessorResponseJSON from "./schemas/preprocessor-response.schema.json"; import definitionsJSON from "./schemas/definitions.json"; @@ -89,6 +88,10 @@ app.post("/preprocessor", async (req, res) => { } }); +app.get("/health", (req, res) => { + res.status(200).json({ status: "healthy", timestamp: new Date().toISOString() }); +}); + app.listen(port, () => { console.log("Started server on port " + port); }); diff --git a/preprocessors/object-depth-calculator/Dockerfile b/preprocessors/object-depth-calculator/Dockerfile index 9de94e700..78e6ee600 100644 --- a/preprocessors/object-depth-calculator/Dockerfile +++ b/preprocessors/object-depth-calculator/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.11-slim -RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-opencv wget git && rm -rf /var/lib/apt/lists/* +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-opencv wget git curl && rm -rf /var/lib/apt/lists/* RUN adduser --disabled-password python WORKDIR /app @@ -16,4 +16,7 @@ EXPOSE 5000 ENV FLASK_APP=object-depth-calculator.py ENV TORCH_HOME=/app USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "object-depth-calculator:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/object-depth-calculator/object-depth-calculator.py b/preprocessors/object-depth-calculator/object-depth-calculator.py index 7ff1a12a4..dd75486d1 100644 --- 
a/preprocessors/object-depth-calculator/object-depth-calculator.py +++ b/preprocessors/object-depth-calculator/object-depth-calculator.py @@ -22,6 +22,7 @@ import jsonschema import base64 import logging +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -161,6 +162,17 @@ def objectdepth(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == '__main__': app.run(host='0.0.0.0', port=5000, debug=True) objectdepth() diff --git a/preprocessors/object-detection-azure/Dockerfile b/preprocessors/object-detection-azure/Dockerfile index 869632c98..fdfe3e537 100644 --- a/preprocessors/object-detection-azure/Dockerfile +++ b/preprocessors/object-detection-azure/Dockerfile @@ -1,7 +1,9 @@ #will come back to this and sorting FROM python:3.9-alpine -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python + WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -16,4 +18,7 @@ COPY /preprocessors/object-detection-azure /app EXPOSE 5000 ENV FLASK_APP=objdetect.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "objdetect:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/object-detection-azure/objdetect.py b/preprocessors/object-detection-azure/objdetect.py index 19894d837..9f864a6f0 100644 --- a/preprocessors/object-detection-azure/objdetect.py +++ b/preprocessors/object-detection-azure/objdetect.py @@ -23,6 +23,7 @@ import base64 import os from flask import Flask, request, jsonify +from datetime import datetime app = Flask(__name__) logging.basicConfig(level=logging.DEBUG) @@ -196,5 +197,16 @@ def categorise(): return 
response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/ocr/ocr.py b/preprocessors/ocr/ocr.py index 1e0111bf2..4e48ff3b9 100644 --- a/preprocessors/ocr/ocr.py +++ b/preprocessors/ocr/ocr.py @@ -23,6 +23,7 @@ import io import base64 from flask import Flask, request, jsonify +from datetime import datetime from ocr_utils import ( process_azure_read, process_azure_ocr, @@ -151,12 +152,15 @@ def analyze_image(source, width, height, cld_srv_optn): return process_google_vision(image_b64, width, height) -@app.route('/health', methods=['GET']) +@app.route("/health", methods=["GET"]) def health(): """ - health check endpoint to verify if the service is up. + Health check endpoint to verify if the service is running """ - return jsonify({"status": "healthy"}), 200 + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 if __name__ == "__main__": diff --git a/preprocessors/openstreetmap/Dockerfile b/preprocessors/openstreetmap/Dockerfile index 3cdd5f690..8acf04720 100644 --- a/preprocessors/openstreetmap/Dockerfile +++ b/preprocessors/openstreetmap/Dockerfile @@ -1,8 +1,7 @@ -#did not test this change! 
FROM python:3.11-alpine3.20 -RUN adduser --disabled-password python - +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -17,4 +16,7 @@ EXPOSE 5000 ENV FLASK_APP=main.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "main:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/openstreetmap/main.py b/preprocessors/openstreetmap/main.py index d97309fe1..aac6aa615 100644 --- a/preprocessors/openstreetmap/main.py +++ b/preprocessors/openstreetmap/main.py @@ -2,6 +2,7 @@ import jsonschema import json import logging +from datetime import datetime from osm_service import ( get_streets, get_timestamp, @@ -167,5 +168,16 @@ def get_map_data(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/sorting/Dockerfile b/preprocessors/sorting/Dockerfile index 35970a2ec..66e49bdaa 100644 --- a/preprocessors/sorting/Dockerfile +++ b/preprocessors/sorting/Dockerfile @@ -1,7 +1,8 @@ #will come back to this and object detection FROM python:3.8-alpine -RUN adduser --disabled-password python +RUN apk add --no-cache curl && \ + adduser --disabled-password python WORKDIR /app ENV PATH="/home/python/.local/bin:${PATH}" @@ -16,4 +17,7 @@ EXPOSE 5000 ENV FLASK_APP=sorting.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "sorting:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/sorting/sorting.py 
b/preprocessors/sorting/sorting.py index 0ff97e9fa..fb63b41fb 100644 --- a/preprocessors/sorting/sorting.py +++ b/preprocessors/sorting/sorting.py @@ -20,6 +20,7 @@ import jsonschema import logging from math import sqrt +from datetime import datetime app = Flask(__name__) @@ -129,5 +130,16 @@ def readImage(): return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/preprocessors/text-followup/Dockerfile b/preprocessors/text-followup/Dockerfile new file mode 100644 index 000000000..d5fdaf878 --- /dev/null +++ b/preprocessors/text-followup/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.11-alpine3.20 + +RUN apk add --no-cache curl && \ + adduser --disabled-password python + +WORKDIR /app +ENV PATH="/home/python/.local/bin:${PATH}" + +COPY /preprocessors/text-followup/requirements.txt /app/requirements.txt + +RUN pip3 install --upgrade pip && \ + pip3 install -r /app/requirements.txt + +COPY /schemas /app/schemas +COPY /preprocessors/text-followup/ /app + +EXPOSE 5000 +USER python +ENV FLASK_APP=text-followup.py + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + +CMD [ "gunicorn", "text-followup:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] diff --git a/preprocessors/text-followup/README.md b/preprocessors/text-followup/README.md new file mode 100644 index 000000000..9c392e52d --- /dev/null +++ b/preprocessors/text-followup/README.md @@ -0,0 +1,50 @@ +Beta quality: Useful enough for testing by end-users. + +This preprocessor generates a text-only response +to a followup query that contains the original graphic +and a question posed by the user. 
+A brief and a full response are included, and together +form the full response. + +It uses an LLM model running via ollama fronted by open-webui. +There are several mandatory environment variables you must set. +Example ollama.env file: + +``` +OLLAMA_URL=https://ollama.myserver.com/ollama/api/generate +OLLAMA_API_KEY=sk-[YOUR_OLLAMA_SECRET KEY] +OLLAMA_MODEL=llava:latest +``` +Note these are not unique to this preprocessor. Due to +GPU memory limitations, we assume that all preprocessors +will use the same ollama model, to prevent them swapping +in and out of memory. + +Logging personal information should only be done on test +servers. The environment variable LOG_PII can be set +to allow this information to be logged. +In addition, you can override the prompt used by +the LLM. Example: + +``` + text-followup: + ... + environment: + LOG_PII: "true" + TEXT_FOLLOWUP_PROMPT_OVERRIDE: |- + The prompt I really want to use is... + Here is my request: + [user followup query will be added here] +``` + +## Libraries Used + +| Library | Link | Distribution License | +| ------------- | ------------- | -------------| +| Requests | [Link](https://pypi.org/project/requests/) | Apache 2.0| +| Flask | [Link](https://pypi.org/project/Flask/) | BSD-3-Clause License| +| Jsonschema | [Link](https://pypi.org/project/jsonschema/) | MIT License| +| Werkzeug | [Link](https://pypi.org/project/Werkzeug/) | BSD-3 | +| Gunicorn | [Link](https://github.com/benoitc/gunicorn) | MIT License(MIT) | + +The versions for each of these libraries is specified `requirements.txt` diff --git a/preprocessors/text-followup/requirements.txt b/preprocessors/text-followup/requirements.txt new file mode 100644 index 000000000..242f77fb5 --- /dev/null +++ b/preprocessors/text-followup/requirements.txt @@ -0,0 +1,6 @@ +Flask==3.0.3 +flask_api==3.1 +jsonschema==3.2.0 +Werkzeug==3.1.0 +gunicorn==23.0.0 +requests==2.32.3 diff --git a/preprocessors/text-followup/text-followup.py 
b/preprocessors/text-followup/text-followup.py new file mode 100644 index 000000000..879f275b7 --- /dev/null +++ b/preprocessors/text-followup/text-followup.py @@ -0,0 +1,230 @@ +# Copyright (c) 2021 IMAGE Project, Shared Reality Lab, McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# You should have received a copy of the GNU Affero General Public License +# and our Additional Terms along with this program. +# If not, see +# . + +from flask import Flask, request, jsonify +import requests +import json +import time +import jsonschema +import logging +import os +from datetime import datetime + +app = Flask(__name__) +logging.basicConfig(level=logging.DEBUG) + + +@app.route("/preprocessor", methods=['POST', ]) +def followup(): + logging.debug("Received request") + + # load the schemas and verify incoming data + with open('./schemas/preprocessors/text-followup.schema.json') \ + as jsonfile: + data_schema = json.load(jsonfile) + with open('./schemas/preprocessor-response.schema.json') \ + as jsonfile: + schema = json.load(jsonfile) + with open('./schemas/definitions.json') as jsonfile: + definitionSchema = json.load(jsonfile) + with open('./schemas/request.schema.json') as jsonfile: + first_schema = json.load(jsonfile) + # Following 6 lines of code from + # https://stackoverflow.com/questions/42159346 + schema_store = { + schema['$id']: schema, + definitionSchema['$id']: definitionSchema + } + resolver = jsonschema.RefResolver.from_schema( + schema, store=schema_store) + content = 
request.get_json() + try: + validator = jsonschema.Draft7Validator(first_schema, resolver=resolver) + validator.validate(content) + except jsonschema.exceptions.ValidationError as e: + logging.error(e) + return jsonify("Invalid Preprocessor JSON format"), 400 + + # check we received a graphic (e.g., not a map or chart request) + if "graphic" not in content: + logging.info("Request is not a graphic. Skipping...") + return "", 204 # No content + + request_uuid = content["request_uuid"] + timestamp = time.time() + name = "ca.mcgill.a11y.image.preprocessor.text-followup" + + # debugging this preprocessor is really difficult without seeing what + # ollama is returning, but this can contain PII. Until we have a safe + # way of logging PII, using manually set LOG_PII env variable + # to say whether or not we should go ahead and log potential PII + log_pii = os.getenv('LOG_PII', "false").lower() == "true" + if log_pii: + logging.warning("LOG_PII is True: potential PII will be logged!") + + # convert the uri to processable image + # source.split code referred from + # https://gist.github.com/daino3/b671b2d171b3948692887e4c484caf47 + source = content["graphic"] + graphic_b64 = source.split(",")[1] + + # TODO: crop graphic if the user has specified a region of interest + # TODO: add previous request history before new prompt + + # default prompt, which can be overriden by env var just after + general_prompt = (""" + I am blind so I cannot see this image. + Answer in a single JSON object containing two keys. + The first key is "response_brief" and is a single sentence + that can stand on its own that directly answers the specific + request at the end of this prompt. + The second key is "response_full" and provides maximum + three sentences of additional detail, + without repeating the information in the first key. + If there is no more detail you can provide, + omit the second key instead of having an empty key. + Remember to answer only in JSON, or I will be very angry! 
+ Do not put anything before or after the JSON, + and make sure the entire response is only a single JSON block, + with both keys in the same JSON object. + Here is an example of the output JSON in the format you + are REQUIRED to follow: + { + "response_brief": "One sentence response to the user request", + "response_full": "Further details." + } + Note that the first character of output MUST be "{". + Remove all whitespace before and after the JSON. + Here is my request: + """) + # override with prompt from environment variable only if it exists + general_prompt = os.getenv('TEXT_FOLLOWUP_PROMPT_OVERRIDE', general_prompt) + user_prompt = content["followup"]["query"] + prompt = general_prompt + ' ' + user_prompt + if log_pii: + logging.debug("user followup prompt: " + prompt) + else: + logging.debug("user followup prompt: {general_prompt} [redacted]") + + # prepare ollama request + api_url = os.environ['OLLAMA_URL'] + api_key = os.environ['OLLAMA_API_KEY'] + ollama_model = os.environ['OLLAMA_MODEL'] + + logging.debug("OLLAMA_URL " + api_url) + if api_key.startswith("sk-"): + logging.debug("OLLAMA_API_KEY looks properly formatted: " + + "sk-[redacted]") + else: + logging.warning("OLLAMA_API_KEY does not start with sk-") + + request_data = { + "model": ollama_model, + "prompt": prompt, + "images": [graphic_b64], + "stream": False, + "temperature": 0.0, + "format": "json", + "keep_alive": -1 # keep model loaded in memory indefinitely + } + logging.debug("serializing json from request_data dictionary") + request_data_json = json.dumps(request_data) + + request_headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {api_key}" + } + + logging.debug("Posting request to ollama model " + ollama_model) + response = requests.post(api_url, headers=request_headers, + data=request_data_json) + logging.debug("ollama request response code: " + str(response.status_code)) + + if response.status_code == 200: + ollama_error_msg = None + try: + # strip() at end 
since llama often puts a newline before json + response_text = json.loads(response.text)['response'].strip() + if log_pii: + logging.debug("raw ollama response: " + response_text) + followup_response_json = json.loads(response_text) + except json.JSONDecodeError: + ollama_error_msg = "raw response does not look like json" + except KeyError: + ollama_error_msg = "no response tag found in returned json" + except TypeError: # have seen this when we just get a string back + # TODO: investigate what is actually happening here! + ollama_error_msg = "unknown error decoding json. investigate!" + finally: + if ollama_error_msg is not None: + logging.error(ollama_error_msg + " returning 204") + return jsonify("Invalid LLM results"), 204 + else: + if log_pii: + logging.error("Error {response.status_code}: {response.text}") + else: + logging.error("Error {response.status_code}: " + "[response text redacted]") + return jsonify("Invalid response from ollama"), 204 + + # check if ollama returned valid json that follows schema + try: + validator = jsonschema.Draft7Validator(data_schema) + validator.validate(followup_response_json) + except jsonschema.exceptions.ValidationError as e: + logging.error(f"JSON schema validation fail: {e.validator} {e.schema}") + if log_pii: + logging.debug(e) + return jsonify("Invalid Preprocessor JSON format"), 500 + + # create full response & check meets overall preprocessor response schema + response = { + "request_uuid": request_uuid, + "timestamp": int(timestamp), + "name": name, + "data": followup_response_json + } + try: + validator = jsonschema.Draft7Validator(schema, resolver=resolver) + validator.validate(response) + except jsonschema.exceptions.ValidationError as e: + logging.error(f"JSON schema validation fail: {e.validator} {e.schema}") + if log_pii: + logging.debug(e) # print full error only in debug, due to PII + return jsonify("Invalid Preprocessor JSON format"), 500 + + # all done; return to orchestrator + logging.debug("full response 
length: " + str(len(response))) + if log_pii: + logging.debug(response) + + return response + + +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + +if __name__ == "__main__": + app.run(host='0.0.0.0', port=5000, debug=True) + followup() diff --git a/preprocessors/yolov8/Dockerfile b/preprocessors/yolov8/Dockerfile index f24b24f72..921dc6574 100644 --- a/preprocessors/yolov8/Dockerfile +++ b/preprocessors/yolov8/Dockerfile @@ -6,7 +6,7 @@ ENV PATH="/usr/src/app/.local/bin:${PATH}" RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ - wget && \ + wget curl && \ rm -rf /var/lib/apt/lists/* COPY /preprocessors/yolov8/requirements.txt /usr/src/app/requirements.txt @@ -24,4 +24,7 @@ EXPOSE 5000 ENV FLASK_APP=detect.py USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:5000/health || exit 1 + CMD [ "gunicorn", "detect:app", "-b", "0.0.0.0:5000", "--capture-output", "--log-level=debug" ] \ No newline at end of file diff --git a/preprocessors/yolov8/detect.py b/preprocessors/yolov8/detect.py index 760564b35..ae2dff431 100644 --- a/preprocessors/yolov8/detect.py +++ b/preprocessors/yolov8/detect.py @@ -28,6 +28,7 @@ import logging import os +from datetime import datetime from ultralytics.nn.tasks import attempt_load_weights from ultralytics.yolo.utils import plt_settings from ultralytics.yolo.utils.torch_utils import select_device @@ -374,6 +375,17 @@ def run(weights='yolov8x.pt', return response +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + def main(): run() diff --git a/scripts/README.md b/scripts/README.md new 
file mode 100644 index 000000000..3d71f8501 --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,4 @@ +This directory contains convenience scripts for monitoring, testing, and maintaining an IMAGE server. +Even though some of them may assume McGill-specific paths and tools, we include them in the repository since we expect they will serve as useful examples even though they may require modification for your specific environment. + +Documentation can be found as comments within each script. \ No newline at end of file diff --git a/scripts/bsu b/scripts/bsu new file mode 100755 index 000000000..8a6e4ded1 --- /dev/null +++ b/scripts/bsu @@ -0,0 +1,12 @@ +#!/usr/bin/sh + +# usage: bsu [image-name] +# build, stop, up -d an image in the docker-compose. +# useful when making changes to preprocessors and handlers, +# and wanting to bring up the new version to test it out. +# +# NOTE: Assumes name of image and container are the same. +# +# maintainer: jeffbl + +docker compose build $1 && docker compose stop $1 && docker compose up -d $1 \ No newline at end of file diff --git a/scripts/gpu_memory_docker b/scripts/gpu_memory_docker new file mode 100755 index 000000000..33dd2814b --- /dev/null +++ b/scripts/gpu_memory_docker @@ -0,0 +1,18 @@ +#!/bin/bash + +# Shows amount of GPU memory in use for each docker container across all GPUs +# +# maintainer: jeffbl +# +# TODO: Don't use the same basic command pipe twice, consolidate into something more elegant. +# TODO: Make this less fragile and less of a sed salad. 
+ +# show overall nvidia gpu stats, which include which GPU each container is using +nvidia-smi + +# list each docker containers with PID and memory use +# TODO: incorporate GPU index for each (currently have to look at nvidia-smi above) +paste \ + <(nvidia-smi | sed '1,/|\ Processes/d' | grep -vE "Xorg|gnome" | grep '[0-9]' | awk '{print $5, $8}') \ + <(nvidia-smi | sed '1,/|\ Processes/d' | grep -vE "Xorg|gnome" | grep '[0-9]' | awk '{print $5}' | xargs -I {} sh -c 'cat /proc/{}/cgroup | cut -c 25-36' | xargs -I {} sh -c 'docker ps | grep {}') + diff --git a/scripts/image-summary b/scripts/image-summary new file mode 100755 index 000000000..79ed1ddab --- /dev/null +++ b/scripts/image-summary @@ -0,0 +1,71 @@ +#!/bin/bash +# USAGE image-summary orchestrator_name +# +# Lists preprocessors by run group (priority) and all handlers +# including git tag (e.g., unstable) + +set -e + +if [ $# -ne 1 ]; then + echo "One argument required" + echo "USAGE: image-summary orchestrator_name" + exit 1 +fi + +orchestrator_name=$1 + +# Check for commands +if ! command -v docker > /dev/null; then + echo "Docker is not installed or not on the path!" + exit 1 +fi +if ! command -v jq > /dev/null; then + echo "jq is not installed or not on the path!" 
+ exit 1 +fi + +# Determine networks connected to the orchestrator container +orchestrator_networks=($(docker inspect $orchestrator_name | jq '.[0].NetworkSettings.Networks | to_entries | map(.value.NetworkID) | @sh' | tr -d \'\")) +handlers=() +preprocessors=() +# For all networks, get the connected, running preprocessors and handlers +for network in "${orchestrator_networks[@]}"; do + for id in "$(docker ps -f network=$network -f status=running -f label=ca.mcgill.a11y.image.preprocessor --format '{{.Label "ca.mcgill.a11y.image.preprocessor"}},{{.Names}} ({{.ID}}) using {{.Image}}')"; do + preprocessors+=("$id") + done + for id in "$(docker ps -f network=$network -f status=running -f label=ca.mcgill.a11y.image.handler=enable --format '{{.Names}} ({{.ID}}) using {{.Image}}')"; do + handlers+=("$id") + done +done + +# Order preprocessors by priority group +IFS=$'\n' sorted=($(sort <<<"${preprocessors[*]}")) +handlers=($(sort <<<"${handlers[*]}")) +unset IFS + +# Display information +group="" +echo "Containers connected to orchestrator \"$1\"" +echo "****************************************************" +echo "Preprocessors:" + + +for preprocessor in "${sorted[@]}"; do + IFS=',' + read -rasplit<<< "$preprocessor" + unset IFS + + if [[ $group -ne ${split[0]} ]]; then + group=${split[0]} + printf "\tPriority Group "$group":\n" + fi + + printf "\t\t%s\n" "${split[1]}" +done + +echo "****************************************************" +echo "Handlers:" + +for handler in "${handlers[@]}"; do + printf '\t%s\n' "${handler}" +done diff --git a/scripts/image_daily_cron b/scripts/image_daily_cron new file mode 100755 index 000000000..68e872cde --- /dev/null +++ b/scripts/image_daily_cron @@ -0,0 +1,28 @@ +#!/usr/bin/sh + +# daily cleanup script to reset test server every day and run tests + +# determine the script's directory dynamically +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +ENV_FILE="$SCRIPT_DIR/../config/image_daily_cron.env" +if [ -f "$ENV_FILE" ]; then + export 
$(grep -v '^#' "$ENV_FILE" | xargs)
+else
+    echo "Error: Environment file not found at $ENV_FILE" >&2
+    exit 1
+fi
+
+RESTOREUNSTABLE="$SCRIPT_DIR/restoreunstable"
+PP_DAILY_TEST="$SCRIPT_DIR/pp_daily_test"
+
+if [ ! -x "$RESTOREUNSTABLE" ] || [ ! -x "$PP_DAILY_TEST" ]; then
+    echo "Error: One or more required scripts are missing or not executable in $SCRIPT_DIR." >&2
+    ls -l "$SCRIPT_DIR" # Show directory contents for debugging
+    exit 1
+fi
+
+# Run restoreunstable
+"$RESTOREUNSTABLE" > "$LOG_DIR/restoreunstable.$(date "+%Y.%m.%d-%H.%M.%S")" 2>&1
+
+# Run pp_daily_test
+"$PP_DAILY_TEST"
\ No newline at end of file
diff --git a/scripts/imagelogs b/scripts/imagelogs
new file mode 100755
index 000000000..05974816f
--- /dev/null
+++ b/scripts/imagelogs
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# A thin wrapper around `docker compose logs -f` that sorts older logs
+# by timestamp, and filters out some of the more verbose output.
+# Use to monitor requests as they come in, or see what went wrong with recent
+# requests that are already completed.
+
+
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+
+ENV_FILE="$SCRIPT_DIR/../config/imagelogs.env"
+if [ -f "$ENV_FILE" ]; then
+    export $(grep -v '^#' "$ENV_FILE" | xargs)
+else
+    echo "Error: Environment file not found at $ENV_FILE" >&2
+    exit 1
+fi
+if [ -z "$DOCKER_DIR" ]; then
+    echo "Error: Required environment variable (DOCKER_DIR) is missing."
>&2 + exit 1 +fi + +cd "$DOCKER_DIR" || { + echo "Error: Failed to change directory to $DOCKER_DIR" + exit 1 +} + +docker compose logs -t | grep -ivE '^(supercollider-|.*GET /(health|healthcheck.html))' | sort -k3 +echo "====================== OLD LOGS COMPLETE, FOLLOWING NEW LOG OUTPUT STARTING HERE ==========================================" +docker compose logs -t -f --tail 0 | grep -ivE '^(supercollider-|.*GET /(health|healthcheck.html))' +cd - +~ \ No newline at end of file diff --git a/scripts/make_request b/scripts/make_request new file mode 100755 index 000000000..3494c3090 --- /dev/null +++ b/scripts/make_request @@ -0,0 +1,27 @@ +#!/bin/bash +# USAGE: make_request [PATH_TO_GRAPHIC_FILE] +# example: make_request foo.jpg +# Can be used in tandem with the sendimagereq script to send the request to an IMAGE server + +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +ENV_FILE="$SCRIPT_DIR/../config/make_request.env" +if [ -f "$ENV_FILE" ]; then + export $(grep -v '^#' "$ENV_FILE" | xargs) +else + echo "Error: Environment file not found at $ENV_FILE" >&2 + exit 1 +fi + +image_file=$1 + +if [ ! -f "$image_file" ]; then + echo "Not a file: $image_file!" 
+    exit 1
+fi
+
+mimetype=$(file --mime-type -b "$image_file")
+b64=$(base64 -w0 "$image_file")
+imageurl="data:$mimetype;base64,$b64"
+dims=$(convert "$image_file" -print "[%w, %h]" /dev/null)
+
+echo '{"request_uuid": "'$(uuidgen -r)'", "timestamp": '$(date +%s)', "graphic": "'$imageurl'", "dimensions": '$dims', "context": "", "language": "'$LANGUAGE'", "capabilities": '$CAPABILITIES', "renderers": '$RENDERERS', "preprocessors": {}}'
diff --git a/scripts/pp_daily_test b/scripts/pp_daily_test
new file mode 100755
index 000000000..72c0aea0d
--- /dev/null
+++ b/scripts/pp_daily_test
@@ -0,0 +1,50 @@
+#!/usr/bin/bash
+
+# Make test requests based on graphics from the IMAGE-test-graphics repo:
+# https://github.com/Shared-Reality-Lab/IMAGE-test-graphics
+# Logs the results, and flags where changes occurred to indicate potential
+# problems that should be investigated.
+# Runs as part of daily cron, and posts message to Slack if anything changed.
+
+# Determine the script's directory dynamically
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+
+# load environment variables from pp_daily_test.env
+ENV_FILE="$SCRIPT_DIR/../config/pp_daily_test.env"
+if [ -f "$ENV_FILE" ]; then
+    export $(grep -v '^#' "$ENV_FILE" | xargs)
+else
+    echo "Error: Environment file not found at $ENV_FILE" >&2
+    exit 1
+fi
+
+if [ -z "$LOG_DIR" ] || [ -z "$SLACK_WEBHOOK_URL" ] || [ -z "$TEST_DIR" ]; then
+    echo "Error: Required environment variables (LOG_DIR, SLACK_WEBHOOK_URL, TEST_DIR) are missing."
>&2 + exit 1 +fi + +now=$(date +"%m_%d_%Y") +host=$(hostname) + +cd "$TEST_DIR" || { + echo "Error: Failed to change directory to $TEST_DIR" + exit 1 +} + +# Run the testset.py script and store output +var=$(./testset.py -t daily -d --daily -s "$host") + +# Log test output +log_file="${LOG_DIR}/daily_test_${now}.log" +echo "Test run on $now from host $host" > "$log_file" +echo "Output of testset.py:" >> "$log_file" +echo "$var" >> "$log_file" + +# Check if there are changes in the output +sub="Changes" +if [[ $var == *"$sub"* ]]; then + echo "Changes occurred" + + # Send a Slack notification if changes are detected + curl -X POST -H 'Content-type: application/json' --data '{"text":"'"$host"': preprocessor output changed during automated testing."}' "$SLACK_WEBHOOK_URL" +fi diff --git a/scripts/restoreunstable b/scripts/restoreunstable new file mode 100755 index 000000000..5e3b8ca21 --- /dev/null +++ b/scripts/restoreunstable @@ -0,0 +1,101 @@ +#!/bin/bash -x + +# Resets IMAGE docker stack completely. +# Use this to get yourself out of trouble if you're testing and mess anything up. +# Also run every night on test server via cron to make sure latest is pulled and +# server is in a known state. +# removes any docker-compose overrides, and pulls images from scratch so that +# what is running reflects what is merged into main. 
+# +# If you don't want to delete the override file, use the following to keep it: +# ./restoreunstable -k + +# Determine the script's directory dynamically +SCRIPT_DIR="$(dirname "$(realpath "$0")")" + +# Load environment variables from restoreunstable.env +ENV_FILE="$SCRIPT_DIR/../config/restoreunstable.env" +if [ -f "$ENV_FILE" ]; then + export $(grep -v '^#' "$ENV_FILE" | xargs) +else + echo "Error: Environment file not found at $ENV_FILE" >&2 + exit 1 +fi + +delete_override=true # default to deleting override file + +# get command line arguments +# https://stackoverflow.com/questions/7069682/how-to-get-arguments-with-flags-in-bash/21128172 +while getopts 'k' flag; do + case "${flag}" in + k) delete_override='false' ;; + esac +done + +set -o errexit # halt script if anything returns error +if [ "$delete_override" = true ]; then + rm -f "$DOCKER_DIR/docker-compose.override.yml" +else + echo "DANGER! Not deleting override file!" +fi +touch "$DOCKER_DIR/docker-compose.override.yml" + +cd "$IMAGE_SERVER_DIR" + +# Check if there are any safe directories before unsetting +if git config --global --get-all safe.directory; then + echo "Unsetting all safe directories" + git config --global --unset-all safe.directory + echo "Exit code for unset: $?" +else + echo "No safe directories to unset" +fi + +# different users will be working inside the server directory, so git becomes angry. Mark it as safe to do this. +# To prevent duplicate entries, check if the directories are already listed as "safe" before adding them. +# https://medium.com/@thecodinganalyst/git-detect-dubious-ownership-in-repository-e7f33037a8f +if ! git config --global --get-all safe.directory | grep -q "$IMAGE_SERVER_DIR"; then + git config --global --add safe.directory "$IMAGE_SERVER_DIR" + echo "Added $IMAGE_SERVER_DIR to safe" +fi + +if ! 
git config --global --get-all safe.directory | grep -q "$IMAGE_SERVER_DIR/schemas"; then + git config --global --add safe.directory "$IMAGE_SERVER_DIR/schemas" + echo "Added $IMAGE_SERVER_DIR/schemas to safe" +fi + +# git remote URL -- HTTPS (instead of SSH) +git remote set-url origin "$GIT_REMOTE" + +# Even though we pull already built docker images, keep the repo up to date +# and sync up to any config directory changes. +git checkout main +# `could git reset --hard` here, but should probably just error out and let user deal with it +git pull +git submodule update --init + +# Pull the latest built images +docker compose pull + +# Stop everything, including any containers on the image network that +# weren't started with docker-compose +cd "$DOCKER_DIR" + +echo "about to docker compose down..." +docker compose down + +echo "about to stop all containers on image network..." +IMAGECONTAINERS=$(docker ps -q -f "network=image") +if [[ ${IMAGECONTAINERS} ]]; then + docker stop $IMAGECONTAINERS + docker container rm $IMAGECONTAINERS +fi + +# get rid of the image network so that if someone manually creates it, we get it back via compose +echo "about to remove image network" +docker network rm -f image + +# Bring all the containers back up +docker compose up -d --force-recreate + +cd - \ No newline at end of file diff --git a/scripts/sendimagereq b/scripts/sendimagereq new file mode 100755 index 000000000..3111c6d2b --- /dev/null +++ b/scripts/sendimagereq @@ -0,0 +1,24 @@ +#!/bin/bash +# +# USAGE: sendimagereq [IMAGE request or stdin] [server] +# +# Note that the make_request script can construct the request + +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +ENV_FILE="$SCRIPT_DIR/../config/sendimagereq.env" +if [ -f "$ENV_FILE" ]; then + export $(grep -v '^#' "$ENV_FILE" | xargs) +else + echo "Error: Environment file not found at $ENV_FILE" >&2 + exit 1 +fi + +req_file=$1 +server=${2:-$SERVER} + +if [ -z "$req_file" ]; then + echo "No input supplied!" 
+ exit 1 +fi + +curl -q -H "Content-Type: application/json" -d@"$req_file" "$server" \ No newline at end of file diff --git a/services/espnet-tts-fr/Dockerfile b/services/espnet-tts-fr/Dockerfile index aa9434150..61e92fd72 100644 --- a/services/espnet-tts-fr/Dockerfile +++ b/services/espnet-tts-fr/Dockerfile @@ -1,6 +1,6 @@ FROM pytorch/pytorch:1.12.1-cuda11.3-cudnn8-runtime -RUN apt-get update && apt-get install -y libsndfile1 espeak-ng build-essential && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y libsndfile1 espeak-ng build-essential curl && rm -rf /var/lib/apt/lists/* ## phonemizedf @@ -47,5 +47,7 @@ COPY /services/espnet-tts-fr/conf/model-conf/feats_stats.npz ./ ENV TORCH_DEVICE="cpu" EXPOSE 80 +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "app:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug"] diff --git a/services/espnet-tts-fr/src/app.py b/services/espnet-tts-fr/src/app.py index ce9efdb44..e54ca902c 100644 --- a/services/espnet-tts-fr/src/app.py +++ b/services/espnet-tts-fr/src/app.py @@ -21,13 +21,14 @@ import numpy as np import soundfile as sf from espnet_util import tts, fs -from flask import Flask, Response, request +from flask import Flask, Response, request, jsonify from io import BytesIO from jsonschema import validate from torch.cuda import empty_cache from werkzeug.wsgi import FileWrapper from num2words import num2words import re # for regular expression processing +from datetime import datetime logging.basicConfig(format="%(asctime)s %(message)s") logger = logging.getLogger(__name__) @@ -169,3 +170,14 @@ def segment_tts(): }, 500 finally: empty_cache() + + +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 diff --git 
a/services/espnet-tts/Dockerfile b/services/espnet-tts/Dockerfile index b052048e4..6ae3f61ed 100644 --- a/services/espnet-tts/Dockerfile +++ b/services/espnet-tts/Dockerfile @@ -1,6 +1,6 @@ FROM pytorch/pytorch:1.10.0-cuda11.3-cudnn8-runtime -RUN apt-get update && apt-get install -y libsndfile1 build-essential wget && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y libsndfile1 build-essential wget curl && rm -rf /var/lib/apt/lists/* WORKDIR /run/tts RUN adduser --disabled-password python && chown python:python . @@ -12,8 +12,8 @@ COPY /services/espnet-tts/requirements.txt . RUN pip install -r requirements.txt COPY /services/espnet-tts/src/predownload.py . -RUN python predownload.py -RUN mkdir -p /home/python/.cache/parallel_wavegan/ljspeech_full_band_melgan.v2 && \ +RUN python predownload.py && \ + mkdir -p /home/python/.cache/parallel_wavegan/ljspeech_full_band_melgan.v2 && \ wget https://image.a11y.mcgill.ca/models/espnet/train_nodev_ljspeech_full_band_melgan.v2.tar.gz -O /home/python/.cache/parallel_wavegan/ljspeech_full_band_melgan.v2.tar.gz && \ tar xzvf /home/python/.cache/parallel_wavegan/ljspeech_full_band_melgan.v2.tar.gz -C /home/python/.cache/parallel_wavegan/ljspeech_full_band_melgan.v2/ @@ -22,4 +22,6 @@ COPY /schemas/services/tts/* ./ ENV TORCH_DEVICE="cpu" EXPOSE 80 +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD ["gunicorn", "app:app", "-b", "0.0.0.0:80", "--capture-output", "--log-level=debug"] diff --git a/services/espnet-tts/src/app.py b/services/espnet-tts/src/app.py index bca2e36a4..a825776d5 100644 --- a/services/espnet-tts/src/app.py +++ b/services/espnet-tts/src/app.py @@ -21,11 +21,12 @@ import numpy as np import soundfile as sf from espnet_util import tts, fs -from flask import Flask, Response, request +from flask import Flask, Response, request, jsonify from io import BytesIO from jsonschema import validate from torch.cuda import 
empty_cache from werkzeug.wsgi import FileWrapper +from datetime import datetime logging.basicConfig(format="%(asctime)s %(message)s") logger = logging.getLogger(__name__) @@ -112,3 +113,14 @@ def segment_tts(): }, 500 finally: empty_cache() + + +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 diff --git a/services/monarch-link-app/Dockerfile b/services/monarch-link-app/Dockerfile index e0a131477..377932695 100644 --- a/services/monarch-link-app/Dockerfile +++ b/services/monarch-link-app/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.11-alpine3.20 -RUN apk add supercronic +RUN apk add supercronic curl RUN adduser --disabled-password python WORKDIR /usr/src/app @@ -16,5 +16,8 @@ RUN chown -R python:python /usr/src/app EXPOSE 80 USER python + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD supercronic -quiet /usr/src/app/cronjob \ & gunicorn app:app -b 0.0.0.0:80 --capture-output --log-level=debug \ No newline at end of file diff --git a/services/monarch-link-app/app.py b/services/monarch-link-app/app.py index bacbfa687..24c4e9f26 100644 --- a/services/monarch-link-app/app.py +++ b/services/monarch-link-app/app.py @@ -14,7 +14,7 @@ # If not, see # . 
-from flask import Flask, request, abort, Response +from flask import Flask, request, abort, Response, jsonify from flask_bcrypt import Bcrypt from flask_cors import CORS, cross_origin from datetime import datetime @@ -200,5 +200,16 @@ def home(): return "Hi" +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 + + if __name__ == "__main__": app.run(host="0.0.0.0", port=80, debug=True) diff --git a/services/multilang-support/Dockerfile b/services/multilang-support/Dockerfile index 81ce840fe..ea73b1495 100644 --- a/services/multilang-support/Dockerfile +++ b/services/multilang-support/Dockerfile @@ -2,7 +2,7 @@ FROM pytorch/pytorch:latest # See README.md#docker-image for visualisation of the directory structure WORKDIR /app -RUN apt-get update && apt-get install python3 python3-pip git git-lfs -y +RUN apt-get update && apt-get install python3 python3-pip git git-lfs curl -y # Copy requirements COPY services/multilang-support/requirements.txt /app/ @@ -27,4 +27,7 @@ COPY services/multilang-support/src/*.py /app/src/ EXPOSE 80 ENV FLASK_APP=src/app.py + +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:80/health || exit 1 + CMD [ "gunicorn", "src.translate:app", "--bind", "0.0.0.0:80", "--log-level=info", "--timeout=120"] diff --git a/services/multilang-support/src/translate.py b/services/multilang-support/src/translate.py index bce14cf46..21fb207f2 100644 --- a/services/multilang-support/src/translate.py +++ b/services/multilang-support/src/translate.py @@ -4,6 +4,7 @@ from .utils import LOGGER, Translator, SUPPORTED_LANGS import json import jsonschema +from datetime import datetime app = Flask(__name__) @@ -91,3 +92,14 @@ def translate_request(): LOGGER.debug(f"- Response SENT! 
Time taken: {elapsed_time} ms -") # Return response return jsonify(response), 200 + + +@app.route("/health", methods=["GET"]) +def health(): + """ + Health check endpoint to verify if the service is running + """ + return jsonify({ + "status": "healthy", + "timestamp": datetime.now().isoformat() + }), 200 diff --git a/services/supercollider-images/supercollider-alpine/Dockerfile b/services/supercollider-images/supercollider-alpine/Dockerfile index c05ee0cd1..5730b2c00 100644 --- a/services/supercollider-images/supercollider-alpine/Dockerfile +++ b/services/supercollider-images/supercollider-alpine/Dockerfile @@ -2,7 +2,7 @@ FROM alpine:latest # Default to latest version of supercollider ARG VERSION=develop -RUN apk add --no-cache git g++ cmake make pipewire pipewire-jack libsndfile-dev fftw-dev jack-dev avahi-dev readline-dev linux-headers +RUN apk add --no-cache git g++ cmake make pipewire pipewire-jack libsndfile-dev fftw-dev jack-dev avahi-dev readline-dev linux-headers curl # Despite the files being present, the cmake script fails to find them without this RUN ln -sf /usr/lib/pipewire-0.3/jack/libjack.* /usr/lib diff --git a/services/supercollider-images/supercollider-alpine/entrypoint b/services/supercollider-images/supercollider-alpine/entrypoint index d235cd03b..200d546db 100644 --- a/services/supercollider-images/supercollider-alpine/entrypoint +++ b/services/supercollider-images/supercollider-alpine/entrypoint @@ -2,4 +2,9 @@ /usr/bin/pipewire & +#healthcheck +while :; do + echo -e "HTTP/1.1 200 OK\nContent-Type: application/json\n\n{\"status\": \"healthy\"}" | nc -l -p 57110 || break +done & + exec "$@" diff --git a/services/supercollider-images/supercollider-extra/Dockerfile b/services/supercollider-images/supercollider-extra/Dockerfile index 0ea0e93ac..7e606899e 100644 --- a/services/supercollider-images/supercollider-extra/Dockerfile +++ b/services/supercollider-images/supercollider-extra/Dockerfile @@ -4,7 +4,7 @@ ARG VERSION=Version-3.13.0 USER root 
# Wanted by MP3 -RUN dnf install -q -y lame vorbis-tools unzip which && \ +RUN dnf install -q -y lame vorbis-tools unzip which curl && \ rm -rf /var/cache/dnf # Install sc3-plugins @@ -34,4 +34,5 @@ COPY atk_install.scd /tmp/sc/atk_install.scd RUN sclang atk_install.scd RUN rm atk_install.scd + ENTRYPOINT ["/run/entrypoint"] diff --git a/services/supercollider-images/supercollider/Dockerfile b/services/supercollider-images/supercollider/Dockerfile index b66039e7b..c5d612d02 100644 --- a/services/supercollider-images/supercollider/Dockerfile +++ b/services/supercollider-images/supercollider/Dockerfile @@ -1,7 +1,7 @@ FROM fedora:38 ARG VERSION=Version-3.13.0 -RUN dnf install -q -y pipewire git cmake gcc-c++ pipewire-jack-audio-connection-kit-devel fftw-devel libsndfile-devel avahi-devel readline-devel libatomic && \ +RUN dnf install -q -y pipewire git cmake gcc-c++ pipewire-jack-audio-connection-kit-devel fftw-devel libsndfile-devel avahi-devel readline-devel libatomic curl nc && \ rm -rf /var/cache/dnf # Despite the files being present, the cmake script fails to find them without this @@ -33,4 +33,6 @@ RUN chmod +x /run/entrypoint && \ USER sclang +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost:57110/health || exit 1 + ENTRYPOINT ["/run/entrypoint"] diff --git a/services/supercollider-images/supercollider/entrypoint b/services/supercollider-images/supercollider/entrypoint index d235cd03b..200d546db 100644 --- a/services/supercollider-images/supercollider/entrypoint +++ b/services/supercollider-images/supercollider/entrypoint @@ -2,4 +2,9 @@ /usr/bin/pipewire & +#healthcheck +while :; do + echo -e "HTTP/1.1 200 OK\nContent-Type: application/json\n\n{\"status\": \"healthy\"}" | nc -l -p 57110 || break +done & + exec "$@" diff --git a/services/tat/Dockerfile b/services/tat/Dockerfile index a3b01e1d2..1678aad39 100644 --- a/services/tat/Dockerfile +++ b/services/tat/Dockerfile @@ -1,5 +1,5 @@ # Stage 1: Build the 
application -FROM node:20-alpine as build +FROM node:20-alpine AS build # Set the working directory inside the container WORKDIR /app @@ -31,11 +31,17 @@ FROM nginx:alpine # Copy the build output from the previous stage to the Nginx HTML directory COPY --from=build /app/dist/editor/ /usr/share/nginx/html +# Add a static health check file +COPY services/tat/healthcheck.html /usr/share/nginx/html/ + # Copy a custom Nginx configuration file, if needed (optional) # COPY nginx.conf /etc/nginx/nginx.conf # Expose the default Nginx port EXPOSE 80 +# Healthcheck +HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=5 CMD curl -f http://localhost/healthcheck.html || exit 1 + # Start the Nginx server CMD ["nginx", "-g", "daemon off;"] \ No newline at end of file diff --git a/services/tat/healthcheck.html b/services/tat/healthcheck.html new file mode 100644 index 000000000..7d1a9afde --- /dev/null +++ b/services/tat/healthcheck.html @@ -0,0 +1,11 @@ + + + + Health Check + + +

Healthy

+

The service 'tat' is running correctly!

+ + + diff --git a/test-docker-compose.yml b/test-docker-compose.yml index 92006e014..750eb9516 100644 --- a/test-docker-compose.yml +++ b/test-docker-compose.yml @@ -114,4 +114,8 @@ services: - "traefik.http.routers.tat.tls.certresolver=myresolver" - traefik.docker.network=traefik environment: - - SERVER_NAME=unicorn.cim.mcgill.ca \ No newline at end of file + - SERVER_NAME=unicorn.cim.mcgill.ca + + text-followup: + environment: + LOG_PII: "true" \ No newline at end of file