diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 0000000..2a91433 --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,51 @@ +name: SecurityScanWorkflow + +on: + push: + branches: + - '**' + pull_request: + branches: + - '**' + +jobs: + run-security-scan: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Get list of changed files (new and modified) + id: changed-files + run: | + git fetch origin master --depth=1 + MODIFIED_FILES=$(git diff --name-only origin/master ${{ github.sha }} | tr '\n' ' ') + echo "MODIFIED_FILES=${MODIFIED_FILES}" >> $GITHUB_ENV + + - name: Set up Python environment + uses: actions/setup-python@v3 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + pip install --upgrade pip + pip install -r requirements.txt + + - name: Run Vulnerability Scanner on all changed files + run: | + if [[ -z "$MODIFIED_FILES" ]]; then + echo "No modified files to scan." + exit 0 + fi + for file in $MODIFIED_FILES; do + echo "Scanning $file..." + python3 Vulnerability_Tool/Vulnerability_Scanner_V1.4.py "$file" >> security_scan_report.txt + done + + - name: Save scan results as an artifact + uses: actions/upload-artifact@v4 + with: + name: security-scan-report + path: security_scan_report.txt diff --git a/.gitignore b/.gitignore index 32f22ef..3016664 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,7 @@ yarn-debug.log* yarn-error.log* lerna-debug.log* .pnpm-debug.log* - +.vs/ # Diagnostic reports (https://nodejs.org/api/report.html) report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json @@ -129,4 +129,7 @@ dist .pnp.* .vscode -.idea \ No newline at end of file +.ideaenv.test +.env +.env.* +*.env diff --git a/API_PatchNotes.yaml b/API_PatchNotes.yaml new file mode 100644 index 0000000..dda22fe --- /dev/null +++ b/API_PatchNotes.yaml @@ -0,0 +1,33 @@ +Nutrihelp-api: V1.4 + + + + + +Nutrihelp-api: V1.3 + + + + + +Nutrihelp-api: V1.2 + + + + + +Nutrihelp-api: V1.1 + Description: + + + ChangeLog + - Added Version Control + - + + + + +Nutrihelp-api: V1.0 + + +COME BACK TOO diff --git a/Monitor_&_Logging/loginLogger.js b/Monitor_&_Logging/loginLogger.js new file mode 100644 index 0000000..b18cde1 --- /dev/null +++ b/Monitor_&_Logging/loginLogger.js @@ -0,0 +1,24 @@ +const { createClient } = require('@supabase/supabase-js'); + +const supabase = createClient( + process.env.SUPABASE_URL, + process.env.SUPABASE_ANON_KEY +); + +async function logLoginEvent({ userId, eventType, ip, userAgent, details = {} }) { + const { error } = await supabase + .from('audit_logs') + .insert({ + user_id: userId, + event_type: eventType, + ip_address: ip, + user_agent: userAgent, + details + }); + + if (error) { + console.error('Error logging login event:', error); + } +} + +module.exports = logLoginEvent; diff --git a/PatchNotes_VersionControl.yaml b/PatchNotes_VersionControl.yaml new file mode 100644 index 0000000..f225a19 --- /dev/null +++ b/PatchNotes_VersionControl.yaml @@ -0,0 +1,235 @@ +Nutrihelp API Backend Version Control +### All changelog additions were completed via the Backend API team as a whole ### + +#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-V2024.2.3-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-# + ChangeLog: + - Added 5 new vulnerability pattens to Vulnerability scanner - Changed version to V1.1 + - Added Vulnerability Report + - Upgraded POST Meal Planning API + - Upgraded GET meal planning API + - Updated / Refactored login API's to use email instead of username + - Began development of recipe image classification api + +#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-V2024.2.2-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-# + ChangeLog: + - Implmented V1.0 Vulnerability scanner + - Added user ID and set up relationship in feedback API endpoint + - Added cooking method ID to the relation table/recipe API endpoint + - Added User Change Password API + - Added Image to User profile API + +#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-V2024.2.1-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-# + ChangeLog: + - Added Version Control + - Added additional input Fields to the New user Sign Up Form + - Added images to Recipe API + +#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-V2024.2.0-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-# +Current pages and applicaitons present before trimester commenced + +Current Controllers: +- Appointment Controller +- Contact Us Controller +- Food Data Controller +- Image Classification Controller +- Login Controller +- Meal Plan Controller +- Recipe Controller +- Sign Up Controller +- User Feedback Controller +- User Preferences Controller +- User Profile Controller + +Middleware: +- Authenticate Token + +Current models: +- Add Appointment +- Add Contact Us Msg +- Add MFA Token +- Add User +- Add User Feedback +- Create Recipe +- Create Recipe Test Sample +- Delete User Recipes +- Fetch All Allergies +- Fetch Cooking Methods +- Fetch All Cuisines +- Fetch All Dietary Requirements +- Fetch All Health Conditions +- Fetch All Ingredients +- Fetch All Spice Levels +- Fetch User Preferences +- Get Appointments +- Get User +- Get User Credentials +- Get User Profile +- Get User Recipes +- Image classification python script +- Meal Plan +- Update User Preference +- Update User Profile + +Node Models +- .bin +- @sendgrid +- @supabase +- @types +- accepts +- append-field +- argparse +- array-flatten +- asynckit +- axios +- balanced-match +- bcryptjs +- body-parser +- brace-expansion +- buffer-equal-constant-time +- buffer-from +- busboy +- bytes +- call-bind +- combined-stream +- concat-map +- concat-stream +- content-disposition +- content-type +- cookie +- cookie-signature +- core-util-is +- cors +- debug +- deepmerge +- define-data-property +- delayed-stream +- denque +- depd +- destroy +- dotenv +- ecdsa-sig-formatter +- ee-first +- encodeurl +- es-define-property +- es-errors +- escape-html +- etag +- express +- finalhandler +- follow-redirects +- form-data +- forwarded +- fresh +- fs.realpath +- function-bind +- generate-function +- get-intrinsic +- glob +- gopd +- has-property-descriptors +- has-proto +- has-symbols +- hasown +- http-errors +- iconv-lite +- inflight +- inherits +- ipaddr.js +- is-property +- isarray +- jsonwebtoken +- jwa +- jws +- lodash.includes +- lodash.isboolean +- lodash.isinteger +- lodash.isnumber +- lodash.isplainobject +- lodash.isstring +- lodash.once +- long +- lru-cache +- media-typer +- merge-descriptors +- methods +- mime +- mime-db +- mime-types +- minimatch +- minimist +- mkdirp +- ms +- multer +- mysql2 +- named-placeholders +- negotiator +- object-assign +- object-inspect +- on-finished +- once +- parseurl +- path-is-absolute +- path-to-regexp +- process-nextick-args +- proxy-addr +- proxy-from-env +- qs +- range-parser +- raw-body +- readable-stream +- safe-buffer +- safer-buffer +- semver +- send +- seq-queue +- serve-static +- set-function-length +- setprototypeof +- side-channel +- sprintf-js +- sqlstring +- statuses +- streamsearch +- string_decoder +- swagger-ui-dist +- swagger-ui-express +- toidentifier +- tr46 +- type-is +- typedarray +- undici-types +- unpipe +- util-deprecate +- utils-merge +- vary +- webidl-conversions +- whatwg-url +- wrappy +- ws +- xtend +- yallist +- yamljs + +Routes: +- Appointment +- Contact Us +- Food Data +- Image Classification +- Index +- Login +- Meal Plan +- Recipe +- Sign Up +- User Feedback +- User Preference +- User Profile + +Other: +- .env +- .gitignore.git +- dbConnection.js +- index.yaml +- package.json +- package-lock.json +- README.md +- server.js diff --git a/README.md b/README.md new file mode 100644 index 0000000..7fa4436 --- /dev/null +++ b/README.md @@ -0,0 +1,34 @@ +# NutriHelp Backend API +This is the backend API for the NutriHelp project. It is a RESTful API that provides the necessary endpoints for the frontend to interact with the database. + +## Installation +1. Open a terminal and navigate to the directory where you want to clone the repository. +2. Run the following command to clone the repository: +```bash +git clone https://github.com/Gopher-Industries/Nutrihelp-api +``` +3. Navigate to the project directory: +```bash +cd Nutrihelp-api +``` +4. Install the required dependencies (including python dependencies): +```bash +npm install +pip install -r requirements.txt +npm install node-fetch +``` +5. Contact a project maintainer to get the `.env` file that contains the necessary environment variables and place it in the root of the project directory. +6. Start the server: +```bash +npm start +``` +A message should appear in the terminal saying `Server running on port 80`. +You can now access the API at `http://localhost:80`. + +## Endpoints +The API is documented using OpenAPI 3.0, located in `index.yaml`. +You can view the documentation by navigating to `http://localhost:80/api-docs` in your browser. + + +/\ Please refer to the "PatchNotes_VersionControl" file for /\ +/\ recent updates and changes made through each version. /\ diff --git a/Vulnerability_Tool/Identified Vulnerabilities Report.docx b/Vulnerability_Tool/Identified Vulnerabilities Report.docx new file mode 100644 index 0000000..af11626 Binary files /dev/null and b/Vulnerability_Tool/Identified Vulnerabilities Report.docx differ diff --git a/Vulnerability_Tool/Vulnerability Report.docx b/Vulnerability_Tool/Vulnerability Report.docx new file mode 100644 index 0000000..4aed10b Binary files /dev/null and b/Vulnerability_Tool/Vulnerability Report.docx differ diff --git a/Vulnerability_Tool/Vulnerability_Scanner_V1.1.py b/Vulnerability_Tool/Vulnerability_Scanner_V1.1.py new file mode 100644 index 0000000..cabe7cc --- /dev/null +++ b/Vulnerability_Tool/Vulnerability_Scanner_V1.1.py @@ -0,0 +1,113 @@ +# Importing modules to assist with vulnerability scanning and detecting +import os +import re + +# Define text Colour +class Colour: + GREEN = '\033[92m' + RED = '\033[91m' + BLUE = '\033[94m' + YELLOW = '\033[93m' + V_PATTEN_NAME = '\033[38;5;208m' # Orange names + NORMAL = '\033[0m' + +# Define Vulnerability Pattern +V_Patterns = { + "Sql_Injection": re.compile(r'\.query\s*\(.*\+.*\)'), + "XSS": re.compile(r'res\.send\s*\(.*\+.*\)'), + "Command_Injection": re.compile(r'exec\s*\(.*\+.*\)'), + "insecure_file_handling": re.compile(r'fs\.unlink\s*\(.*\)'), + "insecure_file_upload": re.compile(r'multer\s*\(\s*{.*dest.*}\s*\)'), +# New Vulnerability Pattern identifications Added in "V1.1" + "Eval_Function": re.compile(r'eval\s*\(.*\)'), + "Directory_Movement": re.compile(r'fs\.readFile\s*\(.*\.\.\/.*\)'), + "Insecure_Token_Generation": re.compile(r'Math\.random\s*\(\)'), + "Dangerous_Permission_Level": re.compile(r'fs\.chmod\s*\(.*\)'), + "Redirects": re.compile(r'res\.redirect\s*\(.*req\.query\..*\)') + +} +# Opening the files for processing +def AnalyseFile(FileLocation): + vulnerabilities = {key: [] for key in V_Patterns.keys()} + try: + with open(FileLocation, 'r', encoding='utf-8') as file: + Data = file.read() + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + +# Check for vulnerabilities based on pre set V_Patterns + for key, pattern in V_Patterns.items(): + matches = pattern.findall(Data) + if matches: + vulnerabilities[key].extend(matches) + + return vulnerabilities + +# Formatting files for list +def list_files(): + return [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.js')] + +def OrderedF(Dataset): + print("|--------------------------------|\n| JavaScript files for Analysis: |\n|--------------------------------|") + for i, file in enumerate(Dataset, 1): + print(f"{i} - {file}") + +# Result box for outcome of vulnerability scan +def PrintOutcome(Data): + Outside = max(len(line) for line in Data.splitlines()) + 4 + print('|' + '-' * (Outside - 2) + '|') + for line in Data.splitlines(): + print(f"| {line.ljust(Outside - 4)} |") + print('|' + '-' * (Outside - 2) + '|') + +# Catches not JavaScript files in directory +def main(): + Dataset = list_files() + if not Dataset: + print("No .js files found") + return + +# Terminate program when "end" is entered in + while True: + OrderedF(Dataset) + User_Input = input("\nPlease enter a file number from the listed options\nor\nType 'end' to quit the application \n> ") + if User_Input == 'end': + break + +# Catches an input ouside of the file number range + try: + file_index = int(User_Input) - 1 + if file_index < 0 or file_index >= len(Dataset): + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease enter the file number from the listed options") + continue + + JsFile = Dataset[file_index] + print(f"{Colour.YELLOW}\nAnalysing: {Colour.NORMAL}{JsFile}") + vulnerabilities = AnalyseFile(JsFile) + +# This should not get called. However, is left here to future proof the application + if not vulnerabilities: + Outcome = f"Could not read file: {JsFile}" + +# No vulnerabilities have been located + elif not any(vulnerabilities.values()): + Outcome = f"{Colour.GREEN}No vulnerabilities found.{Colour.NORMAL}" + +# Lists the potentiaal vulnerability found + else: + Outcome = f"{Colour.RED}Potential Vulnerability Found: {Colour.NORMAL}\n" + for key, found in vulnerabilities.items(): + if found: + Outcome += f"{Colour.V_PATTEN_NAME} {key.replace('_', ' ').title()} vulnerabilities:{Colour.NORMAL}\n" + for q in found: + Outcome += f" - {q}\n" + +# Print Result + PrintOutcome(Outcome) +# Triggers invalid input - chance to try again + except ValueError: + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease Input a number.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/Vulnerability_Tool/Vulnerability_Scanner_V1.2.py b/Vulnerability_Tool/Vulnerability_Scanner_V1.2.py new file mode 100644 index 0000000..1e931ba --- /dev/null +++ b/Vulnerability_Tool/Vulnerability_Scanner_V1.2.py @@ -0,0 +1,119 @@ +# Importing modules to assist with vulnerability scanning and detecting +import os +import re + +# Define text Colour +class Colour: + GREEN = '\033[92m' + RED = '\033[91m' + BLUE = '\033[94m' + YELLOW = '\033[93m' + V_PATTEN_NAME = '\033[38;5;208m' # Orange names + NORMAL = '\033[0m' + +# Define Vulnerability Pattern +V_Patterns = { + "Sql_Injection": re.compile(r'\.query\s*\(.*\+.*\)'), + "XSS": re.compile(r'res\.send\s*\(.*\+.*\)'), + "Command_Injection": re.compile(r'exec\s*\(.*\+.*\)'), + "insecure_file_handling": re.compile(r'fs\.unlink\s*\(.*\)'), + "insecure_file_upload": re.compile(r'multer\s*\(\s*{.*dest.*}\s*\)'), +# New Vulnerability Pattern Identifications Added in "V1.1" + "Eval_Function": re.compile(r'eval\s*\(.*\)'), + "Directory_Movement": re.compile(r'fs\.readFile\s*\(.*\.\.\/.*\)'), + "Insecure_Token_Generation": re.compile(r'Math\.random\s*\(\)'), + "Dangerous_Permission_Level": re.compile(r'fs\.chmod\s*\(.*\)'), + "Redirects": re.compile(r'res\.redirect\s*\(.*req\.query\..*\)'), +# New Vulnerability Pattern Identifications Added in "V1.2" + "API_Key_Hardcoded": re.compile(r'api_key\s*=\s*[\'"].*[\'"]'), + "Weak_Hashing_Algorithm": re.compile(r'(md5|sha1|des)\s*\('), + "Plainetext_Credentials": re.compile(r'(username|password)\s*=\s*[\'"].*[\'"]'), + "Insecure_SSL_Configeration": re.compile(r'server\.listen\s*\(.*http.*\)'), + "HTTP_Called": re.compile(r'http\.get\s*\(.*\)') + +} +# Opening the files for processing +def AnalyseFile(FileLocation): + vulnerabilities = {key: [] for key in V_Patterns.keys()} + try: + with open(FileLocation, 'r', encoding='utf-8') as file: + Data = file.read() + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + +# Check for vulnerabilities based on pre set V_Patterns + for key, pattern in V_Patterns.items(): + matches = pattern.findall(Data) + if matches: + vulnerabilities[key].extend(matches) + + return vulnerabilities + +# Formatting files for list +def list_files(): + return [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.js')] + +def OrderedF(Dataset): + print("|--------------------------------|\n| JavaScript files for Analysis: |\n|--------------------------------|") + for i, file in enumerate(Dataset, 1): + print(f"{i} - {file}") + +# Result box for outcome of vulnerability scan +def PrintOutcome(Data): + Outside = max(len(line) for line in Data.splitlines()) + 4 + print('|' + '-' * (Outside - 2) + '|') + for line in Data.splitlines(): + print(f"| {line.ljust(Outside - 4)} |") + print('|' + '-' * (Outside - 2) + '|') + +# Catches not JavaScript files in directory +def main(): + Dataset = list_files() + if not Dataset: + print("No .js files found") + return + +# Terminate program when "end" is entered in + while True: + OrderedF(Dataset) + User_Input = input("\nPlease enter a file number from the listed options\nor\nType 'end' to quit the application \n> ") + if User_Input == 'end': + break + +# Catches an input ouside of the file number range + try: + file_index = int(User_Input) - 1 + if file_index < 0 or file_index >= len(Dataset): + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease enter the file number from the listed options") + continue + + JsFile = Dataset[file_index] + print(f"{Colour.YELLOW}\nAnalysing: {Colour.NORMAL}{JsFile}") + vulnerabilities = AnalyseFile(JsFile) + +# This should not get called. However, is left here to future proof the application + if not vulnerabilities: + Outcome = f"Could not read file: {JsFile}" + +# No vulnerabilities have been located + elif not any(vulnerabilities.values()): + Outcome = f"{Colour.GREEN}No vulnerabilities found.{Colour.NORMAL}" + +# Lists the potentiaal vulnerability found + else: + Outcome = f"{Colour.RED}Potential Vulnerability Found: {Colour.NORMAL}\n" + for key, found in vulnerabilities.items(): + if found: + Outcome += f"{Colour.V_PATTEN_NAME} {key.replace('_', ' ').title()} vulnerabilities:{Colour.NORMAL}\n" + for q in found: + Outcome += f" - {q}\n" + +# Print Result + PrintOutcome(Outcome) +# Triggers invalid input - chance to try again + except ValueError: + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease Input a number.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/Vulnerability_Tool/Vulnerability_Scanner_V1.3.py b/Vulnerability_Tool/Vulnerability_Scanner_V1.3.py new file mode 100644 index 0000000..b1828c8 --- /dev/null +++ b/Vulnerability_Tool/Vulnerability_Scanner_V1.3.py @@ -0,0 +1,125 @@ +# Importing modules to assist with vulnerability scanning and detecting +import os +import re + +# Define text Colour +class Colour: + GREEN = '\033[92m' + RED = '\033[91m' + BLUE = '\033[94m' + YELLOW = '\033[93m' + V_PATTEN_NAME = '\033[38;5;208m' # Orange names + NORMAL = '\033[0m' + +# Define Vulnerability Pattern +V_Patterns = { + "Sql_Injection": re.compile(r'\.query\s*\(.*\+.*\)'), + "XSS": re.compile(r'res\.send\s*\(.*\+.*\)'), + "Command_Injection": re.compile(r'exec\s*\(.*\+.*\)'), + "insecure_file_handling": re.compile(r'fs\.unlink\s*\(.*\)'), + "insecure_file_upload": re.compile(r'multer\s*\(\s*{.*dest.*}\s*\)'), +# New Vulnerability Pattern Identifications Added in "V1.1" + "Eval_Function": re.compile(r'eval\s*\(.*\)'), + "Directory_Movement": re.compile(r'fs\.readFile\s*\(.*\.\.\/.*\)'), + "Insecure_Token_Generation": re.compile(r'Math\.random\s*\(\)'), + "Dangerous_Permission_Level": re.compile(r'fs\.chmod\s*\(.*\)'), + "Redirects": re.compile(r'res\.redirect\s*\(.*req\.query\..*\)'), +# New Vulnerability Pattern Identifications Added in "V1.2" + "API_Key_Hardcoded": re.compile(r'api_key\s*=\s*[\'"].*[\'"]'), + "Weak_Hashing_Algorithm": re.compile(r'(md5|sha1|des)\s*\('), + "Planetext_Credentials": re.compile(r'(username|password)\s*=\s*[\'"].*[\'"]'), + "Insecure_SSL_Configeration": re.compile(r'server\.listen\s*\(.*http.*\)'), + "HTTP_Called": re.compile(r'http\.get\s*\(.*\)'), +# New Vulnerability Pattern Identifications Added in "V1.3" + "Sensitive_Data_Logging": re.compile(r'console\.(log|debug|error|warn)\s*\(.*(password|secret|key|token).*\)'), + "JSON_Parsing_No_Validation": re.compile(r'JSON\.parse\s*\(.*req\.(body|query|params).*\)'), + "Environment_Variables_In_Planetext": re.compile(r'process\.env\.[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*[\'"].+[\'"]'), + "Debug_Left_Exposed": re.compile(r'app\.get\s*\([\'"].*debug.*[\'"],.*\)'), + "Insecure_File_Paths": re.compile(r'(fs\.(readFile|writeFile))\s*\(.*req\.(body|query|params)\.path.*\)'), + "Unsecured_Spawn": re.compile(r'spawn\s*\(.*\)') +} +# Opening the files for processing +def AnalyseFile(FileLocation): + vulnerabilities = {key: [] for key in V_Patterns.keys()} + try: + with open(FileLocation, 'r', encoding='utf-8') as file: + Data = file.read() + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + +# Check for vulnerabilities based on pre set V_Patterns + for key, pattern in V_Patterns.items(): + matches = pattern.findall(Data) + if matches: + vulnerabilities[key].extend(matches) + + return vulnerabilities + +# Formatting files for list +def list_files(): + return [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.js')] + +def OrderedF(Dataset): + print("|--------------------------------|\n| JavaScript files for Analysis: |\n|--------------------------------|") + for i, file in enumerate(Dataset, 1): + print(f"{i} - {file}") + +# Result box for outcome of vulnerability scan +def PrintOutcome(Data): + Outside = max(len(line) for line in Data.splitlines()) + 4 + print('|' + '-' * (Outside - 2) + '|') + for line in Data.splitlines(): + print(f"| {line.ljust(Outside - 4)} |") + print('|' + '-' * (Outside - 2) + '|') + +# Catches not JavaScript files in directory +def main(): + Dataset = list_files() + if not Dataset: + print("No .js files found") + return + +# Terminate program when "end" is entered in + while True: + OrderedF(Dataset) + User_Input = input("\nPlease enter a file number from the listed options\nor\nType 'end' to quit the application \n> ") + if User_Input == 'end': + break + +# Catches an input ouside of the file number range + try: + file_index = int(User_Input) - 1 + if file_index < 0 or file_index >= len(Dataset): + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease enter the file number from the listed options") + continue + + JsFile = Dataset[file_index] + print(f"{Colour.YELLOW}\nAnalysing: {Colour.NORMAL}{JsFile}") + vulnerabilities = AnalyseFile(JsFile) + +# This should not get called. However, is left here to future proof the application + if not vulnerabilities: + Outcome = f"Could not read file: {JsFile}" + +# No vulnerabilities have been located + elif not any(vulnerabilities.values()): + Outcome = f"{Colour.GREEN}No vulnerabilities found.{Colour.NORMAL}" + +# Lists the potentiaal vulnerability found + else: + Outcome = f"{Colour.RED}Potential Vulnerability Found: {Colour.NORMAL}\n" + for key, found in vulnerabilities.items(): + if found: + Outcome += f"{Colour.V_PATTEN_NAME} {key.replace('_', ' ').title()} vulnerabilities:{Colour.NORMAL}\n" + for q in found: + Outcome += f" - {q}\n" + +# Print Result + PrintOutcome(Outcome) +# Triggers invalid input - chance to try again + except ValueError: + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease Input a number.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/Vulnerability_Tool/Vulnerability_Scanner_V1.4.py b/Vulnerability_Tool/Vulnerability_Scanner_V1.4.py new file mode 100644 index 0000000..c50e28c --- /dev/null +++ b/Vulnerability_Tool/Vulnerability_Scanner_V1.4.py @@ -0,0 +1,160 @@ +import os +import re +import sys +import docx + +# Define Vulnerability Patterns for JavaScript files +JS_Patterns = { + "Sql_Injection": re.compile(r'\.query\s*\(.*\+.*\)'), + "XSS": re.compile(r'res\.send\s*\(.*\+.*\)'), + "Command_Injection": re.compile(r'exec\s*\(.*\+.*\)'), + "insecure_file_handling": re.compile(r'fs\.unlink\s*\(.*\)'), + "insecure_file_upload": re.compile(r'multer\s*\(\s*{.*dest.*}\s*\)'), + "Eval_Function": re.compile(r'eval\s*\(.*\)'), + "Directory_Movement": re.compile(r'fs\.readFile\s*\(.*\.\./.*\)'), + "Insecure_Token_Generation": re.compile(r'Math\.random\s*\(\)'), + "Dangerous_Permission_Level": re.compile(r'fs\.chmod\s*\(.*\)'), + "Redirects": re.compile(r'res\.redirect\s*\(.*req\.query\..*\)'), + "API_Key_Hardcoded": re.compile(r'api_key\s*=\s*[\'"]\S+[\'"]'), + "Weak_Hashing_Algorithm": re.compile(r'(md5|sha1|des)\s*\('), + "Planetext_Credentials": re.compile(r'(username|password)\s*=\s*[\'"]\S+[\'"]'), + "Insecure_SSL_Config": re.compile(r'server\.listen\s*\(.*http.*\)'), + "HTTP_Called": re.compile(r'http\.get\s*\(.*\)'), + "Sensitive_Data_Logging": re.compile(r'console\.(log|debug|error|warn)\s*\(.*(password|secret|key|token).*\)'), + "JSON_Parsing_No_Validation": re.compile(r'JSON\.parse\s*\(.*req\.(body|query|params).*\)'), + "Environment_Variables_In_Planetext": re.compile(r'process\.env\.[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*[\'"]\S+[\'"]'), + "Debug_Left_Exposed": re.compile(r'app\.get\s*\([\'"]\.\*/debug.*[\'"]'), + "Insecure_File_Paths": re.compile(r'(fs\.(readFile|writeFile))\s*\(.*req\.(body|query|params)\.path.*\)'), + "Unsecured_Spawn": re.compile(r'spawn\s*\(.*\)') +} + +Python_Patterns = { + "Eval_Function": re.compile(r'eval\s*\(.*\)'), + "Exec_Function": re.compile(r'exec\s*\(.*\)'), + "OS_Command_Injection": re.compile(r'os\.(system|popen)\s*\(.*\)'), + "Subprocess_Injection": re.compile(r'subprocess\.(Popen|call|run)\s*\(.*\)'), + "Pickle_Load": re.compile(r'pickle\.load\s*\(.*\)'), + "Hardcoded_Credentials": re.compile(r'(username|password)\s*=\s*[\'"]\S+[\'"]'), + "Weak_Hashing_Algorithm": re.compile(r'(md5|sha1|des)\s*\('), + "Insecure_Random": re.compile(r'random\.randint\s*\(.*\)'), + "Unverified_SSL": re.compile(r'requests\.get\s*\(.*verify\s*=\s*False\)'), + "Dangerous_File_Access": re.compile(r'open\s*\(.*\)'), + "Environment_Variables_Exposure": re.compile(r'os\.environ\[\s*[\'"]\S+[\'"]\s*\]'), + "Debug_Logging": re.compile(r'print\s*\(.*(password|secret|key|token).*\)'), + "Deserialization_Risk": re.compile(r'json\.loads\s*\(.*\)'), + "Unsecured_Spawn": re.compile(r'os\.spawn\s*\(.*\)') +} + +Word_Patterns = { + "Hardcoded_Credentials": re.compile(r'(username|password)\s*=\s*[\'"]\S+[\'"]'), + "Sensitive_Keywords": re.compile(r'(confidential|private|classified|top secret)', re.IGNORECASE), + "Email_Addresses": re.compile(r'[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+'), + "Phone_Numbers": re.compile(r'\b(?:\+\d{1,3})?[-.\s]?(\d{2,4})?[-.\s]?\d{3}[-.\s]?\d{4}\b'), + "URLs": re.compile(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+') +} + +TXT_Patterns = { + "Hardcoded_Credentials": re.compile(r'(username|password|token|secret|access[_-]?key)\s*[:=]\s*[\'"]?\S+[\'"]?', re.IGNORECASE), + "Sensitive_Keywords": re.compile(r'\b(confidential|private|classified|secret|token|proprietary)\b', re.IGNORECASE), + "Email_Addresses": re.compile(r'[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+'), + "URLs": re.compile(r'https?://[^\s]+'), + "IP_Addresses": re.compile(r'\b(?:\d{1,3}\.){3}\d{1,3}\b'), + "AWS_Credentials": re.compile(r'AKIA[0-9A-Z]{16}'), + "API_Keys": re.compile(r'(?i)(api[_-]?key|access[_-]?token)\s*[:=]\s*[\'"]?[A-Za-z0-9\-_]{20,}'), + "JWT_Tokens": re.compile(r'eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9._-]{10,}\.[A-Za-z0-9._-]{10,}') +} + +YML_Patterns = { + "Hardcoded_Credentials": re.compile(r'(username|password|token|secret|access[_-]?key)\s*:\s*[\'"]?\S+[\'"]?', re.IGNORECASE), + "Sensitive_Keywords": re.compile(r'\b(confidential|private|classified|secret|proprietary)\b', re.IGNORECASE), + "Email_Addresses": re.compile(r'[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+'), + "URLs": re.compile(r'https?://[^\s]+'), + "IP_Addresses": re.compile(r'\b(?:\d{1,3}\.){3}\d{1,3}\b'), + "AWS_Credentials": re.compile(r'AKIA[0-9A-Z]{16}'), + "API_Keys": re.compile(r'(?i)(api[_-]?key|access[_-]?token)\s*:\s*[\'"]?[A-Za-z0-9\-_]{20,}'), + "Unsafe_YAML_Object": re.compile(r'!!python/(object|module|function)') +} + + +def AnalyseFile(FileLocation, patterns): + vulnerabilities = {key: [] for key in patterns.keys()} + try: + with open(FileLocation, 'r', encoding='utf-8') as file: + Data = file.read() + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + for key, pattern in patterns.items(): + matches = pattern.findall(Data) + if matches: + vulnerabilities[key].extend(matches) + return vulnerabilities + +def AnalyseWordFile(FileLocation): + vulnerabilities = {key: [] for key in Word_Patterns.keys()} + try: + doc = docx.Document(FileLocation) + text_data = "\n".join([para.text for para in doc.paragraphs]) + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + for key, pattern in Word_Patterns.items(): + matches = pattern.findall(text_data) + if matches: + vulnerabilities[key].extend(matches) + return vulnerabilities + +def get_modified_files(): + return os.getenv("MODIFIED_FILES", "").split() + +def PrintOutcome(Data): + Outside = max(len(line) for line in Data.splitlines()) + 4 + print('|' + '-' * (Outside - 2) + '|') + for line in Data.splitlines(): + print(f"| {line.ljust(Outside - 4)} |") + print('|' + '-' * (Outside - 2) + '|') + +def main(): + modified_files = get_modified_files() + if not modified_files: + print("No modified files detected.") + return + for file in modified_files: + if not os.path.exists(file): + print(f"File not found: {file}") + continue + print(f"Detected new file: {file}") + if file.endswith(".js"): + print(f"Scanning {file} for vulnerabilities...") + patterns = JS_Patterns + vulnerabilities = AnalyseFile(file, patterns) + elif file.endswith(".py"): + print(f"Scanning {file} for vulnerabilities...") + patterns = Python_Patterns + vulnerabilities = AnalyseFile(file, patterns) + elif file.endswith(".docx"): + print(f"Scanning {file} for vulnerabilities...") + vulnerabilities = AnalyseWordFile(file) + elif file.endswith(".txt"): + print(f"Scanning {file} for vulnerabilities...") + vulnerabilities = AnalyseFile(file, TXT_Patterns) + elif file.endswith(".yml") or file.endswith("yaml"): + print(f"Scanning {file} for vulnerabilities...") + vulnerabilities = AnalyseFile(file, YML_Patterns) + else: + print(f"{file} is not a JavaScript, Python or Word file. Skipping...") + continue + + if vulnerabilities and any(vulnerabilities.values()): + Outcome = f"Potential Vulnerability Found in {file}:\n" + for key, found in vulnerabilities.items(): + if found: + Outcome += f" {key.replace('_', ' ').title()} vulnerabilities:\n" + for q in found: + Outcome += f" - {q}\n" + else: + Outcome = f"No vulnerabilities found in {file}." + PrintOutcome(Outcome) + +if __name__ == "__main__": + main() diff --git a/Vulnerability_Tool/Vulnerability_V1.0.py b/Vulnerability_Tool/Vulnerability_V1.0.py new file mode 100644 index 0000000..4cbf201 --- /dev/null +++ b/Vulnerability_Tool/Vulnerability_V1.0.py @@ -0,0 +1,106 @@ +# Importing modules to assist with vulnerability scanning and detecting +import os +import re + +# Define text Colour +class Colour: + GREEN = '\033[92m' + RED = '\033[91m' + BLUE = '\033[94m' + YELLOW = '\033[93m' + V_PATTEN_NAME = '\033[38;5;208m' # Orange names + NORMAL = '\033[0m' + +# Define Vulnerability Pattern +V_Patterns = { + "Sql_Injection": re.compile(r'\.query\s*\(.*\+.*\)'), + "XSS": re.compile(r'res\.send\s*\(.*\+.*\)'), + "Command_Injection": re.compile(r'exec\s*\(.*\+.*\)'), + "insecure_file_handling": re.compile(r'fs\.unlink\s*\(.*\)'), + "insecure_file_upload": re.compile(r'multer\s*\(\s*{.*dest.*}\s*\)') +} +# Opening the files for processing +def AnalyseFile(FileLocation): + vulnerabilities = {key: [] for key in V_Patterns.keys()} + try: + with open(FileLocation, 'r', encoding='utf-8') as file: + Data = file.read() + except Exception as e: + print(f"Error reading file {FileLocation}: {e}") + return None + +# Check for vulnerabilities based on pre set V_Patterns + for key, pattern in V_Patterns.items(): + matches = pattern.findall(Data) + if matches: + vulnerabilities[key].extend(matches) + + return vulnerabilities + +# Formatting files for list +def list_files(): + return [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.js')] + +def OrderedF(Dataset): + print("|--------------------------------|\n| JavaScript files for Analysis: |\n|--------------------------------|") + for i, file in enumerate(Dataset, 1): + print(f"{i} - {file}") + +# Result box for outcome of vulnerability scan +def PrintOutcome(Data): + Outside = max(len(line) for line in Data.splitlines()) + 4 + print('|' + '-' * (Outside - 2) + '|') + for line in Data.splitlines(): + print(f"| {line.ljust(Outside - 4)} |") + print('|' + '-' * (Outside - 2) + '|') + +# Catches not JavaScript files in directory +def main(): + Dataset = list_files() + if not Dataset: + print("No .js files found") + return + +# Terminate program when "end" is entered in + while True: + OrderedF(Dataset) + User_Input = input("\nPlease enter a file number from the listed options\nor\nType 'end' to quit the application \n> ") + if User_Input == 'end': + break + +# Catches an input ouside of the file number range + try: + file_index = int(User_Input) - 1 + if file_index < 0 or file_index >= len(Dataset): + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease enter the file number from the listed options") + continue + + JsFile = Dataset[file_index] + print(f"{Colour.YELLOW}\nAnalysing: {Colour.NORMAL}{JsFile}") + vulnerabilities = AnalyseFile(JsFile) + +# This should not get called. However, is left here to future proof the application + if not vulnerabilities: + Outcome = f"Could not read file: {JsFile}" + +# No vulnerabilities have been located + elif not any(vulnerabilities.values()): + Outcome = f"{Colour.GREEN}No vulnerabilities found.{Colour.NORMAL}" + +# Lists the potentiaal vulnerability found + else: + Outcome = f"{Colour.RED}Potential Vulnerability Found: {Colour.NORMAL}\n" + for key, found in vulnerabilities.items(): + if found: + Outcome += f"{Colour.V_PATTEN_NAME} {key.replace('_', ' ').title()} vulnerabilities:{Colour.NORMAL}\n" + for q in found: + Outcome += f" - {q}\n" + +# Print Result + PrintOutcome(Outcome) +# Triggers invalid input - chance to try again + except ValueError: + print(f"\n{Colour.BLUE}|---------------|\n| Invalid input |\n|---------------|{Colour.NORMAL}\nPlease Input a number.") + +if __name__ == "__main__": + main() diff --git a/au.env b/au.env new file mode 100644 index 0000000..c037345 --- /dev/null +++ b/au.env @@ -0,0 +1 @@ +JWT_SECRET=your_super_secret_key diff --git a/controller/accountController.js b/controller/accountController.js new file mode 100644 index 0000000..d95cda4 --- /dev/null +++ b/controller/accountController.js @@ -0,0 +1,22 @@ +const getMealPlanByUserIdAndDate = require('../model/getMealPlanByUserIdAndDate.js'); + +const getAllAccount = async (req, res) => { + try { + const { user_id, created_at } = req.query; + + const mealPlans = await getMealPlanByUserIdAndDate(user_id, created_at); + + if (!mealPlans || mealPlans.length === 0) { + return res.status(404).json({ message: 'No meal plans found' }); + } + + res.status(200).json(mealPlans); + } catch (error) { + console.log('Error retrieving appointments:', error); + res.status(500).json({ error: 'Internal server error' }); + } +} + +module.exports = { + getAllAccount +}; \ No newline at end of file diff --git a/controller/appointmentController.js b/controller/appointmentController.js new file mode 100644 index 0000000..976c196 --- /dev/null +++ b/controller/appointmentController.js @@ -0,0 +1,44 @@ +const addAppointment = require('../model/addAppointment.js'); +const getAllAppointments = require('../model/getAppointments.js'); +const { validationResult } = require('express-validator'); +const { appointmentValidation } = require('../validators/appointmentValidator.js'); + +// Function to handle saving appointment data +const saveAppointment = async (req, res) => { + // Check for validation errors + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + // Extract appointment data from the request body + const { userId, date, time, description } = req.body; + + try { + // Call the addAppointment model function to insert the data into the database + const result = await addAppointment(userId, date, time, description); + + // Respond with success message if appointment data is successfully saved + res.status(201).json({ message: 'Appointment saved successfully' });//, appointmentId: result.id + } catch (error) { + console.error('Error saving appointment:', error); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +// Function to handle retrieving all appointment data +const getAppointments = async (req, res) => { + try { + // Call the appropriate model function to retrieve all appointment data from the database + // Here, you would call a function from the model layer that fetches all appointments + // For demonstration purposes, let's assume a function called getAllAppointments() in the model layer + const appointments = await getAllAppointments(); + + // Respond with the retrieved appointment data + res.status(200).json(appointments); + } catch (error) { + console.error('Error retrieving appointments:', error); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +module.exports = { saveAppointment, getAppointments }; diff --git a/controller/authController.js b/controller/authController.js new file mode 100644 index 0000000..9843ef5 --- /dev/null +++ b/controller/authController.js @@ -0,0 +1,112 @@ +// controllers/authController.js +const { createClient } = require('@supabase/supabase-js'); +const sendVerificationEmail = require('../new_utils/sendVerificationEmail'); + +// ── Supabase clients ─────────────────────────────────────────────────────────── +const supabase = createClient( + process.env.SUPABASE_URL, + process.env.SUPABASE_ANON_KEY +); + +// 用 service-role 做 token 驗證更新(避免 RLS) +const admin = createClient( + process.env.SUPABASE_URL, + process.env.SUPABASE_SERVICE_ROLE_KEY || process.env.SUPABASE_ANON_KEY, + { auth: { persistSession: false } } +); + +// 開機時打 log,方便你confirm係新程式 +console.log('[authController] loaded. SRK set =', !!process.env.SUPABASE_SERVICE_ROLE_KEY); + +// ── Handlers ────────────────────────────────────────────────────────────────── +exports.logLoginAttempt = async (req, res) => { + try { + console.log('[logLoginAttempt] payload:', req.body); + const { email, user_id, success, ip_address, created_at } = req.body; + + if (!email || typeof success === 'undefined' || !ip_address || !created_at) { + console.warn('[logLoginAttempt] missing required fields'); + return res.status(400).json({ + error: 'Missing required fields: email, success, ip_address, created_at', + }); + } + + const { data, error } = await supabase.from('auth_logs').insert([{ + email, user_id: user_id || null, success, ip_address, created_at, + }]); + + if (error) { + console.error('❌ [logLoginAttempt] Supabase insert error:', error); + return res.status(500).json({ error: 'Failed to log login attempt', detail: error }); + } + + console.log('✅ [logLoginAttempt] logged:', data); + return res.status(201).json({ message: 'Login attempt logged successfully', data }); + } catch (err) { + console.error('❌ [logLoginAttempt] unexpected error:', err); + return res.status(500).json({ error: 'Internal server error', detail: err && err.message }); + } +}; + +exports.requestEmailVerification = async (req, res) => { + console.log('[requestEmailVerification] body:', req.body); + const { email } = req.body; + if (!email) return res.status(400).json({ error: 'Email is required.' }); + + try { + const result = await sendVerificationEmail(email); + console.log('[requestEmailVerification] mailer returned:', result); + + const verifyUrl = result?.verifyUrl || result?.verifyURL || result?.url || null; + console.log('🔗 DEV VERIFY LINK:', verifyUrl || '(none)'); + + return res.status(200).json({ + message: `Verification email sent to ${email}`, + verifyUrl: verifyUrl || undefined, + }); + } catch (err) { + console.error('❌ [requestEmailVerification] error:', err?.message, err); + return res.status(500).json({ error: 'Internal server error' }); + } +}; + +// ★ Token-only 驗證(不改 users 表) +// GET /api/verify-email/:token +exports.verifyEmailToken = async (req, res) => { + const { token } = req.params; + const now = new Date().toISOString(); + console.log('[verifyEmailToken] start. token =', token); + + try { + // 1) 讀 token 行 + const { data: row, error: findErr } = await admin + .from('email_verification_tokens') + .select('id, user_email, expires_at, verified_at') + .eq('token', token) + .single(); + + console.log('[verifyEmailToken] row =', row, 'error =', findErr); + + if (findErr || !row) return res.status(400).send('Invalid or expired link'); + if (row.verified_at) return res.status(400).send('This link was already used'); + if (row.expires_at && new Date(row.expires_at) < new Date()) + return res.status(400).send('This link has expired'); + + // 2) 設 verified_at(一次性) + const { error: updErr } = await admin + .from('email_verification_tokens') + .update({ verified_at: now }) + .eq('id', row.id); + + if (updErr) { + console.error('[verifyEmailToken] update error:', updErr); + return res.status(500).send('Failed to verify token (DB update)'); + } + + console.log('✅ [verifyEmailToken] verified_at set for token id =', row.id); + return res.status(200).send('Email verified successfully (token)'); + } catch (e) { + console.error('❌ [verifyEmailToken] unexpected error:', e); + return res.status(500).send('Unexpected error'); + } +}; diff --git a/controller/chatbotController.js b/controller/chatbotController.js new file mode 100644 index 0000000..b214bdc --- /dev/null +++ b/controller/chatbotController.js @@ -0,0 +1,213 @@ +const { addHistory, getHistory, deleteHistory } = require('../model/chatbotHistory'); +const fetch = (...args) => + import('node-fetch').then(({default: fetch}) => fetch(...args)); + + +// Get response message generated by chatbot +// Used by [POST] localhost/api/chatbot/query + +const getChatResponse = async (req, res) => { + // Get input string from user + const { user_id, user_input } = req.body; + + try { + // Validate input data + if (!user_id || !user_input) { + return res.status(400).json({ + error: "Missing required fields: user_id and user_input are required" + }); + } + + if (typeof user_input !== 'string' || user_input.trim().length === 0) { + return res.status(400).json({ + error: "Invalid input: user_input must be a non-empty string" + }); + } + + // For now, use a simple response if AI server is not available + let responseText = `I understand you're asking about "${user_input}". How can I help you with that?`; + + try { + // Send request to API server and get response + const ai_response = await fetch("http://localhost:8000/ai-model/chatbot/chat", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + "query": user_input + }) + }); + + const result = await ai_response.json(); + + // Validate response data + if (result && result.msg) { + responseText = result.msg; + } + } catch (aiError) { + console.error("Error connecting to AI server:", aiError); + // Continue with fallback response + } + + // Store chat history + try { + await addHistory(user_id, user_input, responseText); + } catch (dbError) { + console.error("Error storing chat history:", dbError); + } + + // Return response to user + return res.status(200).json({ + message: "Success", + response_text: responseText + }); + + } catch (error) { + console.error("Error in chatbot response:", error); + return res.status(500).json({ + error: "Internal server error" + }); + } +}; + +// Get response message generated by chatbot +// Used by [POST] localhost/api/chatbot/add_urls +const addURL = async (req, res) => { + // Get input string from user + const { urls } = req.body; + + try { + // Validate input data + if (!urls) { + return res.status(400).json({ + error: "Invalid input data, urls not found" + }); + } + + try { + // Send request to API server and get response + const ai_response = await fetch(`http://localhost:8000/ai-model/chatbot/add_urls?urls=${urls}`, { + method: "POST", + headers: { + "Content-Type": "application/json" + } + }); + + const result = await ai_response.json(); + + // Validate response data and send corresponding error message + if (!result) { + return res.status(400).json({ + error: "An error occurred when fetching result from AI server" + }); + } + + // Return response to user + return res.status(200).json({ + message: "Success", + result: result + }); + } catch (aiError) { + console.error("Error connecting to AI server:", aiError); + return res.status(503).json({ + error: "AI server unavailable" + }); + } + + } catch (error) { + console.error("Error processing URL:", error); + return res.status(500).json({ + error: "Internal server error" + }); + } +}; + +// Get response message generated by chatbot +// Used by [POST] localhost/api/chatbot/add_pdfs +const addPDF = async (req, res) => { + // Get input string from user + const { pdfs } = req.body; + + try { + // Placeholder implementation + return res.status(200).json({ + message: "Success", + result: "This is dummy response" + }); + } catch (error) { + console.error("Error in chatbot response:", error); + return res.status(500).json({ + error: "Internal server error", + details: process.env.NODE_ENV === 'development' ? error.message : undefined + }); + } +}; + +// Retrieve the saved chat history stored in database +// Used by [POST] localhost/api/chatbot/history +const getChatHistory = async (req, res) => { + const { user_id } = req.body; + + try { + // Validate input data + if (!user_id) { + return res.status(400).json({ + error: "Missing required field: user_id is required" + }); + } + + const history = await getHistory(user_id); + + if (!history) { + return res.status(404).json({ + error: "No chat history found for this user" + }); + } + + return res.status(200).json({ + message: "Chat history retrieved successfully", + chat_history: history + }); + } catch (error) { + console.error("Error retrieving chat history:", error); + return res.status(500).json({ + error: "Internal server error", + details: process.env.NODE_ENV === 'development' ? error.message : undefined + }); + } +}; + +// Clear the chat history stored in database +// Used by [DELETE] localhost/api/chatbot/history +const clearChatHistory = async (req, res) => { + const { user_id } = req.body; + + try { + // Validate input data + if (!user_id) { + return res.status(400).json({ + error: "Missing required field: user_id is required" + }); + } + + await deleteHistory(user_id); + return res.status(200).json({ + message: "Chat history cleared successfully" + }); + } catch (error) { + console.error("Error clearing chat history:", error); + return res.status(500).json({ + error: "Internal server error", + details: process.env.NODE_ENV === 'development' ? error.message : undefined + }); + } +}; + +module.exports = { + getChatResponse, + addURL, + addPDF, + getChatHistory, + clearChatHistory +}; \ No newline at end of file diff --git a/controller/contactusController.js b/controller/contactusController.js index ba1f21f..c9a0d53 100644 --- a/controller/contactusController.js +++ b/controller/contactusController.js @@ -1,7 +1,24 @@ -// dbClient = require('../dbConnection.js'); +let addContactUsMsg = require("../model/addContactUsMsg.js"); +const { validationResult } = require('express-validator'); +const { contactusValidator } = require('../validators/contactusValidator.js'); -const contactus = { +const contactus = async (req, res) => { + // Check for validation errors + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + const { name, email, subject, message } = req.body; + + try { + await addContactUsMsg(name, email, subject, message); + + return res.status(201).json({ message: 'Data received successfully!' }); + } catch (error) { + console.error(error); + return res.status(500).json({ error: 'Internal server error' }); + } }; -module.exports = contactus; \ No newline at end of file +module.exports = { contactus }; \ No newline at end of file diff --git a/controller/estimatedCostController.js b/controller/estimatedCostController.js new file mode 100644 index 0000000..0528188 --- /dev/null +++ b/controller/estimatedCostController.js @@ -0,0 +1,34 @@ +let getFullorPartialCost = require('../model/getFullorPartialCost'); + +const getCost = async (req, res) => { + const recipe_id = req.params.recipe_id; + var { desired_servings, exclude_ids } = req.query; + + try { + if (!desired_servings) { + desired_servings = 0; + } + if (!exclude_ids) { + exclude_ids = ""; + } + + const result = await getFullorPartialCost.estimateCost(recipe_id, desired_servings, exclude_ids); + + if (result.status != 200) { + return res.status(result.status).json({ + error: result.error + }); + } + + return res.status(200).json(result.estimatedCost); + } catch (error) { + console.error("Error in estimation process: ", error); + return res.status(500).json({ + error: "Internal server error" + }) + } +} + +module.exports = { + getCost +} \ No newline at end of file diff --git a/controller/filterController.js b/controller/filterController.js new file mode 100644 index 0000000..1527fd4 --- /dev/null +++ b/controller/filterController.js @@ -0,0 +1,110 @@ +const supabase = require('../dbConnection'); + +/** + * Filter recipes based on dietary preferences and allergens + * @param {Request} req - Express request object + * @param {Response} res - Express response object + */ +const filterRecipes = async (req, res) => { + const { allergies, dietary } = req.query; + + try { + // Fetch the mapping of dietary names to IDs + const { data: dietaryMapping, error: dietaryError } = await supabase + .from('dietary_requirements') + .select('id, name'); + + if (dietaryError) throw dietaryError; + + // Validate dietary input + if (dietary && !dietaryMapping.some(d => d.name.toLowerCase().includes(dietary.toLowerCase()))) { + return res.status(400).json({ error: "Invalid dietary requirement provided" }); + } + + // Find dietary IDs for partial matches + const dietaryFilterIds = dietary + ? dietaryMapping + .filter(d => d.name.toLowerCase().includes(dietary.toLowerCase())) + .map(d => d.id.toString()) + : []; + + // Fetch recipes with their dietary requirements and ingredients + const { data: recipes, error: recipeError } = await supabase + .from('recipes') + .select(` + id, + recipe_name, + dietary, + dietary_requirements ( + id, + name + ), + ingredients ( + id, + name, + allergies_type ( + id, + name + ) + ) + `); + + if (recipeError) throw recipeError; + + // Validate allergies input + const allergyList = allergies + ? (Array.isArray(allergies) ? allergies : allergies.split(',')).map(allergy => + allergy.toLowerCase().trim() + ) + : []; + + const { data: allergensMapping, error: allergensError } = await supabase + .from('allergies') + .select('id, name'); + + if (allergensError) throw allergensError; + + if ( + allergyList.length && + !allergyList.every(allergy => + allergensMapping.some(a => a.name.toLowerCase().includes(allergy)) + ) + ) { + return res.status(400).json({ error: "Invalid allergen provided" }); + } + + // Filter recipes based on dietary requirements and allergens + const filteredRecipes = recipes.filter(recipe => { + // Check if any ingredient in the recipe has an allergen matching the allergyList (partial match) + const hasAllergy = recipe.ingredients.some(ingredient => { + return ( + ingredient.allergies_type && + allergyList.some(allergy => + ingredient.allergies_type.name + .toLowerCase() + .includes(allergy) // Check for partial match + ) + ); + }); + + // Exclude recipes with ingredients containing allergens + if (hasAllergy) return false; + + // Check if recipe matches any of the dietary filter IDs + const dietaryCheck = + !dietaryFilterIds.length || + (recipe.dietary && dietaryFilterIds.includes(recipe.dietary.toString())); + + return dietaryCheck; + }); + + res.status(200).json(filteredRecipes); + } catch (error) { + console.error('Error filtering recipes:', error.message); + res.status(400).json({ error: error.message }); + } +}; + +module.exports = { + filterRecipes, +}; diff --git a/controller/foodDataController.js b/controller/foodDataController.js new file mode 100644 index 0000000..9aa050c --- /dev/null +++ b/controller/foodDataController.js @@ -0,0 +1,87 @@ +const fetchAllDietaryRequirements = require("../model/fetchAllDietaryRequirements.js"); +const fetchAllCuisines = require("../model/fetchAllCuisines.js"); +const fetchAllAllergies = require("../model/fetchAllAllergies.js"); +const fetchAllIngredients = require("../model/fetchAllIngredients.js"); +const fetchAllCookingMethods = require("../model/fetchAllCookingMethods.js"); +const fetchAllSpiceLevels = require("../model/fetchAllSpiceLevels.js"); +const fetchAllHealthConditions = require("../model/fetchAllHealthConditions"); + +const getAllDietaryRequirements = async (req, res) => { + try { + const dietaryRequirements = await fetchAllDietaryRequirements(); + return res.status(200).json(dietaryRequirements); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllCuisines = async (req, res) => { + try { + const cuisines = await fetchAllCuisines(); + return res.status(200).json(cuisines); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllAllergies = async (req, res) => { + try { + const allergies = await fetchAllAllergies(); + return res.status(200).json(allergies); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllIngredients = async (req, res) => { + try { + const foodTypes = await fetchAllIngredients(); + return res.status(200).json(foodTypes); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllCookingMethods = async (req, res) => { + try { + const cookingMethods = await fetchAllCookingMethods(); + return res.status(200).json(cookingMethods); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllSpiceLevels = async (req, res) => { + try { + const spiceLevels = await fetchAllSpiceLevels(); + return res.status(200).json(spiceLevels); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +const getAllHealthConditions = async (req, res) => { + try { + const healthConditions = await fetchAllHealthConditions(); + return res.status(200).json(healthConditions); + } catch (error) { + console.error(error); + return res.status(500).json({error: "Internal server error"}); + } +}; + +module.exports = { + getAllDietaryRequirements, + getAllCuisines, + getAllAllergies, + getAllIngredients, + getAllCookingMethods, + getAllSpiceLevels, + getAllHealthConditions +}; \ No newline at end of file diff --git a/controller/healthArticleController.js b/controller/healthArticleController.js new file mode 100644 index 0000000..9503929 --- /dev/null +++ b/controller/healthArticleController.js @@ -0,0 +1,21 @@ +const getHealthArticles = require('../model/getHealthArticles'); + +const searchHealthArticles = async (req, res) => { + const { query } = req.query; + + if (!query) { + return res.status(400).json({ error: 'Missing query parameter' }); + } + + try { + const articles = await getHealthArticles(query); + res.status(200).json({ articles }); + } catch (error) { + console.error('Error searching articles:', error.message); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +module.exports = { + searchHealthArticles, +}; diff --git a/controller/healthNewsController.js b/controller/healthNewsController.js new file mode 100644 index 0000000..b49b692 --- /dev/null +++ b/controller/healthNewsController.js @@ -0,0 +1,682 @@ +const supabase = require('../dbConnection'); + +// Get all health news with flexible filtering +exports.filterNews = async (req, res) => { + try { + const { + id, + title, + content, + author_name, + category_name, + tag_name, + start_date, + end_date, + sort_by = 'published_at', + sort_order = 'desc', + limit = 20, + page = 1, + include_details = 'true' // Controls whether to include full relationship details + } = req.query; + + // If ID is provided, use a simplified query for better performance + if (id) { + // Configure select statement based on include_details preference + let selectStatement = '*'; + if (include_details === 'true') { + selectStatement = ` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `; + } else { + selectStatement = ` + id, + title, + summary, + published_at, + updated_at, + image_url, + author:authors(id, name), + category:categories(id, name) + `; + } + + const { data, error } = await supabase + .from('health_news') + .select(selectStatement) + .eq('id', id) + .single(); + + if (error) throw error; + + // Only fetch tags if include_details is true + if (include_details === 'true') { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', id); + + if (tagsError) throw tagsError; + + data.tags = tags.map(t => t.tags); + } + + return res.status(200).json({ + success: true, + data + }); + } + + // For non-ID queries, use the original filtering logic + // Build the query + let query = supabase + .from('health_news'); + + // Configure select statement based on include_details preference + if (include_details === 'true') { + query = query.select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `); + } else { + query = query.select(` + id, + title, + summary, + published_at, + image_url, + author:authors(id, name), + category:categories(id, name) + `); + } + + // Apply filters + if (title) { + query = query.ilike('title', `%${title}%`); + } + + if (content) { + query = query.ilike('content', `%${content}%`); + } + + // Date range filtering + if (start_date) { + query = query.gte('published_at', start_date); + } + + if (end_date) { + query = query.lte('published_at', end_date); + } + + // Relational filtering + if (author_name) { + // Get the author ID first + const { data: authors, error: authorsError } = await supabase + .from('authors') + .select('id') + .ilike('name', `%${author_name}%`); + + if (authorsError) throw authorsError; + + if (authors.length > 0) { + const authorIds = authors.map(author => author.id); + query = query.in('author_id', authorIds); + } else { + // No matching authors, return empty result + return res.status(200).json({ success: true, data: [] }); + } + } + + if (category_name) { + // Get the category ID first + const { data: categories, error: categoriesError } = await supabase + .from('categories') + .select('id') + .ilike('name', `%${category_name}%`); + + if (categoriesError) throw categoriesError; + + if (categories.length > 0) { + const categoryIds = categories.map(category => category.id); + query = query.in('category_id', categoryIds); + } else { + // No matching categories, return empty result + return res.status(200).json({ success: true, data: [] }); + } + } + + // Pagination + const offset = (page - 1) * limit; + query = query.order(sort_by, { ascending: sort_order === 'asc' }) + .range(offset, offset + limit - 1); + + // Execute the query + let { data, error } = await query; + + if (error) throw error; + + // Handle tag filtering separately since it's a many-to-many relationship + if (tag_name) { + // Get tag IDs matching the name + const { data: tags, error: tagsError } = await supabase + .from('tags') + .select('id') + .ilike('name', `%${tag_name}%`); + + if (tagsError) throw tagsError; + + if (tags.length > 0) { + const tagIds = tags.map(tag => tag.id); + + // Get news IDs that have these tags + const { data: newsWithTags, error: newsTagsError } = await supabase + .from('news_tags') + .select('news_id') + .in('tag_id', tagIds); + + if (newsTagsError) throw newsTagsError; + + const newsIdsWithTags = newsWithTags.map(item => item.news_id); + + // Filter the results to only include news with matching tags + data = data.filter(news => newsIdsWithTags.includes(news.id)); + } else { + // No matching tags, return empty result + return res.status(200).json({ success: true, data: [] }); + } + } + + // Get tags for each news if include_details is true + if (include_details === 'true') { + for (let news of data) { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', news.id); + + if (tagsError) throw tagsError; + + news.tags = tags.map(t => t.tags); + } + } + + // Get total count for pagination - FIX: Use proper Supabase count method + const { count, error: countError } = await supabase + .from('health_news') + .select('*', { count: 'exact', head: true }); + + if (countError) throw countError; + + const totalCount = count || 0; + + res.status(200).json({ + success: true, + data, + pagination: { + total: totalCount, + page: parseInt(page), + limit: parseInt(limit), + total_pages: Math.ceil(totalCount / limit) + } + }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get all health news +exports.getAllNews = async (req, res) => { + try { + const { data, error } = await supabase + .from('health_news') + .select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `) + .order('published_at', { ascending: false }); + + if (error) throw error; + + // Get tags for each news + for (let news of data) { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', news.id); + + if (tagsError) throw tagsError; + + news.tags = tags.map(t => t.tags); + } + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get specific health news by ID +exports.getNewsById = async (req, res) => { + try { + const { id } = req.params; + + const { data, error } = await supabase + .from('health_news') + .select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `) + .eq('id', id) + .single(); + + if (error) throw error; + + // Get tags for the news + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', id); + + if (tagsError) throw tagsError; + + data.tags = tags.map(t => t.tags); + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get news by category +exports.getNewsByCategory = async (req, res) => { + try { + const { id } = req.params; + + const { data, error } = await supabase + .from('health_news') + .select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `) + .eq('category_id', id) + .order('published_at', { ascending: false }); + + if (error) throw error; + + // Get tags for each news + for (let news of data) { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', news.id); + + if (tagsError) throw tagsError; + + news.tags = tags.map(t => t.tags); + } + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get news by author +exports.getNewsByAuthor = async (req, res) => { + try { + const { id } = req.params; + + const { data, error } = await supabase + .from('health_news') + .select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `) + .eq('author_id', id) + .order('published_at', { ascending: false }); + + if (error) throw error; + + // Get tags for each news + for (let news of data) { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', news.id); + + if (tagsError) throw tagsError; + + news.tags = tags.map(t => t.tags); + } + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get news by tag +exports.getNewsByTag = async (req, res) => { + try { + const { id } = req.params; + + // First find all news IDs with this tag + const { data: newsIds, error: newsIdsError } = await supabase + .from('news_tags') + .select('news_id') + .eq('tag_id', id); + + if (newsIdsError) throw newsIdsError; + + if (newsIds.length === 0) { + return res.status(200).json({ success: true, data: [] }); + } + + // Get details for these news + const { data, error } = await supabase + .from('health_news') + .select(` + *, + author:authors(*), + source:sources(*), + category:categories(*) + `) + .in('id', newsIds.map(item => item.news_id)) + .order('published_at', { ascending: false }); + + if (error) throw error; + + // Get tags for each news + for (let news of data) { + const { data: tags, error: tagsError } = await supabase + .from('news_tags') + .select(` + tags:tags(*) + `) + .eq('news_id', news.id); + + if (tagsError) throw tagsError; + + news.tags = tags.map(t => t.tags); + } + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Create new health news +exports.createNews = async (req, res) => { + const { + title, + summary, + content, + author_id, + source_id, + category_id, + source_url, + image_url, + published_at, + tags + } = req.body; + + try { + // Start transaction + const { data, error } = await supabase + .from('health_news') + .insert({ + title, + summary, + content, + author_id, + source_id, + category_id, + source_url, + image_url, + published_at: published_at || new Date() + }) + .select() + .single(); + + if (error) throw error; + + // If there are tags, add tag associations + if (tags && tags.length > 0) { + const tagRelations = tags.map(tag_id => ({ + news_id: data.id, + tag_id + })); + + const { error: tagError } = await supabase + .from('news_tags') + .insert(tagRelations); + + if (tagError) throw tagError; + } + + res.status(201).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Update health news +exports.updateNews = async (req, res) => { + const { id } = req.params; + const { + title, + summary, + content, + author_id, + source_id, + category_id, + source_url, + image_url, + published_at, + tags + } = req.body; + + try { + // Update news + const { data, error } = await supabase + .from('health_news') + .update({ + title, + summary, + content, + author_id, + source_id, + category_id, + source_url, + image_url, + published_at, + updated_at: new Date() + }) + .eq('id', id) + .select() + .single(); + + if (error) throw error; + + // If tags are provided, delete old tag associations and add new ones + if (tags) { + // Delete old tag associations + const { error: deleteError } = await supabase + .from('news_tags') + .delete() + .eq('news_id', id); + + if (deleteError) throw deleteError; + + // Add new tag associations + if (tags.length > 0) { + const tagRelations = tags.map(tag_id => ({ + news_id: id, + tag_id + })); + + const { error: tagError } = await supabase + .from('news_tags') + .insert(tagRelations); + + if (tagError) throw tagError; + } + } + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Delete health news +exports.deleteNews = async (req, res) => { + const { id } = req.params; + + try { + // Due to foreign key constraints, deleting news will automatically delete related tag associations + const { error } = await supabase + .from('health_news') + .delete() + .eq('id', id); + + if (error) throw error; + + res.status(200).json({ + success: true, + message: 'Health news successfully deleted' + }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get all categories +exports.getAllCategories = async (req, res) => { + try { + const { data, error } = await supabase + .from('categories') + .select('*') + .order('name'); + + if (error) throw error; + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get all authors +exports.getAllAuthors = async (req, res) => { + try { + const { data, error } = await supabase + .from('authors') + .select('*') + .order('name'); + + if (error) throw error; + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Get all tags +exports.getAllTags = async (req, res) => { + try { + const { data, error } = await supabase + .from('tags') + .select('*') + .order('name'); + + if (error) throw error; + + res.status(200).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Create new category +exports.createCategory = async (req, res) => { + const { name, description } = req.body; + + try { + const { data, error } = await supabase + .from('categories') + .insert({ name, description }) + .select() + .single(); + + if (error) throw error; + + res.status(201).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Create new author +exports.createAuthor = async (req, res) => { + const { name, bio } = req.body; + + try { + const { data, error } = await supabase + .from('authors') + .insert({ name, bio }) + .select() + .single(); + + if (error) throw error; + + res.status(201).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; + +// Create new tag +exports.createTag = async (req, res) => { + const { name } = req.body; + + try { + const { data, error } = await supabase + .from('tags') + .insert({ name }) + .select() + .single(); + + if (error) throw error; + + res.status(201).json({ success: true, data }); + } catch (error) { + res.status(500).json({ success: false, message: error.message }); + } +}; \ No newline at end of file diff --git a/controller/imageClassificationController.js b/controller/imageClassificationController.js new file mode 100644 index 0000000..496ec4f --- /dev/null +++ b/controller/imageClassificationController.js @@ -0,0 +1,80 @@ +const path = require('path'); +const { spawn } = require('child_process'); +const fs = require('fs'); + +// Utility to delete the uploaded file +const deleteFile = (filePath) => { + fs.unlink(filePath, (err) => { + if (err) { + console.error('Error deleting file:', err); + } + }); +}; + +// Function to clean the raw prediction output +const cleanPrediction = (prediction) => { + const lines = prediction.split('\n'); + const lastLine = lines[lines.length - 2]; // Skip the last empty line + const startIndex = lastLine.indexOf(' ') + 1; + return lastLine.slice(startIndex).trim(); +}; + +// Function to handle prediction logic +const predictImage = (req, res) => { + // Path to the uploaded image file + const imagePath = req.file.path; + + if (!imagePath) { + return res.status(400).json({ error: 'Image path is missing.' }); + } + + // Read the image file from disk + fs.readFile(imagePath, (err, imageData) => { + if (err) { + console.error('Error reading image file:', err); + deleteFile(imagePath); + return res.status(500).json({ error: 'Internal server error' }); + } + + // Execute Python script using child_process.spawn + const pythonProcess = spawn('python', ['model/imageClassification.py']); + + // Pass image data to Python script via stdin + pythonProcess.stdin.write(imageData); + pythonProcess.stdin.end(); + + // Collect data from Python script output + let prediction = ''; + pythonProcess.stdout.on('data', (data) => { + prediction += data.toString(); + }); + + // Handle errors + pythonProcess.stderr.on('data', (data) => { + console.error('Error executing Python script:', data.toString()); + deleteFile(imagePath); + res.status(500).json({ error: 'Internal server error' }); + }); + + // When Python script finishes execution + pythonProcess.on('close', (code) => { + deleteFile(imagePath); + + if (code !== 0) { + console.error('Python script exited with code:', code); + return res.status(500).json({ error: 'Model execution failed.' }); + } + try{ + const cleanedPrediction = cleanPrediction(prediction); + res.status(200).json({ prediction: cleanedPrediction }); + } catch (e) { + console.error('Python script exited with code:', code); + res.status(500).json({ error: 'Internal server error' }); + } + }); + }); +}; + +module.exports = { + predictImage +}; diff --git a/controller/ingredientSubstitutionController.js b/controller/ingredientSubstitutionController.js new file mode 100644 index 0000000..46790a6 --- /dev/null +++ b/controller/ingredientSubstitutionController.js @@ -0,0 +1,83 @@ +const fetchIngredientSubstitutions = require("../model/fetchIngredientSubstitutions.js"); + +/** + * Get substitution options for a specific ingredient + * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ +const getIngredientSubstitutions = async (req, res) => { + try { + const { ingredientId } = req.params; + + if (!ingredientId) { + return res.status(400).json({ error: "Ingredient ID is required" }); + } + + // Validate ingredientId is a number + const parsedId = parseInt(ingredientId); + if (isNaN(parsedId)) { + return res.status(400).json({ error: "Ingredient ID must be a number" }); + } + + // Extract optional filter parameters from query string + const options = {}; + + // Parse allergies if provided + if (req.query.allergies) { + try { + console.log(`Parsing allergies from query: ${req.query.allergies}`); + options.allergies = Array.isArray(req.query.allergies) + ? req.query.allergies.map(id => parseInt(id.trim())).filter(id => !isNaN(id)) + : req.query.allergies.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed allergies: ${JSON.stringify(options.allergies)}`); + } catch (parseError) { + console.error('Error parsing allergies:', parseError); + options.allergies = []; + } + } + + // Parse dietary requirements if provided + if (req.query.dietaryRequirements) { + try { + console.log(`Parsing dietaryRequirements from query: ${req.query.dietaryRequirements}`); + options.dietaryRequirements = Array.isArray(req.query.dietaryRequirements) + ? req.query.dietaryRequirements.map(id => parseInt(id.trim())).filter(id => !isNaN(id)) + : req.query.dietaryRequirements.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed dietaryRequirements: ${JSON.stringify(options.dietaryRequirements)}`); + } catch (parseError) { + console.error('Error parsing dietary requirements:', parseError); + options.dietaryRequirements = []; + } + } + + // Parse health conditions if provided + if (req.query.healthConditions) { + try { + console.log(`Parsing healthConditions from query: ${req.query.healthConditions}`); + options.healthConditions = Array.isArray(req.query.healthConditions) + ? req.query.healthConditions.map(id => parseInt(id.trim())).filter(id => !isNaN(id)) + : req.query.healthConditions.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed healthConditions: ${JSON.stringify(options.healthConditions)}`); + } catch (parseError) { + console.error('Error parsing health conditions:', parseError); + options.healthConditions = []; + } + } + + console.log(`Processing substitution request for ingredient ID: ${parsedId} with options:`, JSON.stringify(options)); + const substitutions = await fetchIngredientSubstitutions(parsedId, options); + return res.status(200).json(substitutions); + } catch (error) { + console.error('Error in getIngredientSubstitutions:', error); + if (error.message === 'Ingredient not found') { + return res.status(404).json({ error: error.message }); + } else if (error.message === 'Invalid ingredient ID') { + return res.status(400).json({ error: error.message }); + } + return res.status(500).json({ error: "Internal server error" }); + } +}; + +module.exports = { + getIngredientSubstitutions +}; \ No newline at end of file diff --git a/controller/loginController.js b/controller/loginController.js index 78ddcd2..ee5b5d0 100644 --- a/controller/loginController.js +++ b/controller/loginController.js @@ -1,32 +1,220 @@ -const bcrypt = require('bcryptjs'); -const jwt = require('jsonwebtoken'); -let getUserCredentials = require('../model/getUserCredentials.js') +const bcrypt = require("bcryptjs"); +const jwt = require("jsonwebtoken"); +const logLoginEvent = require("../Monitor_&_Logging/loginLogger"); +const getUserCredentials = require("../model/getUserCredentials.js"); +const { addMfaToken, verifyMfaToken } = require("../model/addMfaToken.js"); +const sgMail = require("@sendgrid/mail"); +const crypto = require("crypto"); +const supabase = require("../dbConnection"); +const { validationResult } = require("express-validator"); + // Install nodemailer and add the following details to the .env file + //ALERT_EMAIL=nutrihelpnoreply1234@gmail.com + //ALERT_PASSWORD=yzzbfcnmemvneiqp +const nodemailer = require("nodemailer"); + +const transporter = nodemailer.createTransport({ + service: "gmail", + auth: { + user: process.env.ALERT_EMAIL, + pass: process.env.ALERT_PASSWORD, + }, +}); const login = async (req, res) => { - const { username, password } = req.body; - - try { - if (!username || !password) { - return res.status(400).json({ error: 'Username and password are required' }); - } - const user = await getUserCredentials(username, password); - if (user.length === 0) { - return res.status(401).json({ error: 'Invalid username or password' }); - } + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const email = req.body.email?.trim().toLowerCase(); + const password = req.body.password; + + let clientIp = req.headers["x-forwarded-for"] || req.socket.remoteAddress || req.ip; + clientIp = clientIp === "::1" ? "127.0.0.1" : clientIp; + + if (!email || !password) { + return res.status(400).json({ error: "Email and password are required" }); + } + + const tenMinutesAgoISO = new Date(Date.now() - 10 * 60 * 1000).toISOString(); + + try { + // Count failed login attempts in the past 10 minutes + const { data: failuresByEmail } = await supabase + .from("brute_force_logs") + .select("id") + .eq("email", email) + .eq("success", false) + .gte("created_at", tenMinutesAgoISO); + + const failureCount = failuresByEmail?.length || 0; + + if (failureCount >= 10) { + return res.status(429).json({ + error: "❌ Too many failed login attempts. Please try again after 10 minutes." + }); + } - const isPasswordValid = await bcrypt.compare(password, user.password); - if (!isPasswordValid) { - return res.status(401).json({ error: 'Invalid username or password' }); + // Validate credentials + const user = await getUserCredentials(email); + const userExists = user && user.length !== 0; + const isPasswordValid = userExists ? await bcrypt.compare(password, user.password) : false; + const isLoginValid = userExists && isPasswordValid; + + if (!isLoginValid) { + await supabase.from("brute_force_logs").insert([{ + email, + ip_address: clientIp, + success: false, + created_at: new Date().toISOString() + }]); + + if (failureCount === 4) { + return res.status(429).json({ + warning: "⚠ You have one attempt left before your account is temporarily locked." + }); + } + + if (!userExists || !isPasswordValid) { + await sendFailedLoginAlert(email, clientIp); + + if (!userExists) { + return res.status(401).json({ error: "Invalid email" }); } + + return res.status(401).json({ error: "Invalid password" }); + } + } + + // Log successful login + await supabase.from("brute_force_logs").insert([{ + email, + success: true, + created_at: new Date().toISOString() + }]); - const token = jwt.sign({ userId: user.user_id }, process.env.JWT_TOKEN, { expiresIn: '1h' }); + // ✅ Delete all failed attempts for this email + const { error: deleteError } = await supabase + .from("brute_force_logs") + .delete() + .eq("email", email) + .eq("success", false); - res.json({ token }); - } catch (error) { - console.error('Error logging in:', error); - res.status(500).json({ error: 'Internal server error' }); + if (deleteError) { + console.error("Failed to delete failed logs:", deleteError); } - return res.status(200).json('Placeholder'); + + // MFA handling + if (user.mfa_enabled) { + const token = crypto.randomInt(100000, 999999); + await addMfaToken(user.user_id, token); + await sendEmail(user, token); + return res.status(202).json({ + message: "An MFA Token has been sent to your email address" + }); + } + + // Log the Successful Login + await logLoginEvent({ + userId: user.user_id, + eventType: "LOGIN_SUCCESS", + ip: clientIp, + userAgent: req.headers["user-agent"] + }); + + // JWT generation + const token = jwt.sign( + { userId: user.user_id }, + process.env.JWT_TOKEN, + { expiresIn: "1h" } + ); + + return res.status(200).json({ user, token }); + + } catch (err) { + console.error("Login error:", err); + return res.status(500).json({ error: "Internal server error" }); + } +}; + +const loginMfa = async (req, res) => { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const email = req.body.email?.trim().toLowerCase(); + const password = req.body.password; + const mfa_token = req.body.mfa_token; + + if (!email || !password || !mfa_token) { + return res.status(400).json({ error: "Email, password, and token are required" }); + } + + try { + const user = await getUserCredentials(email); + if (!user || user.length === 0) { + return res.status(401).json({ error: "Invalid email or password" }); + } + + const isPasswordValid = await bcrypt.compare(password, user.password); + if (!isPasswordValid) { + return res.status(401).json({ error: "Invalid email or password" }); + } + + const tokenValid = await verifyMfaToken(user.user_id, mfa_token); + if (!tokenValid) { + return res.status(401).json({ error: "Token is invalid or has expired" }); + } + + const token = jwt.sign( + { userId: user.user_id }, + process.env.JWT_TOKEN, + { expiresIn: "1h" } + ); + + return res.status(200).json({ user, token }); + + } catch (err) { + console.error("MFA login error:", err); + return res.status(500).json({ error: "Internal server error" }); + } }; -module.exports = { login }; \ No newline at end of file +async function sendEmail(user, token) { + sgMail.setApiKey(process.env.SENDGRID_KEY); + try { + await sgMail.send({ + to: user.email, + from: "nutrihelpnoreply@gmail.com", + subject: "Nutrihelp login Token", + text: `Your token to log in is ${token}`, + html: `Your token to log in is ${token}` + }); + console.log("Email sent successfully!"); + } catch (err) { + console.error("Error sending email:", err); + } +} + +async function sendFailedLoginAlert(email, ip) { + try { + await transporter.sendMail({ + from: process.env.ALERT_EMAIL, + to: email, + subject: "Failed Login Attempt on NutriHelp", + text: `Hi, + +Someone tried to log in to NutriHelp using your email address from IP: ${ip}. + +If this wasn't you, please ignore this message. But if you're concerned, consider resetting your password or contacting support. + +– NutriHelp Security Team`, + }); + console.log(`Failed login alert sent to ${email}`); + } catch (err) { + console.error("Failed to send alert email:", err.message); + } +} + +module.exports = { login, loginMfa }; \ No newline at end of file diff --git a/controller/mealplanController.js b/controller/mealplanController.js new file mode 100644 index 0000000..86b4edf --- /dev/null +++ b/controller/mealplanController.js @@ -0,0 +1,67 @@ +const { validationResult } = require('express-validator'); +let { add, get, deletePlan, saveMealRelation } = require('../model/mealPlan.js'); + + +const addMealPlan = async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { recipe_ids, meal_type, user_id } = req.body; + + let meal_plan = await add(user_id, { recipe_ids: recipe_ids }, meal_type); + + await saveMealRelation(user_id, recipe_ids, meal_plan[0].id); + + return res.status(201).json({ message: 'success', statusCode: 201, meal_plan: meal_plan }); + + } catch (error) { + console.error({ error: 'error' }); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +const getMealPlan = async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { user_id } = req.body; + + let meal_plans = await get(user_id); + + if (meal_plans) { + return res.status(200).json({ message: 'success', statusCode: 200, meal_plans: meal_plans }); + } + return res.status(404).send({ error: 'Meal Plans not found for user.' }); + + } catch (error) { + console.error({ error: 'error' }); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +const deleteMealPlan = async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { id, user_id } = req.body; + + await deletePlan(id, user_id); + + return res.status(204).json({ message: 'success', statusCode: 204 }); + + } catch (error) { + console.error({ error: 'error' }); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +module.exports = { addMealPlan, getMealPlan, deleteMealPlan }; \ No newline at end of file diff --git a/controller/medicalPredictionController.js b/controller/medicalPredictionController.js new file mode 100644 index 0000000..6f7b197 --- /dev/null +++ b/controller/medicalPredictionController.js @@ -0,0 +1,58 @@ +// Get response message generated by chatbot +// Used by [POST] localhost/api/obesity/predict +const predict = async (req, res) => { + // Get input string from user + // Please refer to the AI_Team_Integration.xlsx for detail information about all input fields + const user_input = req.body; + + try { + // Validate input data + const required_keys = [ + "Gender", "Age", "Height", "Weight", + "family_history_with_overweight", "FAVC", "FCVC", "NCP", + "CAEC", "SMOKE", "CH2O", "SCC", "FAF", "TUE", "CALC", "MTRANS" + ] + const received_keys = Object.keys(user_input); + for (let i=0; i { + try { + const { user_id, type, content } = req.body; + + const { data, error } = await supabase + .from('notifications') + .insert([{ user_id, type, content, status: 'unread' }]); + + if (error) throw error; + + res.status(201).json({ message: 'Notification created', notification: data }); + } catch (error) { + console.error('Error creating notification:', error); + res.status(500).json({ error: 'An error occurred while creating the notification' }); + } +}; + +// Get all notifications for a specific user by user_id +exports.getNotificationsByUserId = async (req, res) => { + try { + const { user_id } = req.params; + + const { data, error } = await supabase + .from('notifications') + .select('*') + .eq('user_id', user_id); + + if (error) throw error; + + if (data.length === 0) { + return res.status(404).json({ message: 'No notifications found for this user' }); + } + + res.status(200).json(data); + } catch (error) { + console.error('Error retrieving notifications:', error); + res.status(500).json({ error: 'An error occurred while retrieving notifications' }); + } +}; + +// Update a notification status for specific id +exports.updateNotificationStatusById = async (req, res) => { + try { + const { id } = req.params; // Extract id from the URL parameters + const { status } = req.body; // Extract status from the request body + + const { data, error } = await supabase + .from('notifications') + .update({ status }) + .eq('simple_id', id) // Only update the notification with the specific id + + + if (error) { + console.error('Error updating notification:', error); + return res.status(500).json({ error: 'Failed to update notification' }); + } + + if (!data || data.length === 0) { + // If no data is returned, the notification was not found + return res.status(404).json({ error: 'Notification not found' }); + } + + res.status(200).json({ message: 'Notification updated successfully', notification: data }); + } catch (error) { + console.error('Error updating notification:', error); + res.status(500).json({ error: 'An error occurred while updating the notification' }); + } +}; + + +exports.deleteNotificationById = async (req, res) => { + try { + const { id } = req.params; + + const { data, error } = await supabase + .from('notifications') + .delete() + .eq('simple_id', id) // Only delete the notification with the specific id + + + if (error) { + console.error('Error deleting notification:', error); + return res.status(500).json({ error: 'Failed to delete notification' }); + } + + if (!data || data.length === 0) { + // If no data is returned, the notification was not found + return res.status(404).json({ error: 'Notification not found' }); + } + + res.status(200).json({ message: 'Notification deleted successfully' }); + } catch (error) { + console.error('Error deleting notification:', error); + res.status(500).json({ error: 'An error occurred while deleting the notification' }); + } +}; + + +// Mark all unread notifications as read for a specific user +exports.markAllUnreadNotificationsAsRead = async (req, res) => { + try { + + const { user_id } = req.params; + + const { data, error } = await supabase + .from('notifications') + .update({ status: 'read' }) + .eq('user_id', user_id) + .eq('status', 'unread'); + + + if (error) throw error; + + + if (data.length === 0) { + return res.status(404).json({ message: 'No unread notifications found for this user' }); + } + + res.status(200).json({ message: 'All unread notifications marked as read', updatedNotifications: data }); + } catch (error) { + console.error('Error marking notifications as read:', error); + res.status(500).json({ error: 'An error occurred while marking notifications as read' }); + } +}; diff --git a/controller/recipeController.js b/controller/recipeController.js new file mode 100644 index 0000000..309be91 --- /dev/null +++ b/controller/recipeController.js @@ -0,0 +1,189 @@ +let createRecipe = require("../model/createRecipe.js"); +let getUserRecipes = require("../model/getUserRecipes.js"); +let deleteUserRecipes = require("../model/deleteUserRecipes.js"); +const { validationResult } = require('express-validator'); + +const createAndSaveRecipe = async (req, res) => { + const { + user_id, + ingredient_id, + ingredient_quantity, + recipe_name, + cuisine_id, + total_servings, + preparation_time, + instructions, + recipe_image, + cooking_method_id, + } = req.body; + + try { + // if ( + // !user_id || + // !ingredient_id || + // !ingredient_quantity || + // !recipe_name || + // !cuisine_id || + // !total_servings || + // !preparation_time || + // !instructions || + // !cooking_method_id + // ) { + // return res.status(400).json({ + // error: "Recipe parameters are missed", + // statusCode: 400, + // }); + // } + + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const recipe = await createRecipe.createRecipe( + user_id, + ingredient_id, + ingredient_quantity, + recipe_name, + cuisine_id, + total_servings, + preparation_time, + instructions, + cooking_method_id + ); + + let savedData = await createRecipe.saveRecipe(recipe); + + if (recipe_image) { + await createRecipe.saveImage(recipe_image, savedData[0].id); + } + + await createRecipe.saveRecipeRelation(recipe, savedData[0].id); + + return res.status(201).json({ message: "success", statusCode: 201 }); + } catch (error) { + console.error("Error logging in:", error); + return res + .status(500) + .json({ error: "Internal server error", statusCode: 500 }); + } +}; + +const getRecipes = async (req, res) => { + const user_id = req.body.user_id; + + try { + if (!user_id) { + return res + .status(400) + .json({ error: "User Id is required", statusCode: 400 }); + } + let recipeList = []; + let cuisineList = []; + let ingredientList = []; + + const recipeRelation = await getUserRecipes.getUserRecipesRelation( + user_id + ); + if (recipeRelation.length === 0) { + return res + .status(404) + .json({ error: "Recipes not found", statusCode: 404 }); + } + + for (let i = 0; i < recipeRelation.length; i++) { + if (i === 0) { + recipeList.push(recipeRelation[i].recipe_id); + cuisineList.push(recipeRelation[i].cuisine_id); + ingredientList.push(recipeRelation[i].ingredient_id); + } else if (recipeList.indexOf(recipeRelation[i].recipe_id) < 0) { + recipeList.push(recipeRelation[i].recipe_id); + } else if (cuisineList.indexOf(recipeRelation[i].cuisine_id) < 0) { + cuisineList.push(recipeRelation[i].cuisine_id); + } else if ( + ingredientList.indexOf(recipeRelation[i].ingredient_id) < 0 + ) { + ingredientList.push(recipeRelation[i].ingredient_id); + } + } + + const recipes = await getUserRecipes.getUserRecipes(recipeList); + if (recipes.length === 0) { + return res + .status(404) + .json({ error: "Recipes not found", statusCode: 404 }); + } + + const ingredients = await getUserRecipes.getIngredients(ingredientList); + if (ingredients.length === 0) { + return res + .status(404) + .json({ error: "Ingredients not found", statusCode: 404 }); + } + + const cuisines = await getUserRecipes.getCuisines(cuisineList); + if (cuisines.length === 0) { + return res + .status(404) + .json({ error: "Cuisines not found", statusCode: 404 }); + } + + await Promise.all( + recipes.map(async (recipe) => { + for (const element of cuisines) { + if (recipe.cuisine_id == element.id) { + recipe["cuisine_name"] = element.name; + } + } + recipe.ingredients["category"] = []; + recipe.ingredients["name"] = []; + for (const ingredient of recipe.ingredients.id) { + for (const element of ingredients) { + if (ingredient == element.id) { + recipe.ingredients.name.push(element.name); + recipe.ingredients.category.push(element.category); + } + } + } + + // Get image URL + recipe.image_url = await getUserRecipes.getImageUrl( + recipe.image_id + ); + }) + ); + + return res + .status(200) + .json({ message: "success", statusCode: 200, recipes: recipes }); + } catch (error) { + console.error("Error logging in:", error); + return res + .status(500) + .json({ error: "Internal server error", statusCode: 500 }); + } +}; + +const deleteRecipe = async (req, res) => { + const { user_id, recipe_id } = req.body; + + try { + if (!user_id || !recipe_id) { + return res.status(400).json({ + error: "User Id or Recipe Id is required", + statusCode: 404, + }); + } + + await deleteUserRecipes.deleteUserRecipes(user_id, recipe_id); + + return res.status(200).json({ message: "success", statusCode: 204 }); + } catch (error) { + console.error(error); + return res + .status(500) + .json({ error: "Internal server error", statusCode: 500 }); + } +}; + +module.exports = { createAndSaveRecipe, getRecipes, deleteRecipe }; diff --git a/controller/recipeImageClassificationController.js b/controller/recipeImageClassificationController.js new file mode 100644 index 0000000..9e0cfc6 --- /dev/null +++ b/controller/recipeImageClassificationController.js @@ -0,0 +1,176 @@ +//FOR THIS API TO WORK, YOU MUST HAVE THE AI MODEL FILE SAVED TO THE PREDICTION_MODELS FOLDER +//THIS FILE CAN BE FOUND UPLOADED TO THE NUTRIHELP TEAMS SITE +// IT IS CALLED BEST_MODEL_CLASS.HDF5 + +const { spawn } = require("child_process"); +const fs = require("fs"); +const path = require("path"); +const { promisify } = require("util"); + +// Convert fs callbacks to promises +const readFileAsync = promisify(fs.readFile); +const writeFileAsync = promisify(fs.writeFile); +const unlinkAsync = promisify(fs.unlink); +const mkdirAsync = promisify(fs.mkdir); +const existsAsync = promisify(fs.exists); + +const predictRecipeImage = async (req, res) => { + try { + if (!req.file || !req.file.path) { + return res.status(400).json({ error: "No file uploaded" }); + } + + const imagePath = req.file.path; + const originalName = req.file.originalname; + + const fileExtension = path.extname(originalName).toLowerCase(); + const allowedExtensions = [".jpg", ".jpeg", ".png"]; + + if (!allowedExtensions.includes(fileExtension)) { + try { + await unlinkAsync(req.file.path); + } catch (err) { + console.error("Error deleting invalid file:", err); + } + return res.status(400).json({ error: "Invalid file type. Only JPG/PNG files are allowed." }); + } + + const originalFilename = originalName.toLowerCase(); + + try { + if (!await existsAsync('uploads')) { + await mkdirAsync('uploads', { recursive: true }); + console.log("Created uploads directory"); + } + } catch (err) { + console.error("Error creating uploads directory:", err); + } + + const namedImagePath = `uploads/${originalFilename}`; + + try { + await fs.promises.copyFile(imagePath, namedImagePath); + console.log(`Copied temporary file to ${namedImagePath}`); + + await writeFileAsync('uploads/original_filename.txt', originalFilename); + } catch (err) { + console.error("Error preparing image file:", err); + // Continue anyway + } + + return new Promise((resolve, reject) => { + const scriptPath = './model/recipeImageClassification.py'; + + if (!fs.existsSync(scriptPath)) { + console.error(`Python script not found at ${scriptPath}`); + res.status(500).json({ error: "Recipe classification script not found" }); + cleanupFiles(imagePath); + return resolve(); + } + + console.log(`Running Python script: ${scriptPath}`); + const pythonProcess = spawn('python', [scriptPath], { encoding: 'utf-8' }); + + let output = ''; + let errorOutput = ''; + + pythonProcess.stdout.on('data', (data) => { + output += data.toString(); + }); + + pythonProcess.stderr.on('data', (data) => { + const errorText = data.toString(); + errorOutput += errorText; + + if (errorText.includes("ERROR:") && + !errorText.includes("successfully") && + !errorText.includes("libpng warning") && + !errorText.includes("Allocating tensor")) { + console.error(`Python Error: ${errorText}`); + } + }); + + pythonProcess.on("close", (code) => { + console.log(`Python process exited with code: ${code}`); + + if (code === 0) { + try { + const cleanOutput = output.replace(/\x1b\[[0-9;]*m/g, ''); + + const lines = cleanOutput.split(/\r?\n/).filter(line => line.trim() !== ''); + const result = lines[lines.length - 1].trim(); + + if (!result) { + console.error("Python script returned empty result"); + res.status(500).json({ error: "Recipe classification returned empty result" }); + } else { + res.status(200).json({ prediction: result }); + } + } catch (error) { + console.error("Error processing Python output:", error); + res.status(500).json({ error: "Error processing recipe classification result" }); + } + } else { + if (errorOutput.includes("Model file not found")) { + res.status(500).json({ + error: "Recipe classification model not found. Please ensure the AI model is properly installed." + }); + } else if (errorOutput.includes("No file uploaded") || errorOutput.includes("Cannot open image file")) { + res.status(400).json({ error: "Unable to process the uploaded image" }); + } else { + console.error("Python script exited with error code:", code); + console.error("Error output:", errorOutput); + res.status(500).json({ error: "Internal server error during image classification" }); + } + } + + cleanupFiles(imagePath); + resolve(); + }); + + pythonProcess.on("error", (err) => { + console.error("Error running Python script:", err); + res.status(500).json({ error: "Failed to run image classification process" }); + cleanupFiles(imagePath); + resolve(); + }); + + const timeout = setTimeout(() => { + console.error("Python process timeout - killing process"); + pythonProcess.kill(); + if (!res.headersSent) { + res.status(500).json({ error: "Recipe classification timed out" }); + } + cleanupFiles(imagePath); + resolve(); + }, 30000); // 30 second timeout + + pythonProcess.on('close', () => { + clearTimeout(timeout); + }); + }); + } catch (error) { + console.error("Unexpected error in predictRecipeImage:", error); + if (!res.headersSent) { + res.status(500).json({ error: "Unexpected error during image processing" }); + } + if (req.file && req.file.path) { + cleanupFiles(req.file.path); + } + } +}; + +// Helper function to clean up temporary files +async function cleanupFiles(tempFilePath) { + try { + // Check if file exists before trying to delete + if (fs.existsSync(tempFilePath)) { + await unlinkAsync(tempFilePath); + console.log(`Cleaned up temporary file: ${tempFilePath}`); + } + } catch (err) { + console.error(`Error cleaning up temporary file ${tempFilePath}:`, err); + } +} + +module.exports = { predictRecipeImage }; diff --git a/controller/recipeNutritionController.js b/controller/recipeNutritionController.js new file mode 100644 index 0000000..e094e8e --- /dev/null +++ b/controller/recipeNutritionController.js @@ -0,0 +1,41 @@ +const supabase = require('../dbConnection.js'); + +exports.getRecipeNutritionByName = async (req, res) => { + const recipeName = req.query.name; + + if (!recipeName) { + return res.status(400).json({ error: "Missing 'name' query parameter" }); + } + + try { + const { data, error } = await supabase + .from('recipes') + .select(` + recipe_name, + calories, + fat, + carbohydrates, + protein, + fiber, + vitamin_a, + vitamin_b, + vitamin_c, + vitamin_d, + sodium, + sugar + `) + .ilike('recipe_name', recipeName); // case-insensitive match + + if (error) { + return res.status(500).json({ error: error.message }); + } + + if (!data || data.length === 0) { + return res.status(404).json({ error: 'Recipe not found' }); + } + + return res.json(data[0]); + } catch (err) { + return res.status(500).json({ error: 'Server error' }); + } +}; \ No newline at end of file diff --git a/controller/recipeScalingController.js b/controller/recipeScalingController.js new file mode 100644 index 0000000..123da96 --- /dev/null +++ b/controller/recipeScalingController.js @@ -0,0 +1,29 @@ +let getScaledRecipe = require('../model/getRecipeIngredients'); + +const scaleRecipe = async (req, res) => { + const { recipe_id, desired_servings } = req.params; + + try { + const result = await getScaledRecipe.getScaledIngredientsByServing(recipe_id, desired_servings); + + if (result.status != 200) { + return res.status(result.status).json({ + error: result.error + }); + } + + return res.status(200).json({ + scaled_ingredients: result.ingredients, + scaling_detail: result.scaling_detail + }); + } catch (error) { + console.error("Error when scaling recipe: ", error); + return res.status(500).json({ + error: "Internal server error" + }) + } +} + +module.exports = { + scaleRecipe +} \ No newline at end of file diff --git a/controller/signupController.js b/controller/signupController.js index 609f722..ff61e0f 100644 --- a/controller/signupController.js +++ b/controller/signupController.js @@ -1,35 +1,100 @@ -const bcrypt = require('bcryptjs'); -let getUser = require('../model/getUser.js') -let addUser = require('../model/addUser.js') +//const bcrypt = require('bcryptjs'); +//let getUser = require('../model/getUser.js'); +//let addUser = require('../model/addUser.js'); +const { validationResult } = require('express-validator'); +const { registerValidation } = require('../validators/signupValidator.js'); +// const supabase = require('../dbConnection'); +const logLoginEvent = require("../Monitor_&_Logging/loginLogger"); +const { supabase } = require("../database/supabase"); +const safeLog = async (payload) => { + try { await logLoginEvent(payload); } catch (e) { console.warn("log error:", e.message); } +}; const signup = async (req, res) => { - const { username, password } = req.body; + + const errors = validationResult(req); + if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() }); + + const { name, email, password, contact_number, address } = req.body; + const emailNormalized = (email || "").trim().toLowerCase(); - try { - if (!username || !password) { - return res.status(400).json({ error: 'Username and password are required' }); - } + let clientIp = req.headers["x-forwarded-for"] || req.socket?.remoteAddress || req.ip || ""; + clientIp = clientIp === "::1" ? "127.0.0.1" : clientIp; + const userAgent = req.get("User-Agent") || ""; - const userExists = await getUser(username); + try { + + const { data, error } = await supabase.auth.signUp({ + email: emailNormalized, + password, + options: { + data: { name, contact_number: contact_number || null, address: address || null }, - if (userExists.username) { - return res.status(400).json({ error: 'User already exists' }); - } + emailRedirectTo: process.env.APP_ORIGIN ? `${process.env.APP_ORIGIN}/login` : undefined, + }, + }); - const hashedPassword = await bcrypt.hash(password, 10); + if (error) { + const msg = (error.message || "").toLowerCase(); - await addUser(username, hashedPassword) + if (msg.includes("already") && msg.includes("registered")) { + await safeLog({ userId: null, eventType: "EXISTING_USER", ip: clientIp, userAgent, + details: { email: emailNormalized }}); + return res.status(400).json({ error: "User already exists" }); + } + if (msg.includes("password")) { + return res.status(400).json({ error: error.message }); + } + + return res.status(400).json({ error: error.message || "Unable to create user" }); + } - res.status(201).json({ message: 'User created successfully' }); + const userId = data.user?.id || null; - - } catch (error) { - console.error('Error creating user:', error); - res.status(500).json({ error: 'Internal server error' }); + if (data.session?.access_token) { + try { + const authed = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY, { + global: { headers: { Authorization: `Bearer ${data.session.access_token}` } }, + }); + + await authed.from("profiles").upsert( + { + id: userId, + email: emailNormalized, + name, + contact_number: contact_number || null, + address: address || null, + }, + { onConflict: "id" } + ); + } catch (e) { + console.warn("profile upsert (authed) failed:", e.message); + + } } + + await safeLog({ + userId, + eventType: "SIGNUP_SUCCESS", + ip: clientIp, + userAgent, + details: { email: emailNormalized }, + }); + + return res.status(201).json({ + message: "User created successfully. Please check your email to verify your account.", + }); + } catch (err) { + console.error("Unexpected signup error:", err); + await safeLog({ + userId: null, eventType: "SIGNUP_FAILED", ip: clientIp, userAgent, + details: { reason: "Internal server error", error_message: err.message, email: emailNormalized }, + }); + return res.status(500).json({ error: "Internal server error" }); + } }; -module.exports = { signup }; \ No newline at end of file +module.exports = { signup }; diff --git a/controller/updateUserProfileController.js b/controller/updateUserProfileController.js new file mode 100644 index 0000000..afa58d8 --- /dev/null +++ b/controller/updateUserProfileController.js @@ -0,0 +1,56 @@ +const supabase = require('../dbConnection.js'); + +exports.updateUserProfile = async (req, res) => { + console.log(" hit update-by-identifier endpoint"); + try { + const { identifier, updates } = req.body; + + if (!identifier) { + return res.status(400).json({ message: "Email or Username is required as identifier." }); + } + + if (!updates || typeof updates !== 'object') { + return res.status(400).json({ message: "Updates object is required." }); + } + + + let { data: userData, error: emailError } = await supabase + .from('users') + .select('*') + .eq('email', identifier) + .single(); + + if (emailError || !userData) { + const { data, error: usernameError } = await supabase + .from('users') + .select('*') + .eq('name', identifier) + .single(); + + userData = data; + if (usernameError || !userData) { + return res.status(404).json({ message: "User not found with provided identifier." }); + } + } + + + const { data: updatedData, error: updateError } = await supabase + .from('users') + .update(updates) + .eq('user_id', userData.user_id); + + + if (updateError) { + console.error("Update error:", updateError); + return res.status(500).json({ error: "Failed to update user profile." }); + } + + return res.status(200).json({ + message: "User profile updated successfully.", + updatedProfile: updatedData + }); + } catch (error) { + console.error("Unexpected error:", error); + return res.status(500).json({ error: "Internal server error." }); + } +}; diff --git a/controller/uploadController.js b/controller/uploadController.js new file mode 100644 index 0000000..04fb478 --- /dev/null +++ b/controller/uploadController.js @@ -0,0 +1,83 @@ +const multer = require('multer'); +const { createClient } = require('@supabase/supabase-js'); + +const supabase = createClient( + process.env.SUPABASE_URL, + process.env.SUPABASE_ANON_KEY +); + + +const storage = multer.memoryStorage(); +const upload = multer({ + storage: storage, + limits: { fileSize: 5 * 1024 * 1024 }, + fileFilter: (req, file, cb) => { + const allowedTypes = ['image/jpeg', 'image/png', 'application/pdf']; + if (allowedTypes.includes(file.mimetype)) { + cb(null, true); + } else { + cb(new Error('Unsupported file type'), false); + } + } +}).single('file'); + + +exports.uploadFile = async (req, res) => { + const token = req.headers.authorization?.split(' ')[1]; + + if (!token) { + return res.status(401).json({ error: 'No authorization token provided' }); + } + + upload(req, res, async (err) => { + if (err) { + return res.status(400).json({ error: err.message }); + } + + if (!req.file) { + return res.status(400).json({ error: 'No file uploaded' }); + } + + const { user_id } = req.body; + const file = req.file; + const uploadTime = new Date().toISOString(); + const filePath = `files/${user_id}/${file.originalname}`; + + try { + + const { data, error } = await supabase.storage + .from('uploads') + .upload(filePath, file.buffer, { + contentType: file.mimetype, + cacheControl: '3600', + }); + + if (error) throw error; + + const { data: urlData, error: urlError } = await supabase + .storage + .from('uploads') + .getPublicUrl(filePath); + + if (urlError || !urlData) throw urlError; + + const fileUrl = urlData.publicUrl; + + const { error: logError } = await supabase.from('upload_logs').insert([ + { + user_id, + file_name: file.originalname, + file_url: fileUrl, + upload_time: uploadTime, + } + ]); + + if (logError) throw logError; + + return res.status(201).json({ message: 'File uploaded successfully', fileUrl: fileUrl }); + } catch (error) { + console.error('❌ File upload failed:', error); + return res.status(500).json({ error: 'File upload failed' }); + } + }); +}; diff --git a/controller/userFeedbackController.js b/controller/userFeedbackController.js new file mode 100644 index 0000000..9bcb309 --- /dev/null +++ b/controller/userFeedbackController.js @@ -0,0 +1,28 @@ +const { validationResult } = require('express-validator'); +let addUserFeedback = require("../model/addUserFeedback.js"); + +const userfeedback = async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + const { user_id, name, contact_number, email, experience, message } = req.body; + + await addUserFeedback( + user_id, + name, + contact_number, + email, + experience, + message + ); + + res.status(201).json({ message: "Data received successfully!" }); + } catch (error) { + console.error({ error }); + res.status(500).json({ error: "Internal server error" }); + } +}; + +module.exports = { userfeedback }; diff --git a/controller/userPasswordController.js b/controller/userPasswordController.js new file mode 100644 index 0000000..d9cbe4c --- /dev/null +++ b/controller/userPasswordController.js @@ -0,0 +1,47 @@ +const bcrypt = require('bcryptjs'); +let updateUser = require("../model/updateUserPassword.js"); +let getUser = require("../model/getUserPassword.js"); + +const updateUserPassword = async (req, res) => { + try { + if (!req.body.user_id) { + return res.status(400).send({ message: "User ID is required" }); + } + + if (!req.body.password) { + return res.status(400).send({ message: "Current password is required" }); + } + + if (!req.body.new_password) { + return res.status(400).send({ message: "New password is required" }); + } + + const user = await getUser(req.body.user_id); + if (!user || user.length === 0) { + return res + .status(401) + .json({ error: "Invalid user id" }); + } + + const isPasswordValid = await bcrypt.compare(req.body.password, user[0].password); + if (!isPasswordValid) { + return res + .status(401) + .json({ error: "Invalid password" }); + } + + const hashedPassword = await bcrypt.hash(req.body.new_password, 10); + + await updateUser( + req.body.user_id, + hashedPassword + ); + + res.status(200).json({ message: "Password updaded successfully" }); + } catch (error) { + console.error(error); + res.status(500).json({ message: "Internal server error" }); + } +}; + +module.exports = { updateUserPassword }; \ No newline at end of file diff --git a/controller/userPreferencesController.js b/controller/userPreferencesController.js new file mode 100644 index 0000000..fc7b590 --- /dev/null +++ b/controller/userPreferencesController.js @@ -0,0 +1,42 @@ +const fetchUserPreferences = require("../model/fetchUserPreferences"); +const updateUserPreferences = require("../model/updateUserPreferences"); + +const getUserPreferences = async (req, res) => { + try { + const userId = req.user.userId; + if (!userId) { + return res.status(400).json({ error: "User ID is required" }); + } + + const userPreferences = await fetchUserPreferences(userId); + if (!userPreferences || userPreferences.length === 0) { + return res + .status(404) + .json({ error: "User preferences not found" }); + } + + return res.status(200).json(userPreferences); + } catch (error) { + console.error(error); + return res.status(500).json({ error: "Internal server error" }); + } +}; + +const postUserPreferences = async (req, res) => { + try { + const { user } = req.body; + + await updateUserPreferences(user.userId, req.body); + return res + .status(204) + .json({ message: "User preferences updated successfully" }); + } catch (error) { + console.error(error); + return res.status(500).json({ error: "Internal server error" }); + } +}; + +module.exports = { + getUserPreferences, + postUserPreferences, +}; diff --git a/controller/userProfileController.js b/controller/userProfileController.js new file mode 100644 index 0000000..c7dfb94 --- /dev/null +++ b/controller/userProfileController.js @@ -0,0 +1,44 @@ +let { updateUser, saveImage } = require("../model/updateUserProfile.js"); +let getUser = require("../model/getUserProfile.js"); + +const updateUserProfile = async (req, res) => { + try { + if (!req.body.email) { + return res.status(400).send("Email is required"); + } + const user_profile = await updateUser( + req.body.name, + req.body.first_name, + req.body.last_name, + req.body.email, + req.body.contact_number, + req.body.address + ); + + var url = await saveImage(req.body.user_image, user_profile[0].user_id); + user_profile[0].image_url = url; + + res.status(200).json(user_profile); + } catch (error) { + console.error(error); + res.status(500).json({ message: "Internal server error" }); + } +}; + +const getUserProfile = async (req, res) => { + try { + const { email } = req.body; + if (!email) { + return res.status(400).send("Email is required"); + } + + const userprofile = await getUser(email); + + res.status(200).json(userprofile); + } catch (error) { + console.error(error); + res.status(500).json({ message: "Internal server error" }); + } +}; + +module.exports = { updateUserProfile, getUserProfile }; diff --git a/controller/waterIntakeController.js b/controller/waterIntakeController.js new file mode 100644 index 0000000..034bbf3 --- /dev/null +++ b/controller/waterIntakeController.js @@ -0,0 +1,38 @@ +const supabase = require('../dbConnection'); + +/** + * Update the daily water intake for a user + * @param {Request} req - Express request object + * @param {Response} res - Express response object + */ +const updateWaterIntake = async (req, res) => { + try { + const { user_id, glasses_consumed } = req.body; + const date = new Date().toISOString().split('T')[0]; + + if (!user_id || typeof glasses_consumed !== 'number') { + return res.status(400).json({ error: 'User ID and glasses consumed are required' }); + } + + const { data, error } = await supabase + .from('water_intake') + .upsert({ + user_id: user_id, + date: date, + glasses_consumed: glasses_consumed, + updated_at: new Date().toISOString() + }, { onConflict: ['user_id', 'date'] }); + + if (error) { + console.error('Error updating water intake:', error.message); + return res.status(500).json({ error: 'Failed to update water intake' }); + } + + return res.status(200).json({ message: 'Water intake updated successfully', data }); + } catch (error) { + console.error('Internal server error:', error.message); + return res.status(500).json({ error: 'Internal server error' }); + } +}; + +module.exports = { updateWaterIntake }; diff --git a/database/ingredient-allergy-trigger.sql b/database/ingredient-allergy-trigger.sql new file mode 100644 index 0000000..6500d3d --- /dev/null +++ b/database/ingredient-allergy-trigger.sql @@ -0,0 +1,18 @@ +-- Update ingredient allergy BOOL for recipes relation +create function update_allergies() +returns trigger +language plpgsql +as $$ +begin + UPDATE recipe_ingredient t1 + SET allergy = TRUE + FROM user_allergies t2 + WHERE t1.user_id = t2.user_id AND t1.ingredient_id = t2.allergy_id; + RETURN NULL; +end; +$$; + +create trigger allergy_update_trigger +after insert on recipe_ingredient +for each row +execute function update_allergies(); \ No newline at end of file diff --git a/database/ingredient-dislike-trigger.sql b/database/ingredient-dislike-trigger.sql new file mode 100644 index 0000000..7c02672 --- /dev/null +++ b/database/ingredient-dislike-trigger.sql @@ -0,0 +1,18 @@ +-- Update dislike BOOL for recipes relation +create function update_dislikes() +returns trigger +language plpgsql +as $$ +begin + UPDATE recipe_ingredient t1 + SET dislike = TRUE + FROM user_dislikes t2 + WHERE t1.user_id = t2.user_id AND t1.ingredient_id = t2.dislike_id; + RETURN NULL; +end; +$$; + +create trigger dislike_update_trigger +after insert on recipe_ingredient +for each row +execute function update_dislikes(); \ No newline at end of file diff --git a/database/recipe-allergy-trigger.sql b/database/recipe-allergy-trigger.sql new file mode 100644 index 0000000..6310b8f --- /dev/null +++ b/database/recipe-allergy-trigger.sql @@ -0,0 +1,18 @@ +-- Update recipes allergy BOOL +create function update_recipe_allergies() +returns trigger +language plpgsql +as $$ +begin + UPDATE recipes t1 + SET allergy = TRUE + FROM recipe_ingredient t2 + WHERE t1.user_id = t2.user_id AND t1.id = t2.recipe_id AND t2.allergy = TRUE; + RETURN NULL; +end; +$$; + +create trigger allergy_recipe_update_trigger +after update on recipe_ingredient +for each row +execute function update_recipe_allergies(); \ No newline at end of file diff --git a/database/recipe-dislike-trigger.sql b/database/recipe-dislike-trigger.sql new file mode 100644 index 0000000..6f7c3a7 --- /dev/null +++ b/database/recipe-dislike-trigger.sql @@ -0,0 +1,18 @@ +-- Update recipes dislike BOOL +create function update_recipe_dislikes() +returns trigger +language plpgsql +as $$ +begin + UPDATE recipes t1 + SET dislike = TRUE + FROM recipe_ingredient t2 + WHERE t1.user_id = t2.user_id AND t1.id = t2.recipe_id AND t2.dislike = TRUE; + RETURN NULL; +end; +$$; + +create trigger dislike_recipe_update_trigger +after update on recipe_ingredient +for each row +execute function update_recipe_dislikes(); \ No newline at end of file diff --git a/database/supabase.js b/database/supabase.js new file mode 100644 index 0000000..75c5ee1 --- /dev/null +++ b/database/supabase.js @@ -0,0 +1,10 @@ +// database/supabase.js +const { createClient } = require("@supabase/supabase-js"); + +const supabase = createClient( + process.env.SUPABASE_URL, + process.env.SUPABASE_ANON_KEY, + { auth: { autoRefreshToken: false, persistSession: false } } +); + +module.exports = { supabase, createClient }; diff --git a/index.yaml b/index.yaml new file mode 100644 index 0000000..6e166d5 --- /dev/null +++ b/index.yaml @@ -0,0 +1,2593 @@ +openapi: 3.0.0 +info: + title: NutriHelp API + version: 1.0.0 +servers: + - url: http://localhost:3000/api +paths: + /upload: + post: + summary: Upload a file + description: Upload JPG, PNG, or PDF (max 5MB, limited to 5 uploads per 10 minutes) + security: + - BearerAuth: [] + requestBody: + required: true + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + responses: + '200': + description: File uploaded successfully + '400': + description: Upload failed due to size/type restriction + '429': + description: Too many uploads from this IP (rate limit exceeded) + /appointments: + post: + summary: Save appointment data + description: Receives a user ID, date, time, and description, and saves the appointment data + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Appointment' + responses: + '201': + description: Appointment saved successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + get: + summary: Retrieve all appointment data + description: Returns a JSON array containing all appointments + responses: + '200': + description: Appointments fetched successfully + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Appointment' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /auth/request-email-verification: + post: + tags: + - Authentication + summary: Send email verification request + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + email: + type: string + example: testuser@example.com + responses: + '200': + description: Verification email sent + content: + application/json: + schema: + type: object + properties: + message: + type: string + example: Verification email sent to testuser@example.com + '400': + description: Bad request – email missing + '500': + description: Internal server error + /verify-email/{token}: + get: + tags: + - Authentication + summary: Verify email token + parameters: + - in: path + name: token + required: true + schema: + type: string + description: Verification token sent by email + responses: + '200': + description: Verified + content: + application/json: + schema: + type: object + properties: + verified: + type: boolean + email: + type: string + example: + verified: true + email: "user@example.com" + '400': + description: Invalid, expired or already used token + content: + application/json: + schema: + type: object + properties: + verified: + type: boolean + error: + type: string + example: + verified: false + error: "invalid_token" + '500': + description: Internal server error + /contactus: + post: + summary: Contact us + description: Receives a contact request + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ContactRequest' + responses: + '201': + description: Data received successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + text/plain: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/dietaryrequirements: + get: + summary: Get dietary requirements + description: Retrieves a list of dietary requirements + responses: + '200': + description: List of dietary requirements + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/cuisines: + get: + summary: Get cuisines + description: Retrieves a list of cuisines + responses: + '200': + description: List of cuisines + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/allergies: + get: + summary: Get allergies + description: Retrieves a list of allergies + responses: + '200': + description: List of allergies + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/ingredients: + get: + summary: Get ingredients + description: Retrieves a list of ingredients (name and ID only) + responses: + '200': + description: List of ingredients + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/cookingmethods: + get: + summary: Get cooking methods + description: Retrieves a list of cooking methods + responses: + '200': + description: List of cooking methods + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/spicelevels: + get: + summary: Get spice levels + description: Retrieves a list of spice levels + responses: + '200': + description: List of spice levels + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /fooddata/healthconditions: + get: + summary: Get health conditions + description: Retrieves a list of health conditions + responses: + '200': + description: List of health conditions + content: + application/json: + schema: + $ref: '#/components/schemas/IDNamePair' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /imageClassification: + post: + summary: Image classification + description: Receives an image and classifies it + requestBody: + required: true + content: + multipart/form-data: + schema: + type: object + properties: + image: + type: string + format: binary + responses: + '200': + description: Image classified successfully + content: + application/json: + schema: + type: object + properties: + prediction: + type: string + example: "Avocado:~160 calories per 100 grams" + '400': + description: Bad request - missing image + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /recipeImageClassification: + post: + summary: Recipe image classification + description: Receives an image of a recipe and classifies it + requestBody: + required: true + content: + multipart/form-data: + schema: + type: object + properties: + image: + type: string + format: binary + responses: + '200': + description: Image classified successfully + content: + application/json: + schema: + type: object + properties: + prediction: + type: string + example: "Lasagna" + '400': + description: Bad request - missing image + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /login: + post: + summary: User login + description: Authenticates user and returns a JWT token + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LoginRequest' + responses: + '200': + description: Login successful, JWT token returned + content: + application/json: + schema: + type: object + properties: + token: + $ref: '#/components/schemas/JWTResponse' + user: + $ref: '#/components/schemas/UserResponse' + '400': + description: Email and password are required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '401': + description: Invalid email or password + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /login/mfa: + post: + summary: Multi-factor authentication + description: Authenticates user with multi-factor authentication + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LoginWithMFARequest' + responses: + '200': + description: MFA successful, JWT token returned + content: + application/json: + schema: + type: object + properties: + token: + $ref: '#/components/schemas/JWTResponse' + user: + $ref: '#/components/schemas/UserResponse' + '400': + description: Email and password are required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '401': + description: Invalid email or password + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /mealplan: + get: + summary: Get meal plan + description: Retrieves a meal plan for the user + # TODO should not use requestBody for GET + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + id: + type: integer + user_id: + type: integer + responses: + '200': + description: Meal plan fetched successfully + content: + application/json: + schema: + $ref: '#/components/schemas/CreateMealPlanRequest' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + post: + summary: Save meal plan + description: Receives a meal plan and saves it + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/MealPlanResponse' + responses: + '201': + description: Meal plan saved successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + delete: + summary: Delete meal plan + description: Deletes the user's meal plan + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + id: + type: integer + user_id: + type: integer + responses: + '204': + description: Meal plan deleted successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /recipe: + post: + summary: Get all recipes + description: Retrieves recipes for a given user ID + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + user_id: + type: integer + responses: + '200': + description: Recipe fetched successfully + content: + application/json: + schema: + type: object + properties: + recipes: + type: array + items: + type: object + properties: + id: + type: integer + created_at: + type: string + recipe_name: + type: string + cuisine_id: + type: integer + total_servings: + type: integer + preparation_time: + type: integer + ingredients: + type: object + properties: + id: + type: array + items: + type: integer + quantity: + type: array + items: + type: integer + category: + type: array + items: + type: string + name: + type: array + items: + type: string + instructions: + type: string + calories: + type: number + fat: + type: number + carbohydrates: + type: number + protein: + type: number + fiber: + type: number + vitamin_a: + type: number + vitamin_b: + type: number + vitamin_c: + type: number + vitamin_d: + type: number + sodium: + type: number + sugar: + type: number + cuisine_name: + type: string + + '400': + description: User ID is required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '404': + description: Recipes, ingredients, or cuisines not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /signup: + post: + summary: User signup + description: Registers a new user with an email and password + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/SignupRequest' + responses: + '201': + description: User created successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - either missing email/password or user already exists + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /userfeedback: + post: + summary: User feedback + description: Receives user feedback + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/FeedbackRequest' + responses: + '201': + description: Feedback received successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /user/preferences: + get: + summary: Get user preferences + description: Retrieves a list of user preferences + security: + - BearerAuth: [ ] + responses: + '200': + description: List of user preferences + content: + application/json: + schema: + type: object + properties: + dietary_requirements: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + allergies: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + cuisines: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + dislikes: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + health_conditions: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + spice_levels: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + cooking_methods: + type: array + items: + $ref: '#/components/schemas/IDNamePair' + examples: + userPreferences: + value: + dietary_requirements: + - id: 1 + name: "Vegetarian" + allergies: + - id: 1 + name: "Peanuts" + cuisines: + - id: 2 + name: "French" + - id: 5 + name: "Italian" + dislikes: + - id: 4 + name: "Chicken Thigh Fillets" + health_conditions: [ ] + spice_levels: + - id: 1 + name: "Mild" + - id: 2 + name: "Medium" + cooking_methods: + - id: 1 + name: "Bake" + - id: 4 + name: "Grill" + '400': + description: User ID is required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '404': + description: User preferences not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + post: + summary: Update user preferences + description: Updates the user's preferences + security: + - BearerAuth: [ ] + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + dietary_requirements: + type: array + items: + type: integer + allergies: + type: array + items: + type: integer + cuisines: + type: array + items: + type: integer + dislikes: + type: array + items: + type: integer + health_conditions: + type: array + items: + type: integer + spice_levels: + type: array + items: + type: integer + cooking_methods: + type: array + items: + type: integer + example: + dietary_requirements: [ 1, 2, 4 ] + allergies: [ 1 ] + cuisines: [ 2, 5 ] + dislikes: [ 4 ] + health_conditions: [ ] + spice_levels: [ 1, 2 ] + cooking_methods: [ 1, 4, 5 ] + responses: + '204': + description: User preferences updated successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: User ID is required or Request body is required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /userprofile: + get: + summary: Get user profile + description: Retrieves the user's profile + # TODO should not use requestBody for GET + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + email: + type: string + responses: + '200': + description: User profile fetched successfully + content: + application/json: + schema: + $ref: '#/components/schemas/UserProfileResponse' + '400': + description: Email is required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + put: + summary: Update user profile + description: Updates the user's profile + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UserUpdateRequest' + responses: + '204': + description: User profile updated successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: User ID is required or Request body is required + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + + /notifications: + post: + summary: Create a new notification + description: Creates a new notification for a specific user. + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + user_id: + type: integer + description: Unique identifier of the user. + type: + type: string + description: Type of notification (e.g., Email, Server, Phone). + content: + type: string + description: Content of the notification. + required: + - user_id + - type + - content + example: + user_id: 123 + type: "Email" + content: "This is a test notification" + responses: + 201: + description: Notification created successfully + content: + application/json: + schema: + type: object + properties: + message: + type: string + notification: + type: object + properties: + simple_id: + type: integer + user_id: + type: integer + type: + type: string + content: + type: string + status: + type: string + timestamp: + type: string + format: date-time + 400: + description: Bad Request - Missing required fields + 500: + description: Internal Server Error + + /notifications/{user_id}: + get: + summary: Get all notifications for a specific user + description: Retrieves all notifications associated with a specific `user_id`. + parameters: + - in: path + name: user_id + required: true + schema: + type: integer + description: Unique identifier of the user. + responses: + 200: + description: List of notifications for the user + content: + application/json: + schema: + type: array + items: + type: object + properties: + simple_id: + type: integer + user_id: + type: integer + type: + type: string + content: + type: string + status: + type: string + timestamp: + type: string + format: date-time + 404: + description: No notifications found for the user + 500: + description: Internal Server Error + + /notifications/{simple_id}: + delete: + summary: Delete a specific notification by simple ID + description: Deletes a notification identified by its `simple_id` (integer). + parameters: + - in: path + name: simple_id + required: true + schema: + type: integer + description: Simple identifier (integer) of the notification. + responses: + 200: + description: Notification deleted successfully + content: + application/json: + schema: + type: object + properties: + message: + type: string + example: "Notification deleted successfully" + 404: + description: Notification not found + 500: + description: Internal Server Error + + put: + summary: Update notification status by simple ID + description: Updates the status of a notification identified by its `simple_id` (integer). + parameters: + - in: path + name: simple_id + required: true + schema: + type: integer + description: Simple identifier (integer) of the notification. + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + status: + type: string + description: New status for the notification (e.g., "read" or "unread"). + required: + - status + example: + status: "read" + responses: + 200: + description: Notification updated successfully + content: + application/json: + schema: + type: object + properties: + message: + type: string + notification: + type: object + properties: + simple_id: + type: integer + user_id: + type: integer + type: + type: string + content: + type: string + status: + type: string + timestamp: + type: string + format: date-time + 404: + description: Notification not found + 500: + description: Internal Server Error + /substitution/ingredient/{ingredientId}: + get: + summary: Get ingredient substitutions + description: Retrieves substitution options for a specific ingredient, with optional filtering by allergies, dietary requirements, and health conditions. + parameters: + - name: ingredientId + in: path + required: true + description: ID of the ingredient to find substitutions for + schema: + type: integer + - name: allergies + in: query + required: false + description: List of allergy IDs to exclude from substitutions. Pass as a comma-separated string. + schema: + type: string + example: "2,3" + - name: dietaryRequirements + in: query + required: false + description: List of dietary requirement IDs to filter substitutions by. Pass as a comma-separated string. + schema: + type: string + example: "1,4" + - name: healthConditions + in: query + required: false + description: List of health condition IDs to consider for substitutions. Pass as a comma-separated string. + schema: + type: string + example: "2,5" + responses: + '200': + description: Substitution options retrieved successfully + content: + application/json: + schema: + $ref: '#/components/schemas/IngredientSubstitutionResponse' + '400': + description: Bad request - missing ingredient ID + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '404': + description: Ingredient not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /filter: + get: + summary: Filter recipes + description: Retrieve recipes filtered by dietary preferences and allergens. + tags: + - Recipes + parameters: + - name: allergies + in: query + description: List of allergens to exclude from the recipes. Pass as a comma-separated string or array. + required: false + schema: + type: string + example: Peanut,Soy + - name: dietary + in: query + description: Dietary preference to filter by (e.g., vegan, vegetarian). + required: false + schema: + type: string + example: vegan + - name: include_details + in: query + required: false + description: Whether to include full relationship details + schema: + type: string + enum: [true, false] + default: true + responses: + '200': + description: Filtered recipes + content: + application/json: + schema: + type: array + items: + type: object + properties: + id: + type: integer + description: Recipe ID + example: 1 + name: + type: string + description: Name of the recipe + example: Vegan Salad + recipe_ingredients: + type: array + description: Ingredients used in the recipe + items: + type: object + properties: + ingredient_id: + type: integer + description: ID of the ingredient + example: 3 + ingredients: + type: object + properties: + name: + type: string + description: Name of the ingredient + example: Lettuce + allergen: + type: string + description: Allergen associated with the ingredient + example: null + dietary_flag: + type: string + description: Dietary classification of the ingredient + example: vegan + '400': + description: Error in filtering recipes + content: + application/json: + schema: + type: object + properties: + error: + type: string + description: Error message + example: "Allergy type not found" + + /auth/log-login-attempt: + post: + summary: Log a login attempt + description: Records a login attempt in the auth_logs table with email, user ID (optional), IP, timestamp, and success status. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LoginLog' + responses: + '201': + description: Login attempt logged successfully + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + '400': + description: Bad request - missing required fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + + /recipe/cost/{recipe_id}: + get: + summary: Calculate estimated cost for a recipe + description: Returns JSON object containing estimated cost information and corresponding ingredients price + parameters: + - name: recipe_id + in: path + required: true + schema: + type: integer + description: Integer ID of the recipe for cost calculation + - name: exclude_ids + in: query + required: false + schema: + type: string + description: List of ingredient ids to be excluded, separated by commas + - name: desired_servings + in: query + required: false + schema: + type: integer + description: Number of serving would like to scale + responses: + '200': + description: Calculate cost successfully + content: + application/json: + schema: + $ref: '#/components/schemas/EstimatedCost' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + + /health-news: + get: + summary: Calculate estimated cost for a recipe + description: Returns JSON array containing total cost and corresponding ingredients price + summary: Unified Health News API + description: Comprehensive API for health news management with multiple actions and flexible filtering + parameters: + - in: path + name: recipe_id + required: true + - name: action + in: query + required: false + description: | + Action to perform (optional - API will auto-detect based on provided parameters): + - "filter" (default): Filter health news articles using flexible criteria + - "getById": Get specific health news by ID (requires id parameter) + - "getByCategory": Get news by category (requires categoryId parameter) + - "getByAuthor": Get news by author (requires authorId parameter) + - "getByTag": Get news by tag (requires tagId parameter) + - "getAllCategories": Get all categories + - "getAllAuthors": Get all authors + - "getAllTags": Get all tags + schema: + type: string + enum: [filter, getAll, getById, getByCategory, getByAuthor, getByTag, getAllCategories, getAllAuthors, getAllTags] + default: filter + - name: id + in: query + required: false + description: Health news ID + schema: + type: string + format: uuid + - name: categoryId + in: query + required: false + description: Category ID + schema: + type: string + format: uuid + - name: authorId + in: query + required: false + description: Author ID + schema: + type: string + format: uuid + - name: tagId + in: query + required: false + description: Tag ID + schema: + type: string + format: uuid + - name: title + in: query + required: false + description: Filter news by title (partial match) + schema: + type: string + - name: content + in: query + required: false + description: Filter news by content (partial match) + schema: + type: string + - name: author_name + in: query + required: false + description: Filter news by author name (partial match) + schema: + type: string + - name: category_name + in: query + required: false + description: Filter news by category name (partial match) + schema: + type: string + - name: tag_name + in: query + required: false + description: Filter news by tag name (partial match) + schema: + type: string + - name: start_date + in: query + required: false + description: Filter news published on or after this date (ISO format) + schema: + type: string + format: date-time + - name: end_date + in: query + required: false + description: Filter news published on or before this date (ISO format) + schema: + type: string + format: date-time + - name: sort_by + in: query + required: false + description: Field to sort by + schema: + type: string + default: published_at + - name: sort_order + in: query + required: false + description: Sort order + schema: + type: string + enum: [asc, desc] + default: desc + - name: limit + in: query + required: false + description: Number of records to return + schema: + type: integer + description: Integer ID of the recipe for cost calculation + default: 20 + - name: page + in: query + required: false + description: Page number for pagination + schema: + type: integer + default: 1 + - name: include_details + in: query + required: false + description: Whether to include full relationship details + schema: + type: string + enum: [true, false] + default: true + responses: + '200': + description: Calculate cost successfully + description: Successfully retrieved requested data + content: + application/json: + schema: + $ref: '#/components/schemas/EstimatedCost' + type: object + properties: + success: + type: boolean + example: true + data: + oneOf: + - type: array + items: + $ref: '#/components/schemas/HealthNews' + - $ref: '#/components/schemas/HealthNews' + - type: array + items: + $ref: '#/components/schemas/Category' + - type: array + items: + $ref: '#/components/schemas/Author' + - type: array + items: + $ref: '#/components/schemas/Tag' + pagination: + type: object + properties: + total: + type: integer + example: 48 + page: + type: integer + example: 1 + limit: + type: integer + example: 20 + total_pages: + type: integer + example: 3 + post: + summary: Unified Health News Creation API + description: Create health news articles and related entities + parameters: + - name: action + in: query + required: false + description: | + Action to perform: + - "createNews" (default): Create a new health news article + - "createCategory": Create a new category + - "createAuthor": Create a new author + - "createTag": Create a new tag + schema: + type: string + enum: [createNews, createCategory, createAuthor, createTag] + default: createNews + requestBody: + required: true + content: + application/json: + schema: + oneOf: + - type: object + properties: + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals" + summary: + type: string + example: "This article explains how to maintain health through proper meal planning" + content: + type: string + example: "Proper eating habits are essential for health." + author_id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174001" + category_id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174003" + required: + - title + - content + - type: object + properties: + name: + type: string + example: "Nutrition" + description: + type: string + example: "Articles about food nutrition" + required: + - name + - type: object + properties: + name: + type: string + example: "Dr. Smith" + bio: + type: string + example: "Nutrition expert with 20 years of experience" + required: + - name + - type: object + properties: + name: + type: string + example: "Weight Loss" + required: + - name + responses: + '201': + description: Resource created successfully + content: + application/json: + schema: + type: object + properties: + success: + type: boolean + example: true + data: + type: object + properties: + id: + type: string + example: "123e4567-e89b-12d3-a456-426614174000" + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals" + put: + summary: Update Health News + description: Update health news articles + parameters: + - name: id + in: query + required: true + description: Health news ID + schema: + type: string + format: uuid + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals (Updated)" + summary: + type: string + example: "This article explains how to maintain health through proper meal planning" + responses: + '200': + description: Health news updated successfully + content: + application/json: + schema: + type: object + properties: + success: + type: boolean + example: true + data: + $ref: '#/components/schemas/HealthNews' + '400': + description: Bad request - missing required parameter + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '404': + description: Health news not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + delete: + summary: Delete Health News + description: Delete health news articles + parameters: + - name: id + in: query + required: true + description: Health news ID + schema: + type: string + format: uuid + responses: + '200': + description: Health news deleted successfully + content: + application/json: + schema: + type: object + properties: + success: + type: boolean + example: true + message: + type: string + example: Health news successfully deleted + '400': + description: Bad request - missing required parameter + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '404': + description: Health news not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + + /recipe/nutritionlog: + get: + summary: Get full nutrition info for a recipe by name + description: Returns nutritional values of a recipe based on recipe_name + parameters: + - in: query + name: name + schema: + type: string + required: true + description: The name of the recipe to search (case-insensitive) + responses: + '200': + description: Nutritional info returned successfully + content: + application/json: + schema: + type: object + properties: + recipe_name: + type: string + calories: + type: number + fat: + type: number + carbohydrates: + type: number + protein: + type: number + fiber: + type: number + vitamin_a: + type: number + vitamin_b: + type: number + vitamin_c: + type: number + vitamin_d: + type: number + sodium: + type: number + sugar: + type: number + '400': + description: Missing recipe name query parameter + '404': + description: Recipe not found + '500': + description: Internal server error + + /healthArticles: + get: + summary: Search health articles + description: | + Search for health articles based on query string. The search is performed across article titles, tags, and content. + Results can be paginated, sorted, and filtered. + parameters: + - name: query + in: query + required: true + description: Search query string + schema: + type: string + - name: page + in: query + required: false + description: + schema: + type: integer + minimum: 1 + default: 1 + - name: limit + in: query + required: false + description: + schema: + type: integer + minimum: 1 + default: 10 + - name: sortBy + in: query + required: false + description: + schema: + type: string + enum: [created_at, title, views] + default: created_at + - name: sortOrder + in: query + required: false + description: Sort order (asc or desc) + schema: + type: string + enum: [asc, desc] + default: desc + responses: + '200': + description: Successful search + + /water-intake: + post: + summary: Update the number of glasses of water consumed + description: Updates the user's daily water intake by adding the number of glasses consumed. + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + user_id: + type: string + format: uuid + description: The unique ID of the user + glasses_consumed: + type: integer + description: Number of glasses consumed + required: + - user_id + - glasses_consumed + example: + user_id: "15" + glasses_consumed: 5 + responses: + '200': + description: Water intake updated successfully + content: + application/json: + schema: + type: object + properties: + message: + type: string + example: "Water intake updated successfully" + data: + type: object + properties: + user_id: + type: string + example: "15" + date: + type: string + format: date + example: "2025-05-10" + glasses_consumed: + type: integer + example: 5 + updated_at: + type: string + format: date-time + example: "2025-05-10T12:00:00Z" + '400': + description: Bad request - missing or invalid fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + /chatbot/history: + post: + summary: Get chat history + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UserIdRequest' + responses: + '200': + description: Chat history retrieved successfully + content: + application/json: + schema: + $ref: '#/components/schemas/ChatHistoryResponse' + '500': + description: Internal server error + + delete: + summary: Clear chat history + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UserIdRequest' + responses: + '200': + description: Chat history cleared successfully + content: + application/json: + schema: + $ref: '#/components/schemas/GenericSuccessResponse' + '500': + description: Internal server error + + /medical-report/retrieve: + post: + summary: Predict obesity level and diabetes risks + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/MedicalReportRequest' + responses: + '200': + description: Obesity level and diabetes risk result + content: + application/json: + schema: + $ref: '#/components/schemas/MedicalReportResponse' + '400': + description: Bad Request - Invalid input data. + '401': + description: Unauthorized - Authentication credentials missing or invalid. + '500': + description: Internal Server Error - Something went wrong on the server. + type: object + properties: + user_id: + type: string + format: uuid + description: The unique ID of the user + glasses_consumed: + type: integer + description: Number of glasses consumed + required: + - user_id + - glasses_consumed + example: + user_id: "15" + glasses_consumed: 5 + responses: + '200': + description: Water intake updated successfully + content: + application/json: + schema: + type: object + properties: + message: + type: string + example: "Water intake updated successfully" + data: + type: object + properties: + user_id: + type: string + example: "15" + date: + type: string + format: date + example: "2025-05-10" + glasses_consumed: + type: integer + example: 5 + updated_at: + type: string + format: date-time + example: "2025-05-10T12:00:00Z" + '400': + description: Bad request - missing or invalid fields + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + +components: + securitySchemes: + BearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + schemas: + LoginRequest: + type: object + properties: + email: + type: string + example: test@email.com + password: + type: string + example: test123 + required: + - email + - password + SignupRequest: + type: object + properties: + name: + type: string + email: + type: string + password: + type: string + contact_number: + type: string + address: + type: string + required: + - name + - email + - password + - contact_number + - address + LoginWithMFARequest: + type: object + properties: + email: + type: string + password: + type: string + format: password + mfa_token: + type: string + required: + - email + - password + - mfa_token + UserResponse: + type: object + properties: + user_id: + type: integer + email: + type: string + password: + type: string + mfa_enabled: + type: boolean + UserUpdateRequest: + type: object + properties: + username: + type: string + first_name: + type: string + last_name: + type: string + email: + type: string + format: email + contact_number: + type: string + UserProfileResponse: + type: object + properties: + user_id: + type: integer + name: + type: string + first_name: + type: string + last_name: + type: string + email: + type: string + format: email + contact_number: + type: string + mfa_enabled: + type: boolean + JWTResponse: + type: string + ContactRequest: + type: object + properties: + name: + type: string + email: + type: string + format: email + message: + type: string + required: + - name + - email + - message + FeedbackRequest: + type: object + properties: + name: + type: string + contact_number: + type: string + email: + type: string + format: email + experience: + type: string + message: + type: string + required: + - name + - contact_number + - email + - experience + - message + IDNamePair: + type: object + properties: + id: + type: string + name: + type: string + Appointment: + type: object + properties: + userId: + type: integer + date: + type: string + format: date-time + time: + type: string + description: + type: string + SuccessResponse: + type: object + properties: + message: + type: string + ErrorResponse: + type: object + properties: + error: + type: string + Recipe: + type: object + properties: + id: + type: integer + name: + type: string + ingredients: + type: array + items: + type: string + cooking_method: + type: string + cuisine: + type: string + spice_level: + type: string + health_condition: + type: string + dietary_requirement: + type: string + allergy: + type: string + dislikes: + type: string + MealPlanRecipe: + type: object + properties: + id: + type: integer + example: 1 + name: + type: string + details: + type: object + properties: + calories: + type: number + fats: + type: number + proteins: + type: number + vitamins: + type: number + sodium: + type: number + CreateMealPlanRequest: + type: object + properties: + id: + type: integer + meal_type: + type: string + recipes: + type: array + items: + $ref: '#/components/schemas/MealPlanRecipe' + MealPlanResponse: + type: object + properties: + user_id: + type: integer + meal_type: + type: string + recipe_ids: + type: array + items: + type: integer + + LoginLog: + type: object + properties: + email: + type: string + example: user@example.com + user_id: + type: integer + nullable: true + example: 123 + success: + type: boolean + example: true + ip_address: + type: string + example: "192.168.1.1" + created_at: + type: string + format: date-time + example: "2025-03-23T13:45:00Z" + required: + - email + - success + - ip_address + - created_at + + EstimatedCost: + type: object + properties: + info: + type: object + properties: + estimation_type: + type: string + include_all_wanted_ingredients: + type: boolean + minimum_cost: + type: number + maximum_cost: + type: number + low_cost: + type: object + properties: + price: + type: number + count: + type: number + ingredients: + type: array + items: + type: object + properties: + ingredient_id: + type: integer + product_name: + type: string + quantity: + type: string + purchase_quantity: + type: integer + total_cost: + type: number + high_cost: + type: object + properties: + price: + type: number + count: + type: number + ingredients: + type: array + items: + type: object + properties: + ingredient_id: + type: integer + product_name: + type: string + quantity: + type: string + purchase_quantity: + type: integer + total_cost: + type: number + + minimum_cost: + type: number + maximum_cost: + type: number + include_all_ingredients: + type: boolean + low_cost_ingredients: + type: array + items: + type: object + properties: + ingredient_id: + type: integer + product_name: + type: string + quantity: + type: string + purchase_quantity: + type: integer + total_cost: + type: number + high_cost_ingredients: + type: array + items: + type: object + properties: + ingredient_id: + type: integer + product_name: + type: string + quantity: + type: string + purchase_quantity: + type: integer + total_cost: + type: number + + HealthNews: + type: object + properties: + id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174000" + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals" + summary: + type: string + example: "This article explains how to maintain health through proper meal planning" + author: + type: object + properties: + name: + type: string + example: "Dr. Smith" + category: + type: object + properties: + name: + type: string + example: "Nutrition" + image_url: + type: string + format: url + example: "https://example.com/images/healthy-eating.jpg" + published_at: + type: string + format: date-time + example: "2023-09-15T10:30:00Z" + + HealthNewsCreateRequest: + type: object + properties: + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals" + summary: + type: string + example: "This article explains how to maintain health through proper meal planning" + content: + type: string + example: "Proper eating habits are essential for health." + author_id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174001" + category_id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174003" + image_url: + type: string + format: url + example: "https://example.com/images/healthy-eating.jpg" + + HealthNewsUpdateRequest: + type: object + properties: + title: + type: string + example: "Diet and Health: How to Plan Your Daily Meals (Updated)" + summary: + type: string + example: "This article explains how to maintain health through proper meal planning" + category_id: + type: string + format: uuid + example: "123e4567-e89b-12d3-a456-426614174003" + + Author: + type: object + properties: + name: + type: string + example: "Dr. Smith" + bio: + type: string + example: "Nutrition expert with 20 years of experience" + + Source: + type: object + properties: + name: + type: string + example: "Health Times" + base_url: + type: string + format: url + example: "https://health-news.com" + + Category: + type: object + properties: + name: + type: string + example: "Nutrition" + description: + type: string + example: "Articles about food nutrition" + + Tag: + type: object + properties: + name: + type: string + example: "Weight Loss" + + # Chatbot-related Schemas + ChatbotQueryRequest: + type: object + properties: + user_id: + type: integer + user_input: + type: string + + ChatbotQueryResponse: + type: object + properties: + response_text: + type: string + # optional message field + message: + type: string + + ChatHistoryResponse: + type: object + properties: + message: + type: string + chat_history: + type: array + items: + type: object + properties: + user_input: + type: string + response_text: + type: string + timestamp: + type: string + format: date-time + + GenericSuccessResponse: + type: object + properties: + message: + type: string + + UserIdRequest: + type: object + properties: + user_id: + type: integer + + MedicalReportRequest: + MedicalReportRequest: + type: object + required: + - Gender + - Age + - Height + - Weight + - Any family history of overweight (yes/no) + - Frequent High Calorie Food Consumption (yes/no) + - Consumption of vegetables in meals + - Consumption of Food Between Meals + - Number of Main Meals + - Daily Water Intake + - Do you Smoke? + - Do you monitor your daily calories? + - Physical Activity Frequency + - Time Using Technology Devices Daily + - Alcohol Consumption Rate + - Mode of Transportation you use + properties: + Gender: + type: string + description: Gender of the individual. + example: "Male" + Age: + type: number + format: float + description: Age in years. + example: 24.443011 + Height: + type: number + format: float + description: Height in meters. + example: 1.699998 + Weight: + type: number + format: float + description: Weight in kilograms. + example: 81.66995 + Any family history of overweight (yes/no): + type: string + enum: ["yes", "no"] + description: Indicates if there is a family history of being overweight. + example: "yes" + Frequent High Calorie Food Consumption (yes/no): + type: string + enum: ["yes", "no"] + description: Indicates frequent consumption of high-calorie food. + example: "yes" + Consumption of vegetables in meals: + type: number + format: float + description: Frequency of vegetable consumption in meals. + example: 2.7 + Consumption of Food Between Meals: + type: string + enum: ["no", "Sometimes", "Frequently", "Always"] + description: Frequency of consuming food between meals. + example: "Sometimes" + Number of Main Meals: + type: number + format: float + description: Number of main meals per day. + example: 2.983297 + Daily Water Intake: + type: number + format: float + description: Daily water intake in liters. + example: 2.763573 + Do you Smoke?: + type: string + enum: ["yes", "no"] + description: Indicates if the individual smokes. + example: "no" + Do you monitor your daily calories?: + type: string + enum: ["yes", "no"] + description: Indicates if the person monitors their daily calorie intake. + example: "no" + Physical Activity Frequency: + type: number + format: float + description: Frequency of physical activity per week. + example: 0 + Time Using Technology Devices Daily: + type: number + format: float + description: Time spent using technological devices daily (in hours). + example: 0.976473 + Alcohol Consumption Rate: + type: string + enum: ["no", "never", "Sometimes", "Frequently", "Always"] + description: Frequency of alcohol consumption. + example: "Sometimes" + Mode of Transportation you use: + type: string + enum: ["Car", "Motorbike", "Bike", "Public_Transportation", "Walking"] + description: Common mode of transportation used. + example: "Public_Transportation" + + MedicalReportResponse: + type: object + properties: + medical_report: + type: object + description: Report containing obesity and diabetes predictions. + properties: + obesity_prediction: + type: object + properties: + obesity_level: + type: string + description: Predicted obesity level. + example: "Obese" + diabetes_prediction: + type: object + properties: + diabetes: + type: boolean + description: Indicates if diabetes is predicted (true or false). + example: true + confidence: + type: number + format: float + description: Model confidence score for diabetes prediction. + example: 0.798 + \ No newline at end of file diff --git a/jwt package.json b/jwt package.json new file mode 100644 index 0000000..c9abecd --- /dev/null +++ b/jwt package.json @@ -0,0 +1,2 @@ +npm init -y +npm install express jsonwebtoken bcrypt dotenv diff --git a/jwt routes.js b/jwt routes.js new file mode 100644 index 0000000..24c5e0f --- /dev/null +++ b/jwt routes.js @@ -0,0 +1,38 @@ +const express = require('express'); +const bcrypt = require('bcrypt'); +const jwt = require('jsonwebtoken'); +const { users } = require('../users'); +const authenticateToken = require('../middleware/authMiddleware'); + +const router = express.Router(); + +// Register +router.post('/register', async (req, res) => { + const { username, password } = req.body; + const userExists = users.find(u => u.username === username); + if (userExists) return res.status(409).json({ message: 'User already exists' }); + + const hashedPassword = await bcrypt.hash(password, 10); + users.push({ username, password: hashedPassword }); + res.status(201).json({ message: 'User registered' }); +}); + +// Login +router.post('/login', async (req, res) => { + const { username, password } = req.body; + const user = users.find(u => u.username === username); + + if (!user || !(await bcrypt.compare(password, user.password))) { + return res.status(401).json({ message: 'Invalid credentials' }); + } + + const token = jwt.sign({ username: user.username }, process.env.JWT_SECRET, { expiresIn: '1h' }); + res.json({ token }); +}); + +// Protected route +router.get('/dashboard', authenticateToken, (req, res) => { + res.json({ message: `Welcome to NutriHelp, ${req.user.username}` }); +}); + +module.exports = router; diff --git a/jwt server.js b/jwt server.js new file mode 100644 index 0000000..84a5686 --- /dev/null +++ b/jwt server.js @@ -0,0 +1,16 @@ +const express = require('express'); +const dotenv = require('dotenv'); +const authRoutes = require('./routes/auth'); + +dotenv.config(); +const app = express(); + +app.use(express.json()); +app.use('/api/auth', authRoutes); // prefix for auth routes + +app.get('/', (req, res) => { + res.send('Welcome to NutriHelp API'); +}); + +const PORT = process.env.PORT || 3000; +app.listen(PORT, () => console.log(`Server running on http://localhost:${PORT}`)); diff --git a/jwt users.js b/jwt users.js new file mode 100644 index 0000000..5165b87 --- /dev/null +++ b/jwt users.js @@ -0,0 +1,3 @@ +const users = []; + +module.exports = { users }; diff --git a/middleware.js b/middleware.js new file mode 100644 index 0000000..dbc9fca --- /dev/null +++ b/middleware.js @@ -0,0 +1,16 @@ +const jwt = require('jsonwebtoken'); + +function authenticateToken(req, res, next) { + const authHeader = req.headers['authorization']; + const token = authHeader && authHeader.split(' ')[1]; // Bearer TOKEN + + if (!token) return res.status(401).json({ message: 'Token missing' }); + + jwt.verify(token, process.env.JWT_SECRET, (err, user) => { + if (err) return res.status(403).json({ message: 'Invalid token' }); + req.user = user; + next(); + }); +} + +module.exports = authenticateToken; diff --git a/middleware/authenticateToken.js b/middleware/authenticateToken.js index dc31600..ca5f2cb 100644 --- a/middleware/authenticateToken.js +++ b/middleware/authenticateToken.js @@ -6,7 +6,7 @@ const authenticateToken = (req, res, next) => { if (token == null) return res.sendStatus(401); - jwt.verify(token, process.env.TOKEN_SECRET, (err, user) => { + jwt.verify(token, process.env.JWT_TOKEN, (err, user) => { if (err) return res.sendStatus(403); req.user = user; next(); diff --git a/middleware/authorizeRoles.js b/middleware/authorizeRoles.js new file mode 100644 index 0000000..8dbb133 --- /dev/null +++ b/middleware/authorizeRoles.js @@ -0,0 +1,17 @@ +function authorizeRoles(...allowedRoles) { + return (req, res, next) => { + const userRole = req.user?.role; + + if (!userRole) { + return res.status(403).json({ message: "Role missing in token" }); + } + + if (!allowedRoles.includes(userRole)) { + return res.status(403).json({ message: "Access denied: insufficient role" }); + } + + next(); + }; +} + +module.exports = authorizeRoles; // ✅ make sure this line is present \ No newline at end of file diff --git a/middleware/rateLimiter.js b/middleware/rateLimiter.js new file mode 100644 index 0000000..b1ce60d --- /dev/null +++ b/middleware/rateLimiter.js @@ -0,0 +1,39 @@ +const rateLimit = require('express-rate-limit'); + +// For login and MFA +const loginLimiter = rateLimit({ + windowMs: 10 * 60 * 1000, // 10 minutes + max: 20, + message: { + status: 429, + error: "Too many login attempts, please try again after 10 minutes.", + }, + standardHeaders: true, + legacyHeaders: false, +}); + +// For signup +const signupLimiter = rateLimit({ + windowMs: 10 * 60 * 1000, + max: 10, + message: { + status: 429, + error: "Too many signup attempts, please try again later.", + }, + standardHeaders: true, + legacyHeaders: false, +}); + +// For contact us and feedback forms +const formLimiter = rateLimit({ + windowMs: 60 * 60 * 1000, + max: 20, + message: { + status: 429, + error: "Too many form submissions from this IP, please try again after an hour.", + }, + standardHeaders: true, + legacyHeaders: false, +}); + +module.exports = { loginLimiter, signupLimiter, formLimiter }; \ No newline at end of file diff --git a/middleware/uploadMiddleware.js b/middleware/uploadMiddleware.js new file mode 100644 index 0000000..8283a25 --- /dev/null +++ b/middleware/uploadMiddleware.js @@ -0,0 +1,32 @@ +const multer = require('multer'); //Install Multer using npm install multer +const path = require('path'); + +const fileFilter = (req, file, cb) => { + const allowedTypes = /jpeg|jpg|png|pdf/; + const extname = allowedTypes.test(path.extname(file.originalname).toLowerCase()); + const mimetype = allowedTypes.test(file.mimetype); + + if (extname && mimetype) { + cb(null, true); + } else { + cb(new Error('Only JPEG, PNG images and PDFs are allowed.')); + } +}; + +const storage = multer.diskStorage({ + destination: function (req, file, cb) { + cb(null, 'uploads/'); + }, + filename: function (req, file, cb) { + cb(null, `${Date.now()}_${file.originalname}`); + } +}); + +const upload = multer({ + storage: storage, + limits: { fileSize: 5 * 1024 * 1024 }, // 5MB + fileFilter: fileFilter, +}); + +module.exports = upload; + \ No newline at end of file diff --git a/middleware/validateRequest.js b/middleware/validateRequest.js new file mode 100644 index 0000000..eb4bef4 --- /dev/null +++ b/middleware/validateRequest.js @@ -0,0 +1,17 @@ +const { validationResult } = require('express-validator'); + +module.exports = (req, res, next) => { + const errors = validationResult(req); + + if (!errors.isEmpty()) { + return res.status(400).json({ + success: false, + errors: errors.array().map(err => ({ + field: err.param, + message: err.msg + })) + }); + } + + next(); +}; diff --git a/model/addAppointment.js b/model/addAppointment.js new file mode 100644 index 0000000..d5e7954 --- /dev/null +++ b/model/addAppointment.js @@ -0,0 +1,14 @@ +const supabase = require('../dbConnection.js'); + +async function addAppointment(userId, date, time, description) { + try { + let { data, error } = await supabase + .from('appointments') + .insert({ user_id: userId, date, time, description }) + return data; + } catch (error) { + throw error; + } +} + +module.exports = addAppointment; diff --git a/model/addContactUsMsg.js b/model/addContactUsMsg.js new file mode 100644 index 0000000..8ad2aee --- /dev/null +++ b/model/addContactUsMsg.js @@ -0,0 +1,14 @@ +const supabase = require('../dbConnection.js'); + +async function addContactUsMsg(name, email, subject, message) { + try { + let { data, error } = await supabase + .from('contactus') + .insert({ name: name, email: email, subject:subject, message: message }) + return data + } catch (error) { + throw error; + } +} + +module.exports = addContactUsMsg; \ No newline at end of file diff --git a/model/addImageClassificationFeedback.js b/model/addImageClassificationFeedback.js new file mode 100644 index 0000000..4039c1d --- /dev/null +++ b/model/addImageClassificationFeedback.js @@ -0,0 +1,68 @@ +const supabase = require("../dbConnection.js"); +const fs = require("fs"); +const path = require("path"); + +/** + * Stores image classification feedback in Supabase + * + * @param {string} user_id - User ID (optional) + * @param {string} image_path - Path to the image file + * @param {string} predicted_class - The class predicted by the system + * @param {string} correct_class - The correct class according to user + * @param {object} metadata - Additional metadata (optional) + * @returns {Promise} Supabase response + */ +async function addImageClassificationFeedback( + user_id, + image_path, + predicted_class, + correct_class, + metadata = {} +) { + try { + const filename = path.basename(image_path); + let image_data = null; + let image_type = null; + + if (fs.existsSync(image_path)) { + const fileBuffer = fs.readFileSync(image_path); + image_data = fileBuffer.toString('base64'); + + const ext = path.extname(image_path).toLowerCase(); + if (ext === '.jpg' || ext === '.jpeg') { + image_type = 'image/jpeg'; + } else if (ext === '.png') { + image_type = 'image/png'; + } else { + image_type = 'application/octet-stream'; + } + } + + const timestamp = new Date().toISOString(); + + const { data, error } = await supabase + .from("image_classification_feedback") + .insert({ + user_id: user_id || null, + filename: filename, + image_data: image_data, + image_type: image_type, + predicted_class: predicted_class, + correct_class: correct_class, + metadata: metadata, + created_at: timestamp + }); + + if (error) { + console.error("Error storing image classification feedback:", error); + throw error; + } + + return data; + } catch (error) { + console.error("Failed to store image classification feedback:", error); + throw error; + } +} + +module.exports = addImageClassificationFeedback; \ No newline at end of file diff --git a/model/addMfaToken.js b/model/addMfaToken.js new file mode 100644 index 0000000..4f74e3a --- /dev/null +++ b/model/addMfaToken.js @@ -0,0 +1,46 @@ +const supabase = require('../dbConnection.js'); + +async function addMfaToken(userId, token) { + try { + const currentDate = new Date(); + const expiryDate = new Date(currentDate.getTime() + 10 * 60000); // 10 minutes in milliseconds + + let { data, error } = await supabase + .from('mfatokens') + .insert({ user_id: userId, expiry: expiryDate.toISOString(), token: token }); + return data + } catch (error) { + console.log(error); + throw error; + } +} + +async function verifyMfaToken(userId, token) { + try { + + let { data, error } = await supabase + .from('mfatokens') + .select('id, user_id, token, is_used, expiry') + .eq('token', token) + .eq('user_id', userId) + .eq('is_used', false); + + const mfaToken = data[0]; + if (!mfaToken || !mfaToken.id) return false; + + await supabase + .from('mfatokens') + .update({ is_used: true }) + .in('token', data.map(tokenData => tokenData.token)); + + const currentDate = new Date(); + const expiryDate = new Date(mfaToken.expiry); + if (currentDate > expiryDate) return false + return true; + } catch (error) { + console.log(error); + throw error; + } +} + +module.exports = {addMfaToken, verifyMfaToken}; \ No newline at end of file diff --git a/model/addUser.js b/model/addUser.js index e30a0a8..de96c17 100644 --- a/model/addUser.js +++ b/model/addUser.js @@ -1,10 +1,17 @@ const supabase = require('../dbConnection.js'); -async function addUser(username, password) { +async function addUser(name, email, password, mfa_enabled, contact_number, address) { try { let { data, error } = await supabase .from('users') - .insert({ username: username, password: password }) + .insert({ + name: name, + email: email, + password: password, + mfa_enabled: mfa_enabled, + contact_number: contact_number, + address: address + }) return data } catch (error) { throw error; diff --git a/model/addUserFeedback.js b/model/addUserFeedback.js new file mode 100644 index 0000000..7045ee4 --- /dev/null +++ b/model/addUserFeedback.js @@ -0,0 +1,26 @@ +const supabase = require("../dbConnection.js"); + +async function addUserFeedback( + user_id, + name, + contact_number, + email, + experience, + comments +) { + try { + let { data, error } = await supabase.from("userfeedback").insert({ + user_id: user_id, + name: name, + contact_number: contact_number, + email: email, + experience: experience, + comments: comments, + }); + return data; + } catch (error) { + throw error; + } +} + +module.exports = addUserFeedback; diff --git a/model/chatbotHistory.js b/model/chatbotHistory.js new file mode 100644 index 0000000..6025856 --- /dev/null +++ b/model/chatbotHistory.js @@ -0,0 +1,55 @@ +const supabase = require('../dbConnection.js'); + +async function addHistory(user_id, user_input, chatbot_response) { + try { + const { data, error } = await supabase + .from('chat_history') + .insert([ + { + user_id, + user_input, + chatbot_response, + timestamp: new Date().toISOString() + } + ]); + + if (error) throw error; + return data; + } catch (error) { + console.error('Error adding chat history:', error); + throw error; + } +} + +async function getHistory(user_id) { + try { + const { data, error } = await supabase + .from('chat_history') + .select('*') + .eq('user_id', user_id) + .order('timestamp', { ascending: false }); + + if (error) throw error; + return data; + } catch (error) { + console.error('Error getting chat history:', error); + throw error; + } +} + +async function deleteHistory(user_id) { + try { + const { data, error } = await supabase + .from('chat_history') + .delete() + .eq('user_id', user_id); + + if (error) throw error; + return data; + } catch (error) { + console.error('Error deleting chat history:', error); + throw error; + } +} + +module.exports = { addHistory, getHistory, deleteHistory }; \ No newline at end of file diff --git a/model/createRecipe.js b/model/createRecipe.js new file mode 100644 index 0000000..7e30879 --- /dev/null +++ b/model/createRecipe.js @@ -0,0 +1,177 @@ +const supabase = require("../dbConnection.js"); +const { decode } = require("base64-arraybuffer"); + +async function createRecipe( + user_id, + ingredient_id, + ingredient_quantity, + recipe_name, + cuisine_id, + total_servings, + preparation_time, + instructions, + cooking_method_id +) { + recipe = { + user_id: user_id, + recipe_name: recipe_name, + cuisine_id: cuisine_id, + total_servings: total_servings, + preparation_time: preparation_time, + ingredients: { + id: ingredient_id, + quantity: ingredient_quantity, + }, + cooking_method_id: cooking_method_id, + }; + + let calories = 0; + let fat = 0.0; + let carbohydrates = 0.0; + let protein = 0.0; + let fiber = 0.0; + let vitamin_a = 0.0; + let vitamin_b = 0.0; + let vitamin_c = 0.0; + let vitamin_d = 0.0; + let sodium = 0.0; + let sugar = 0.0; + + try { + let { data, error } = await supabase + .from("ingredients") + .select("*") + .in("id", ingredient_id); + + for (let i = 0; i < ingredient_id.length; i++) { + for (let j = 0; j < data.length; j++) { + if (data[j].id === ingredient_id[i]) { + calories = + calories + + (data[j].calories / 100) * ingredient_quantity[i]; + fat = fat + (data[j].fat / 100) * ingredient_quantity[i]; + carbohydrates = + carbohydrates + + (data[j].carbohydrates / 100) * ingredient_quantity[i]; + protein = + protein + + (data[j].protein / 100) * ingredient_quantity[i]; + fiber = + fiber + (data[j].fiber / 100) * ingredient_quantity[i]; + vitamin_a = + vitamin_a + + (data[j].vitamin_a / 100) * ingredient_quantity[i]; + vitamin_b = + vitamin_b + + (data[j].vitamin_b / 100) * ingredient_quantity[i]; + vitamin_c = + vitamin_c + + (data[j].vitamin_c / 100) * ingredient_quantity[i]; + vitamin_d = + vitamin_d + + (data[j].vitamin_d / 100) * ingredient_quantity[i]; + sodium = + sodium + + (data[j].sodium / 100) * ingredient_quantity[i]; + sugar = + sugar + (data[j].sugar / 100) * ingredient_quantity[i]; + } + } + } + + recipe.instructions = instructions; + recipe.calories = calories; + recipe.fat = fat; + recipe.carbohydrates = carbohydrates; + recipe.protein = protein; + recipe.fiber = fiber; + recipe.vitamin_a = vitamin_a; + recipe.vitamin_b = vitamin_b; + recipe.vitamin_c = vitamin_c; + recipe.vitamin_d = vitamin_d; + recipe.sodium = sodium; + recipe.sugar = sugar; + + return recipe; + } catch (error) { + throw error; + } +} + +async function saveRecipe(recipe) { + try { + let { data, error } = await supabase + .from("recipes") + .insert(recipe) + .select(); + return data; + } catch (error) { + throw error; + } +} + +async function saveImage(image, recipe_id) { + let file_name = `recipe/${recipe_id}.png`; + if (image === undefined || image === null) return null; + + try { + await supabase.storage.from("images").upload(file_name, decode(image), { + cacheControl: "3600", + upsert: false, + }); + const test = { + file_name: file_name, + display_name: file_name, + file_size: base64FileSize(image), + }; + + let { data: image_data } = await supabase + .from("images") + .insert(test) + .select("*"); + + await supabase + .from("recipes") + .update({ image_id: image_data[0].id }) // e.g { email: "sample@email.com" } + .eq("id", recipe_id); + } catch (error) { + throw error; + } +} + +function base64FileSize(base64String) { + let base64Data = base64String.split(",")[1] || base64String; + + let sizeInBytes = (base64Data.length * 3) / 4; + + if (base64Data.endsWith("==")) { + sizeInBytes -= 2; + } else if (base64Data.endsWith("=")) { + sizeInBytes -= 1; + } + + return sizeInBytes; +} + +async function saveRecipeRelation(recipe, savedDataId) { + try { + insert_object = []; + for (let i = 0; i < recipe.ingredients.id.length; i++) { + insert_object.push({ + ingredient_id: recipe.ingredients.id[i], + recipe_id: savedDataId, + user_id: recipe.user_id, + cuisine_id: recipe.cuisine_id, + cooking_method_id: recipe.cooking_method_id[i], + }); + } + let { data, error } = await supabase + .from("recipe_ingredient") + .insert(insert_object) + .select(); + return data; + } catch (error) { + throw error; + } +} +module.exports = { createRecipe, saveRecipe, saveRecipeRelation, saveImage }; diff --git a/model/createRecipeTestSample.json b/model/createRecipeTestSample.json new file mode 100644 index 0000000..55798bf --- /dev/null +++ b/model/createRecipeTestSample.json @@ -0,0 +1,10 @@ +{ + "user_id":15, + "recipe_name":"Tomato pesto chicken pasta", + "cuisine_id":5, + "total_servings":3, + "preparation_time":45, + "ingredient_id":[2,3,4,5,6,7,8], + "ingredient_quantity":[375,8,500,250,290,60,2], + "instructions":"Step 1 Cook the pasta in a large saucepan of boiling water following packet directions or until al dente. Drain, reserving ½ cup (125ml) of cooking liquid. Step 2 Meanwhile, heat oil in a large, deep non-stick frying pan over medium-high heat. Cook half the chicken, stirring, for 3 mins or until golden brown and cooked through. Transfer to a plate. Cover with foil to keep warm. Repeat with the remaining chicken. Step 3 Add the tomatoes to the pan and cook for 3 mins or until the tomatoes begin to collapse. Remove from heat. Transfer tomatoes to a separate plate. Step 4 Return chicken to pan with the pasta, pesto and reserved cooking liquid. Season. Toss to combine. Stir in the rocket. Top with the tomatoes." +} \ No newline at end of file diff --git a/model/deleteAppointment.js b/model/deleteAppointment.js new file mode 100644 index 0000000..326a87f --- /dev/null +++ b/model/deleteAppointment.js @@ -0,0 +1,20 @@ +const supabase = require('../dbConnection.js'); + +async function deleteAppointment(user_id, date, time, description) { + try { + let { error } = await supabase + .from('appointments') + .delete() + .eq('user_id', user_id) + .eq('date', date) + .eq('time', time) + .eq('description', description); + if (error) { + throw new Error('Error deleting appointment') + } + } catch (error) { + throw error; + } +} + +module.exports = deleteAppointment; \ No newline at end of file diff --git a/model/deleteUser.js b/model/deleteUser.js new file mode 100644 index 0000000..8578b4f --- /dev/null +++ b/model/deleteUser.js @@ -0,0 +1,17 @@ +const supabase = require('../dbConnection.js'); + +async function deleteUser(user_id) { + try { + let { error } = await supabase + .from('users') + .delete() + .eq('user_id', user_id) + if (error) { + throw new Error('Error deleting user') + } + } catch (error) { + throw error; + } +} + +module.exports = deleteUser; \ No newline at end of file diff --git a/model/deleteUserRecipes.js b/model/deleteUserRecipes.js new file mode 100644 index 0000000..bae1734 --- /dev/null +++ b/model/deleteUserRecipes.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function deleteUserRecipes(user_id, recipe_id ) { + + try { + let { data, error } = await supabase + .from('recipes') + .delete() + .eq('id', recipe_id) + .eq('user_id', user_id) + + return data + + } catch (error) { + throw error; + } +} + +module.exports = {deleteUserRecipes} \ No newline at end of file diff --git a/model/fetchAllAllergies.js b/model/fetchAllAllergies.js new file mode 100644 index 0000000..7ad895b --- /dev/null +++ b/model/fetchAllAllergies.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllAllergies() { + try { + let { data, error } = await supabase + .from('allergies') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllAllergies; \ No newline at end of file diff --git a/model/fetchAllCookingMethods.js b/model/fetchAllCookingMethods.js new file mode 100644 index 0000000..14e8bbf --- /dev/null +++ b/model/fetchAllCookingMethods.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllCookingMethods() { + try { + let { data, error } = await supabase + .from('cooking_methods') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllCookingMethods; \ No newline at end of file diff --git a/model/fetchAllCuisines.js b/model/fetchAllCuisines.js new file mode 100644 index 0000000..0ecb213 --- /dev/null +++ b/model/fetchAllCuisines.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllCuisines() { + try { + let { data, error } = await supabase + .from('cuisines') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllCuisines; \ No newline at end of file diff --git a/model/fetchAllDietaryRequirements.js b/model/fetchAllDietaryRequirements.js new file mode 100644 index 0000000..ab5cc06 --- /dev/null +++ b/model/fetchAllDietaryRequirements.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllDietaryRequirements() { + try { + let { data, error } = await supabase + .from('dietary_requirements') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllDietaryRequirements; \ No newline at end of file diff --git a/model/fetchAllHealthConditions.js b/model/fetchAllHealthConditions.js new file mode 100644 index 0000000..ebda6b1 --- /dev/null +++ b/model/fetchAllHealthConditions.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllHealthConditions() { + try { + let { data, error } = await supabase + .from('health_conditions') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllHealthConditions; \ No newline at end of file diff --git a/model/fetchAllIngredients.js b/model/fetchAllIngredients.js new file mode 100644 index 0000000..46d4ab8 --- /dev/null +++ b/model/fetchAllIngredients.js @@ -0,0 +1,19 @@ +const supabase = require("../dbConnection.js"); + +async function fetchAllIngredients() { + try { + let { data, error } = await supabase + .from('ingredients') + .select('id, name, category'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllIngredients; \ No newline at end of file diff --git a/model/fetchAllSpiceLevels.js b/model/fetchAllSpiceLevels.js new file mode 100644 index 0000000..35b59f4 --- /dev/null +++ b/model/fetchAllSpiceLevels.js @@ -0,0 +1,19 @@ +const supabase = require('../dbConnection.js'); + +async function fetchAllSpiceLevels() { + try { + let { data, error } = await supabase + .from('spice_levels') + .select('*'); + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = fetchAllSpiceLevels; \ No newline at end of file diff --git a/model/fetchIngredientSubstitution.js b/model/fetchIngredientSubstitution.js new file mode 100644 index 0000000..a9fe25e --- /dev/null +++ b/model/fetchIngredientSubstitution.js @@ -0,0 +1,96 @@ +module.exports = { getSubstitutes }; + + +const supabase = require("../dbConnection.js"); + +/** + * Fetches substitution options for a given ingredient + * @param {number} ingredientId - The ID of the ingredient to find substitutions for + * @param {Object} options - Optional filtering parameters + * @param {Array} options.allergies - Array of allergy IDs to exclude + * @param {Array} options.dietaryRequirements - Array of dietary requirement IDs to filter by + * @param {Array} options.healthConditions - Array of health condition IDs to consider + * @returns {Promise} - Array of substitute ingredients with their details + */ +async function fetchIngredientSubstitutions(ingredientId, options = {}) { + try { + // First, get the original ingredient to know its category + let { data: originalIngredient, error: originalError } = await supabase + .from('ingredients') + .select('id, name, category') + .eq('id', ingredientId) + .single(); + + if (originalError) { + throw originalError; + } + + if (!originalIngredient) { + throw new Error('Ingredient not found'); + } + + // Build the query for substitutes in the same category + let query = supabase + .from('ingredients') + .select('id, name, category') + .eq('category', originalIngredient.category) + .neq('id', ingredientId); // Exclude the original ingredient + + // Apply filters based on options + if (options.allergies && options.allergies.length > 0) { + // Maps ingredients to allergies + const { data: allergyIngredients } = await supabase + .from('ingredient_allergies') + .select('ingredient_id') + .in('allergy_id', options.allergies); + + if (allergyIngredients && allergyIngredients.length > 0) { + const allergyIngredientIds = allergyIngredients.map(item => item.ingredient_id); + query = query.not('id', 'in', allergyIngredientIds); + } + } + + if (options.dietaryRequirements && options.dietaryRequirements.length > 0) { + // Maps ingredients to dietary requirements + const { data: dietaryIngredients } = await supabase + .from('user_dietary_requirements') + .select('ingredient_id') + .in('dietary_requirement_id', options.dietaryRequirements); + + if (dietaryIngredients && dietaryIngredients.length > 0) { + const dietaryIngredientIds = dietaryIngredients.map(item => item.ingredient_id); + query = query.in('id', dietaryIngredientIds); + } + } + + if (options.healthConditions && options.healthConditions.length > 0) { + // Maps ingredients to health conditions + const { data: healthIngredients } = await supabase + .from('user_health_conditions') + .select('ingredient_id') + .in('health_condition_id', options.healthConditions); + + if (healthIngredients && healthIngredients.length > 0) { + const healthIngredientIds = healthIngredients.map(item => item.ingredient_id); + query = query.in('id', healthIngredientIds); + } + } + + // Execute the query + let { data, error } = await query; + + if (error) { + throw error; + } + + // Return the substitutes along with the original ingredient + return { + original: originalIngredient, + substitutes: data || [] + }; + } catch (error) { + throw error; + } +} + +module.exports = fetchIngredientSubstitutions; diff --git a/model/fetchIngredientSubstitutions.js b/model/fetchIngredientSubstitutions.js new file mode 100644 index 0000000..af0b3d4 --- /dev/null +++ b/model/fetchIngredientSubstitutions.js @@ -0,0 +1,630 @@ +const supabase = require("../dbConnection.js"); + +/** + * Fetches substitution options for a given ingredient + * @param {number} ingredientId - The ID of the ingredient to find substitutions for + * @param {Object} options - Optional filtering parameters + * @param {Array} options.allergies - Array of allergy IDs to exclude + * @param {Array} options.dietaryRequirements - Array of dietary requirement IDs to filter by + * @param {Array} options.healthConditions - Array of health condition IDs to consider + * @returns {Promise} - Object containing original ingredient and array of substitute ingredients + */ +async function fetchIngredientSubstitutions(ingredientId, options = {}) { + // Input validation + if (!ingredientId) { + const error = new Error('Ingredient ID is required'); + console.error('Missing ingredientId parameter'); + throw error; + } + + const parsedId = parseInt(ingredientId); + if (isNaN(parsedId)) { + const error = new Error('Invalid ingredient ID'); + console.error(`Invalid ingredientId: ${ingredientId} is not a number`); + throw error; + } + + // Validate options object structure and ensure arrays are properly initialized + // Handle allergies parameter + if (options.allergies !== undefined) { + if (!Array.isArray(options.allergies)) { + console.error(`Invalid allergies format: ${typeof options.allergies}`); + // Try to parse string if it's a comma-separated list + if (typeof options.allergies === 'string') { + try { + options.allergies = options.allergies.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed allergies from string: ${JSON.stringify(options.allergies)}`); + } catch (parseError) { + console.error('Error parsing allergies string:', parseError); + options.allergies = []; + } + } else { + // Convert to empty array for other non-array types + options.allergies = []; + console.log('Converted allergies to empty array'); + } + } else { + // Ensure all array elements are integers + options.allergies = options.allergies.map(id => parseInt(id)).filter(id => !isNaN(id)); + console.log(`Validated allergies array: ${JSON.stringify(options.allergies)}`); + } + } + + // Handle dietary requirements parameter + if (options.dietaryRequirements !== undefined) { + if (!Array.isArray(options.dietaryRequirements)) { + console.error(`Invalid dietary requirements format: ${typeof options.dietaryRequirements}`); + // Try to parse string if it's a comma-separated list + if (typeof options.dietaryRequirements === 'string') { + try { + options.dietaryRequirements = options.dietaryRequirements.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed dietary requirements from string: ${JSON.stringify(options.dietaryRequirements)}`); + } catch (parseError) { + console.error('Error parsing dietary requirements string:', parseError); + options.dietaryRequirements = []; + } + } else { + // Convert to empty array for other non-array types + options.dietaryRequirements = []; + console.log('Converted dietary requirements to empty array'); + } + } else { + // Ensure all array elements are integers + options.dietaryRequirements = options.dietaryRequirements.map(id => parseInt(id)).filter(id => !isNaN(id)); + console.log(`Validated dietary requirements array: ${JSON.stringify(options.dietaryRequirements)}`); + } + } + + // Handle health conditions parameter + if (options.healthConditions !== undefined) { + if (!Array.isArray(options.healthConditions)) { + console.error(`Invalid health conditions format: ${typeof options.healthConditions}`); + // Try to parse string if it's a comma-separated list + if (typeof options.healthConditions === 'string') { + try { + options.healthConditions = options.healthConditions.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)); + console.log(`Parsed health conditions from string: ${JSON.stringify(options.healthConditions)}`); + } catch (parseError) { + console.error('Error parsing health conditions string:', parseError); + options.healthConditions = []; + } + } else { + // Convert to empty array for other non-array types + options.healthConditions = []; + console.log('Converted health conditions to empty array'); + } + } else { + // Ensure all array elements are integers + options.healthConditions = options.healthConditions.map(id => parseInt(id)).filter(id => !isNaN(id)); + console.log(`Validated health conditions array: ${JSON.stringify(options.healthConditions)}`); + } + } + + try { + // First, get the original ingredient to know its category + console.log(`Fetching original ingredient with ID: ${parsedId}`); + let { data: originalIngredient, error: originalError } = await supabase + .from('ingredients_new') + .select('ingredient_id, name, category') + .eq('ingredient_id', parsedId) + .single(); + + if (originalError) { + console.error('Error fetching original ingredient:', originalError); + throw new Error(`Database error: ${originalError.message}`); + } + + if (!originalIngredient) { + console.error(`Ingredient with ID ${parsedId} not found`); + throw new Error('Ingredient not found'); + } + + console.log(`Found original ingredient: ${originalIngredient.name} (Category: ${originalIngredient.category})`); + + // Build the query for substitutes in the same category + let query = supabase + .from('ingredients_new') + .select('ingredient_id, name, category, calories, fat, carbohydrates, protein, fiber, sodium, sugar') + .eq('category', originalIngredient.category) + .neq('ingredient_id', parsedId); // Exclude the original ingredient + + // Process allergies filter + if (options.allergies && Array.isArray(options.allergies) && options.allergies.length > 0) { + try { + console.log(`Processing allergies filter with ${options.allergies.length} items`); + + // Ensure allergies is an array of numbers + let validAllergyIds = []; + if (Array.isArray(options.allergies)) { + validAllergyIds = options.allergies + .filter(id => !isNaN(parseInt(id))) + .map(id => parseInt(id)); + } else { + console.error('Allergies is not an array, this should not happen as controller should convert it'); + // Fallback handling just in case + validAllergyIds = []; + } + + console.log(`Valid allergy IDs: ${JSON.stringify(validAllergyIds)}`); + + if (validAllergyIds.length > 0) { + console.log(`Processing ${validAllergyIds.length} allergy IDs directly`); + // First, verify the allergies exist in the allergens_new table + const { data: allergenInfo, error: allergenError } = await supabase + .from('allergens_new') + .select('allergen_id, standard_name') + .in('allergen_id', validAllergyIds); + + if (allergenError) { + console.error('Error fetching allergen information:', allergenError); + throw new Error(`Database error: ${allergenError.message}`); + } + + console.log(`Found ${allergenInfo ? allergenInfo.length : 0} allergens`); + + if (allergenInfo && allergenInfo.length > 0) { + // Get all ingredients that contain these allergens using the ingredient_allergens mapping table + const { data: ingredientsWithAllergens, error: ingredientAllergenError } = await supabase + .from('ingredient_allergens') + .select('ingredient_id') + .in('allergen_id', validAllergyIds); + + if (ingredientAllergenError) { + console.error('Error fetching ingredients with allergens:', ingredientAllergenError); + throw new Error(`Database error: ${ingredientAllergenError.message}`); + } + + // Extract ingredient IDs to exclude + let ingredientsToExclude = []; + if (ingredientsWithAllergens && ingredientsWithAllergens.length > 0) { + ingredientsToExclude = ingredientsWithAllergens.map(item => item.ingredient_id); + // Remove duplicates + ingredientsToExclude = [...new Set(ingredientsToExclude)]; + console.log(`Found ${ingredientsToExclude.length} ingredients to exclude due to allergens`); + } + + if (ingredientsToExclude.length > 0) { + console.log(`Excluding ${ingredientsToExclude.length} ingredients due to allergies`); + query = query.not('ingredient_id', 'in', `(${ingredientsToExclude.join(',')})`); + } else { + console.log('No ingredients found to exclude based on allergies'); + } + } else { + console.log('No valid allergens found with the provided IDs'); + } + } + } catch (allergyProcessingError) { + console.error('Error processing allergies:', allergyProcessingError); + // Instead of throwing an error, we'll log it and continue without allergy filtering + console.log('Continuing without allergy filtering due to error'); + } + } + + // Process dietary requirements filter using dietary_requirement_new and dietary_requirement_ingredients tables + if (options.dietaryRequirements && Array.isArray(options.dietaryRequirements) && options.dietaryRequirements.length > 0) { + try { + console.log(`Processing dietary requirements filter with ${options.dietaryRequirements.length} items`); + + // Ensure dietary requirements is an array of numbers + let validDietaryIds = []; + if (Array.isArray(options.dietaryRequirements)) { + validDietaryIds = options.dietaryRequirements + .filter(id => !isNaN(parseInt(id))) + .map(id => parseInt(id)); + } else { + console.error('DietaryRequirements is not an array, this should not happen as controller should convert it'); + // Fallback handling just in case + validDietaryIds = []; + } + + console.log(`Valid dietary requirement IDs: ${JSON.stringify(validDietaryIds)}`); + + if (validDietaryIds.length > 0) { + // Get dietary requirements information from dietary_requirement_new table + const { data: dietaryRequirementInfo, error: dietaryError } = await supabase + .from('dietary_requirement_new') + .select('dietary_requirement_id, requirement_name') + .in('dietary_requirement_id', validDietaryIds); + + if (dietaryError) { + console.error('Error fetching dietary requirements:', dietaryError); + throw new Error(`Database error: ${dietaryError.message}`); + } + + if (dietaryRequirementInfo && dietaryRequirementInfo.length > 0) { + console.log(`Found ${dietaryRequirementInfo.length} dietary requirements to consider`); + + // Get the dietary_requirement_ingredients mapping data for these dietary requirements + const { data: dietaryIngredients, error: dietaryIngredientsError } = await supabase + .from('dietary_requirement_ingredients') + .select('dietary_requirement_id, ingredient_id, recommendation_type') + .in('dietary_requirement_id', validDietaryIds); + + if (dietaryIngredientsError) { + console.error('Error fetching dietary requirement ingredients mapping:', dietaryIngredientsError); + throw new Error(`Database error: ${dietaryIngredientsError.message}`); + } + + if (dietaryIngredients && dietaryIngredients.length > 0) { + console.log(`Found ${dietaryIngredients.length} dietary requirement-ingredient mappings`); + + // Separate ingredients into include and avoid categories based on recommendation_type + const includeIngredients = {}; + const avoidIngredients = {}; + + // Initialize arrays for each dietary requirement + validDietaryIds.forEach(id => { + includeIngredients[id] = []; + avoidIngredients[id] = []; + }); + + // Populate the arrays based on recommendation_type + dietaryIngredients.forEach(item => { + if (item.recommendation_type === 'include') { + includeIngredients[item.dietary_requirement_id].push(item.ingredient_id); + } else if (item.recommendation_type === 'avoid') { + avoidIngredients[item.dietary_requirement_id].push(item.ingredient_id); + } + }); + + // Log the counts for debugging + validDietaryIds.forEach(id => { + console.log(`Dietary requirement ${id}: ${includeIngredients[id].length} include ingredients, ${avoidIngredients[id].length} avoid ingredients`); + }); + + // Exclude all ingredients that should be avoided for any of the dietary requirements + let allAvoidIngredients = []; + validDietaryIds.forEach(id => { + allAvoidIngredients = [...allAvoidIngredients, ...avoidIngredients[id]]; + }); + + // Remove duplicates + allAvoidIngredients = [...new Set(allAvoidIngredients)]; + + if (allAvoidIngredients.length > 0) { + console.log(`Excluding ${allAvoidIngredients.length} ingredients to avoid based on dietary requirements`); + query = query.not('ingredient_id', 'in', `(${allAvoidIngredients.join(',')})`); + } + + // Find ingredients that are recommended (include) for ALL selected dietary requirements + // Only apply this filter if there are actual include ingredients + let hasIncludeRecommendations = false; + validDietaryIds.forEach(id => { + if (includeIngredients[id].length > 0) { + hasIncludeRecommendations = true; + } + }); + + if (hasIncludeRecommendations) { + // Get the intersection of all include ingredients + let includeForAllRequirements = null; + + validDietaryIds.forEach(id => { + if (includeIngredients[id].length > 0) { + if (includeForAllRequirements === null) { + includeForAllRequirements = [...includeIngredients[id]]; + } else { + includeForAllRequirements = includeForAllRequirements.filter(ingredientId => + includeIngredients[id].includes(ingredientId)); + } + } + }); + + // If we have ingredients recommended for all dietary requirements, prioritize them + if (includeForAllRequirements && includeForAllRequirements.length > 0) { + console.log(`Prioritizing ${includeForAllRequirements.length} ingredients recommended for all dietary requirements`); + query = query.in('ingredient_id', includeForAllRequirements); + } + } + } else { + console.log('No dietary requirement-ingredient mappings found, falling back to default filtering'); + + // Fallback to using the requirement_name for basic filtering + const dietaryIngredientMapping = {}; + + // For each dietary requirement, identify suitable ingredients based on name + for (const dietaryReq of dietaryRequirementInfo) { + let ingredientQuery; + + // Different logic based on dietary requirement type + switch(dietaryReq.requirement_name.toLowerCase()) { + case 'vegetarian': + // For vegetarian, exclude meat and fish categories + const { data: vegetarianIngredients, error: vegError } = await supabase + .from('ingredients_new') + .select('ingredient_id') + .not('category', 'in', '(meat,fish,poultry)'); + + if (!vegError && vegetarianIngredients) { + dietaryIngredientMapping[dietaryReq.dietary_requirement_id] = vegetarianIngredients.map(ing => ing.ingredient_id); + } + break; + + case 'vegan': + // For vegan, exclude animal products + const { data: veganIngredients, error: veganError } = await supabase + .from('ingredients_new') + .select('ingredient_id') + .not('category', 'in', '(meat,fish,poultry,dairy,eggs)'); + + if (!veganError && veganIngredients) { + dietaryIngredientMapping[dietaryReq.dietary_requirement_id] = veganIngredients.map(ing => ing.ingredient_id); + } + break; + + case 'gluten-free': + // For gluten-free, exclude wheat-based ingredients + const { data: glutenFreeIngredients, error: gfError } = await supabase + .from('ingredients_new') + .select('ingredient_id') + .not('name', 'ilike', '%wheat%') + .not('name', 'ilike', '%gluten%') + .not('name', 'ilike', '%barley%') + .not('name', 'ilike', '%rye%'); + + if (!gfError && glutenFreeIngredients) { + dietaryIngredientMapping[dietaryReq.dietary_requirement_id] = glutenFreeIngredients.map(ing => ing.ingredient_id); + } + break; + + default: + // For other dietary requirements, use a keyword match approach + const { data: matchingIngredients, error: matchError } = await supabase + .from('ingredients_new') + .select('ingredient_id') + .ilike('name', `%${dietaryReq.requirement_name}%`); + + if (!matchError && matchingIngredients) { + dietaryIngredientMapping[dietaryReq.dietary_requirement_id] = matchingIngredients.map(ing => ing.ingredient_id); + } + break; + } + + console.log(`Mapped dietary requirement ${dietaryReq.requirement_name} to ${dietaryIngredientMapping[dietaryReq.dietary_requirement_id]?.length || 0} ingredients`); + } + + // Find ingredients that satisfy ALL dietary requirements (intersection) + let validIngredientIds = []; + let isFirst = true; + + for (const dietaryId in dietaryIngredientMapping) { + if (isFirst) { + validIngredientIds = dietaryIngredientMapping[dietaryId] || []; + isFirst = false; + } else { + // Keep only ingredients that are in both arrays (intersection) + validIngredientIds = validIngredientIds.filter(id => + dietaryIngredientMapping[dietaryId].includes(id)); + } + } + + if (validIngredientIds.length > 0) { + console.log(`Including ${validIngredientIds.length} ingredients that match all dietary requirements`); + query = query.in('ingredient_id', validIngredientIds); + } else { + console.log('No ingredients found that match all dietary requirements'); + } + } + } else { + console.log('No valid dietary requirements found with the provided IDs'); + } + } + } catch (dietaryProcessingError) { + console.error('Error processing dietary requirements:', dietaryProcessingError); + // Instead of throwing an error, we'll log it and continue without dietary filtering + console.log('Continuing without dietary filtering due to error'); + } + } + + // Process health conditions filter using health_conditions_new and condition_ingredients tables + if (options.healthConditions && Array.isArray(options.healthConditions) && options.healthConditions.length > 0) { + try { + console.log(`Processing health conditions filter with ${options.healthConditions.length} items`); + + // Ensure health conditions is an array of numbers + let validHealthIds = []; + if (Array.isArray(options.healthConditions)) { + validHealthIds = options.healthConditions + .filter(id => !isNaN(parseInt(id))) + .map(id => parseInt(id)); + } else { + console.error('HealthConditions is not an array, this should not happen as controller should convert it'); + // Fallback handling just in case + validHealthIds = []; + } + + console.log(`Valid health condition IDs: ${JSON.stringify(validHealthIds)}`); + + if (validHealthIds.length > 0) { + // Get health conditions information from health_conditions_new table + const { data: healthConditionInfo, error: healthError } = await supabase + .from('health_conditions_new') + .select('condition_id, name, description, recommended_foods, restricted_foods, severity_level') + .in('condition_id', validHealthIds); + + if (healthError) { + console.error('Error fetching health conditions:', healthError); + throw new Error(`Database error: ${healthError.message}`); + } + + if (healthConditionInfo && healthConditionInfo.length > 0) { + console.log(`Found ${healthConditionInfo.length} health conditions to consider`); + + // Get the condition_ingredients mapping data for these health conditions + const { data: conditionIngredients, error: conditionIngredientsError } = await supabase + .from('condition_ingredients') + .select('condition_id, ingredient_id, recommendation_type') + .in('condition_id', validHealthIds); + + if (conditionIngredientsError) { + console.error('Error fetching condition ingredients mapping:', conditionIngredientsError); + throw new Error(`Database error: ${conditionIngredientsError.message}`); + } + + if (conditionIngredients && conditionIngredients.length > 0) { + console.log(`Found ${conditionIngredients.length} condition-ingredient mappings`); + + // Separate ingredients into include and avoid categories based on recommendation_type + const includeIngredients = {}; + const avoidIngredients = {}; + + // Initialize arrays for each condition + validHealthIds.forEach(id => { + includeIngredients[id] = []; + avoidIngredients[id] = []; + }); + + // Populate the arrays based on recommendation_type + conditionIngredients.forEach(item => { + if (item.recommendation_type === 'include') { + includeIngredients[item.condition_id].push(item.ingredient_id); + } else if (item.recommendation_type === 'avoid') { + avoidIngredients[item.condition_id].push(item.ingredient_id); + } + }); + + // Log the counts for debugging + validHealthIds.forEach(id => { + console.log(`Condition ${id}: ${includeIngredients[id].length} include ingredients, ${avoidIngredients[id].length} avoid ingredients`); + }); + + // Exclude all ingredients that should be avoided for any of the conditions + let allAvoidIngredients = []; + validHealthIds.forEach(id => { + allAvoidIngredients = [...allAvoidIngredients, ...avoidIngredients[id]]; + }); + + // Remove duplicates + allAvoidIngredients = [...new Set(allAvoidIngredients)]; + + if (allAvoidIngredients.length > 0) { + console.log(`Excluding ${allAvoidIngredients.length} ingredients to avoid based on health conditions`); + query = query.not('ingredient_id', 'in', `(${allAvoidIngredients.join(',')})`); + } + + // Find ingredients that are recommended (include) for ALL selected health conditions + // Only apply this filter if there are actual include ingredients + let hasIncludeRecommendations = false; + validHealthIds.forEach(id => { + if (includeIngredients[id].length > 0) { + hasIncludeRecommendations = true; + } + }); + + if (hasIncludeRecommendations) { + // Get the intersection of all include ingredients + let includeForAllConditions = null; + + validHealthIds.forEach(id => { + if (includeIngredients[id].length > 0) { + if (includeForAllConditions === null) { + includeForAllConditions = [...includeIngredients[id]]; + } else { + includeForAllConditions = includeForAllConditions.filter(ingredientId => + includeIngredients[id].includes(ingredientId)); + } + } + }); + + // If we have ingredients recommended for all conditions, prioritize them + if (includeForAllConditions && includeForAllConditions.length > 0) { + console.log(`Prioritizing ${includeForAllConditions.length} ingredients recommended for all health conditions`); + // We'll use a union query to prioritize recommended ingredients but still show others + // This is a simplified approach - in a real implementation, you might want to add a 'recommended' flag + // to the results instead of filtering + query = query.in('ingredient_id', includeForAllConditions); + } + } + } else { + console.log('No condition-ingredient mappings found, using health condition metadata'); + + // Fallback to using the recommended_foods and restricted_foods arrays from health_conditions_new + let allRestrictedFoods = []; + let allRecommendedFoods = []; + + healthConditionInfo.forEach(condition => { + if (condition.restricted_foods && Array.isArray(condition.restricted_foods)) { + allRestrictedFoods = [...allRestrictedFoods, ...condition.restricted_foods]; + } + if (condition.recommended_foods && Array.isArray(condition.recommended_foods)) { + allRecommendedFoods = [...allRecommendedFoods, ...condition.recommended_foods]; + } + }); + + // Remove duplicates + allRestrictedFoods = [...new Set(allRestrictedFoods)]; + allRecommendedFoods = [...new Set(allRecommendedFoods)]; + + if (allRestrictedFoods.length > 0) { + console.log(`Using ${allRestrictedFoods.length} restricted foods from health conditions metadata`); + // Exclude ingredients that match restricted food keywords + allRestrictedFoods.forEach(food => { + query = query.not('name', 'ilike', `%${food}%`); + }); + } + + if (allRecommendedFoods.length > 0) { + console.log(`Using ${allRecommendedFoods.length} recommended foods from health conditions metadata`); + // Create a separate query for recommended foods and use it to prioritize results + let recommendedQuery = supabase + .from('ingredients_new') + .select('ingredient_id') + .eq('category', originalIngredient.category) + .neq('ingredient_id', parsedId); + + // Add conditions for each recommended food keyword + allRecommendedFoods.forEach(food => { + recommendedQuery = recommendedQuery.or(`name.ilike.%${food}%`); + }); + + const { data: recommendedIngredients, error: recommendedError } = await recommendedQuery; + + if (!recommendedError && recommendedIngredients && recommendedIngredients.length > 0) { + const recommendedIds = recommendedIngredients.map(item => item.ingredient_id); + console.log(`Found ${recommendedIds.length} ingredients matching recommended foods`); + query = query.in('ingredient_id', recommendedIds); + } + } + } + } else { + console.log('No valid health conditions found with the provided IDs'); + } + } + } catch (healthProcessingError) { + console.error('Error processing health conditions:', healthProcessingError); + // Instead of throwing an error, we'll log it and continue without health condition filtering + console.log('Continuing without health condition filtering due to error'); + } + } + + // Execute the query with pagination to limit result size + console.log('Executing final query for substitutes'); + const PAGE_SIZE = 50; // Limit results to prevent excessive data transfer + let { data, error, count } = await query + .select('ingredient_id, name, category', { count: 'exact' }) + .limit(PAGE_SIZE); + + if (error) { + console.error('Error fetching substitutes:', error); + throw new Error(`Database error: ${error.message}`); + } + + const result = { + original: originalIngredient, + substitutes: data || [], + pagination: { + total: count || 0, + limit: PAGE_SIZE, + hasMore: (count || 0) > PAGE_SIZE + } + }; + + console.log(`Found ${result.substitutes.length} substitutes for ${originalIngredient.name}`); + return result; + } catch (error) { + console.error('Error in fetchIngredientSubstitutions:', error); + throw error; + } +} + +module.exports = fetchIngredientSubstitutions; \ No newline at end of file diff --git a/model/fetchUserPreferences.js b/model/fetchUserPreferences.js new file mode 100644 index 0000000..1cd2973 --- /dev/null +++ b/model/fetchUserPreferences.js @@ -0,0 +1,61 @@ +const supabase = require("../dbConnection.js"); + +async function fetchUserPreferences(userId) { + try { + const { data: dietaryRequirements, error: drError } = await supabase + .from('user_dietary_requirements') + .select('...dietary_requirement_id(id, name)') + .eq('user_id', userId); + if (drError) throw drError; + + const { data: allergies, error: aError } = await supabase + .from('user_allergies') + .select('...allergy_id(id, name)') + .eq('user_id', userId); + if (aError) throw aError; + + const { data: cuisines, error: cError } = await supabase + .from('user_cuisines') + .select('...cuisine_id(id, name)') + .eq('user_id', userId); + if (cError) throw cError; + + const { data: dislikes, error: dError } = await supabase + .from('user_dislikes') + .select('...dislike_id(id, name)') + .eq('user_id', userId); + if (dError) throw dError; + + const { data: healthConditions, error: hcError } = await supabase + .from('user_health_conditions') + .select('...health_condition_id(id, name)') + .eq('user_id', userId); + if (hcError) throw hcError; + + const { data: spiceLevels, error: slError } = await supabase + .from('user_spice_levels') + .select('...spice_level_id(id, name)') + .eq('user_id', userId); + if (slError) throw slError; + + const { data: cookingMethods, error: cmError } = await supabase + .from('user_cooking_methods') + .select('...cooking_method_id(id, name)') + .eq('user_id', userId); + if (cmError) throw cmError; + + return { + dietary_requirements: dietaryRequirements, + allergies: allergies, + cuisines: cuisines, + dislikes: dislikes, + health_conditions: healthConditions, + spice_levels: spiceLevels, + cooking_methods: cookingMethods + }; + } catch (error) { + throw error; + } +} + +module.exports = fetchUserPreferences; diff --git a/model/getAppointments.js b/model/getAppointments.js new file mode 100644 index 0000000..f818472 --- /dev/null +++ b/model/getAppointments.js @@ -0,0 +1,20 @@ +const supabase = require('../dbConnection.js'); + +async function getAllAppointments() { + try { + // Fetch all appointment data from the appointments table + let { data, error } = await supabase + .from('appointments') + .select('*'); // Select all columns + + if (error) { + throw error; + } + + return data; + } catch (error) { + throw error; + } +} + +module.exports = getAllAppointments; diff --git a/model/getEstimatedCost.js b/model/getEstimatedCost.js new file mode 100644 index 0000000..1364524 --- /dev/null +++ b/model/getEstimatedCost.js @@ -0,0 +1,192 @@ +const supabase = require("../dbConnection.js"); + +//For getting the ingredients price from the DB +async function getIngredientsPrice(ingredient_id) { + try { + let { data, error } = await supabase + .from("ingredient_price") + .select("*") + .in("ingredient_id", ingredient_id); + return data; + } catch (error) { + throw error; + } +} + +//To convert the units +function convertUnits(value, fromMeasurement, toMeasurement) { + const result = { + unit: 0, + measurement: toMeasurement + } + + const conversions = { + weight: { g: 1, kg: 0.001 }, + liquid: { l: 1, ml: 1000 } + }; + + if (fromMeasurement === "ea") { + if (toMeasurement === "ea") { + result.unit = value; + return result; + } else { + throw new Error("Invalid unit conversion"); + } + } + + if (toMeasurement === "N/A") { + // Use g/ml as default + if (conversions.weight[fromMeasurement]) { + result.unit = value * (conversions.weight["g"] / conversions.weight[fromMeasurement]); + result.measurement = "g"; + return result; + } else if (conversions.liquid[fromMeasurement]) { + result.unit = value * (conversions.liquid["ml"] / conversions.liquid[fromMeasurement]); + result.measurement = "ml"; + return result; + } else { + throw new Error("Invalid unit conversion"); + } + } else { + if (conversions.weight[fromMeasurement] && conversions.weight[toMeasurement]) { + result.unit = value * (conversions.weight[toMeasurement] / conversions.weight[fromMeasurement]); + return result; + } else if (conversions.liquid[fromMeasurement] && conversions.liquid[toMeasurement]) { + result.unit = value * (conversions.liquid[toMeasurement] / conversions.liquid[fromMeasurement]); + return result; + } else { + throw new Error("Invalid unit conversion"); + } + } +} + + + +//To estimate the Ingredients Cost(lowest and highest) +function estimateIngredientsCost(ingredients, ingredients_price) { //return grouped data initially. + // Group ingredients by their id + var groupedIngredientsPrice = {}; + ingredients_price.forEach(( ingredient ) => { + let id = ingredient.ingredient_id; + if (groupedIngredientsPrice[id] == undefined) { + groupedIngredientsPrice[id] = []; + } + groupedIngredientsPrice[id].push(ingredient); + }) + + // Find minimum purchase quantity for every ingredients + // Each grocery store has different price -> low total price and high total price + const lowPriceRequiredIngredients = []; + const highPriceRequiredIngredients = []; + if ((ingredients.id.length === ingredients.quantity.length) && (ingredients.id.length === ingredients.measurement.length)) { + for (let i=0; i skip this ingredient + if (ingre) { + ingre = ingre.filter((item) => { + try { + let convertedResult = convertUnits(item.unit, item.measurement, target_measurement); + let estimatedPurchase = 1; + while (convertedResult.unit * estimatedPurchase < target_qty) { + estimatedPurchase += 1; + } + item.estimation = { + "unit": convertedResult.unit, + "measurement": convertedResult.measurement, + "purchase": estimatedPurchase, + "total_cost": estimatedPurchase * item.price + } + return true; + } catch (error) { + return false; + } + }).map(function(item) { return item; }); + } else { + ingre = []; + } + + if (ingre.length > 0) { + // Find min price + var minIngre = ingre.reduce((prev, curr) => { + return prev.estimation.total_cost < curr.estimation.total_cost ? prev : curr; + }); + lowPriceRequiredIngredients.push(minIngre); + + // Find max price + var maxIngre = ingre.reduce((prev, curr) => { + return prev.estimation.total_cost > curr.estimation.total_cost ? prev : curr; + }); + highPriceRequiredIngredients.push(maxIngre); + } + } + } + + return { + lowPriceRequiredIngredients, + highPriceRequiredIngredients + }; +} + +function prepareResponseData(lowPriceRequiredIngredients, highPriceRequiredIngredients) { + const estimatedCost = { + info: { + estimation_type: "", + include_all_wanted_ingredients: true, + minimum_cost: 0, + maximum_cost: 0 + }, + low_cost: { + price: 0, + count: 0, + ingredients: [] + }, + high_cost: { + price: 0, + count: 0, + ingredients: [] + } + }; + + let lowPriceID = [], highPriceID = []; + lowPriceRequiredIngredients.forEach((ingre) => { + estimatedCost.low_cost.ingredients.push({ + ingredient_id: ingre.ingredient_id, + product_name: ingre.name, + quantity: ingre.estimation.unit + ingre.estimation.measurement, + purchase_quantity: ingre.estimation.purchase, + total_cost: ingre.estimation.total_cost + }) + estimatedCost.info.minimum_cost += ingre.estimation.total_cost; + lowPriceID.push(ingre.ingredient_id); + }) + highPriceRequiredIngredients.forEach((ingre) => { + estimatedCost.high_cost.ingredients.push({ + ingredient_id: ingre.ingredient_id, + product_name: ingre.name, + quantity: ingre.estimation.unit + ingre.estimation.measurement, + purchase_quantity: ingre.estimation.purchase, + total_cost: ingre.estimation.total_cost + }) + estimatedCost.info.maximum_cost += ingre.estimation.total_cost; + highPriceID.push(ingre.ingredient_id); + }) + estimatedCost.info.minimum_cost = Math.round(estimatedCost.info.minimum_cost); + estimatedCost.info.maximum_cost = Math.round(estimatedCost.info.maximum_cost); + + estimatedCost.low_cost.price = estimatedCost.info.minimum_cost; + estimatedCost.low_cost.count = estimatedCost.low_cost.ingredients.length; + estimatedCost.high_cost.price = estimatedCost.info.maximum_cost; + estimatedCost.high_cost.count = estimatedCost.high_cost.ingredients.length; + return { estimatedCost, lowPriceID, highPriceID }; +} + +module.exports = { + getIngredientsPrice, + convertUnits, + estimateIngredientsCost, + prepareResponseData, +} \ No newline at end of file diff --git a/model/getFullorPartialCost.js b/model/getFullorPartialCost.js new file mode 100644 index 0000000..53e6579 --- /dev/null +++ b/model/getFullorPartialCost.js @@ -0,0 +1,93 @@ +let getRecipeIngredients = require('../model/getRecipeIngredients') +let getEstimatedCost = require('../model/getEstimatedCost'); + +async function estimateCost(recipe_id, desired_servings, exclude_ids){ + const result = { + status: 404, + error: "", + estimatedCost: {} + } + + // Recipe Scaling Option: check if scaling requested + // If yes (desired_servings > 0) -> proceed with scaled ingredients + // otherwise, get original servings + var ingredients_result; + if (desired_servings > 0) { + ingredients_result = await getRecipeIngredients.getScaledIngredientsByServing(recipe_id, desired_servings); + } else { + ingredients_result = await getRecipeIngredients.getOriginalIngredients(recipe_id); + } + + if (ingredients_result.status != 200) { + result.status = ingredients_result.status; + result.error = ingredients_result.error; + return result; + } + const ingredients = ingredients_result.ingredients; + + // Validate recipe's ingredients data + if (!ingredients || !ingredients.id || !ingredients.quantity) { + result.error = "Recipe contains invalid ingredients data, can not estimate cost"; + return result; + } + + if (!ingredients.measurement) { + ingredients.measurement = new Array(ingredients.quantity.length).fill("N/A"); + } + + // Return error if the excluding ingredients not included in recipe + let isFull = exclude_ids === ""; + if(!isFull){ + const exclude_ingre_ids = exclude_ids.split(",").map(id => parseInt(id)); + const invalid_exclude = exclude_ingre_ids.filter((id) => { + if (!ingredients.id.includes(id)) { + return true; + } + }) + if (invalid_exclude.length > 0) { + result.error = `Ingredient ${invalid_exclude.toString()} not found in recipe, can not exclude` + return result; + } + + // Filter out the unwanted ingredients + const exclude_indices = ingredients.id + .filter(id => exclude_ingre_ids.includes(id)) + .map(id => ingredients.id.indexOf(id)); + ingredients.id = ingredients.id.filter((id, i) => !exclude_indices.includes(i)) + ingredients.quantity = ingredients.quantity.filter((id, i) => !exclude_indices.includes(i)) + ingredients.measurement = ingredients.measurement.filter((id, i) => !exclude_indices.includes(i)) + } + + // Get ingredients price + const ingredients_price = await getEstimatedCost.getIngredientsPrice(ingredients.id); + + // Calculate ingredients price + const { lowPriceRequiredIngredients, highPriceRequiredIngredients } = getEstimatedCost.estimateIngredientsCost(ingredients, ingredients_price); + + if (lowPriceRequiredIngredients.length === 0 && highPriceRequiredIngredients.length === 0) { + result.error = "There was an error in estimation process"; + return result; + }; + + // Prepare response data + const { estimatedCost, lowPriceID, highPriceID } = getEstimatedCost.prepareResponseData(lowPriceRequiredIngredients, highPriceRequiredIngredients); + + // Check if missing ingredient + if (lowPriceID.length < ingredients.id.length || highPriceID.length < ingredients.id.length) { + estimatedCost.info.include_all_wanted_ingredients = false; + } else { + estimatedCost.info.include_all_wanted_ingredients = true; + } + + // Add estimation info + if (isFull) { estimatedCost.info.estimation_type = "full"; } + else { estimatedCost.info.estimation_type = "partial"; } + + result.status = 200; + result.estimatedCost = estimatedCost; + return result; +} + +module.exports ={ + estimateCost, +} \ No newline at end of file diff --git a/model/getHealthArticles.js b/model/getHealthArticles.js new file mode 100644 index 0000000..0f54a26 --- /dev/null +++ b/model/getHealthArticles.js @@ -0,0 +1,16 @@ +const supabase = require('../dbConnection'); + +const getHealthArticles = async (query) => { + const { data, error } = await supabase + .from('health_articles') + .select('*') + .or(`title.ilike.%${query}%,tags.cs.{${query}}`); + + if (error) { + throw new Error(error.message); + } + + return data; +}; + +module.exports = getHealthArticles; diff --git a/model/getMealPlanByUserIdAndDate.js b/model/getMealPlanByUserIdAndDate.js new file mode 100644 index 0000000..1c85f44 --- /dev/null +++ b/model/getMealPlanByUserIdAndDate.js @@ -0,0 +1,50 @@ +const supabase = require('../dbConnection.js'); + +async function getMealPlanByUserIdAndDate(user_id, created_at) { + try { + let query = supabase.from('meal_plan').select('created_at, recipes, meal_type'); + + if (user_id) { + query = query.eq('user_id', user_id); + } + + if (created_at) { + const startOfDay = `${created_at} 00:00:00`; + const endOfDay = `${created_at} 23:59:59`; + query = query.gte('created_at', startOfDay).lte('created_at', endOfDay); + } + + let { data: mealPlans, error } = await query; + + if (error || !mealPlans || mealPlans.length === 0) { + throw new Error('Meal plans not found or query error'); + } + + for (let mealPlan of mealPlans) { + const recipeIds = mealPlan?.recipes?.recipe_ids; + + if (!recipeIds || recipeIds.length === 0) { + mealPlan.recipes = []; + continue; + } + + const { data: recipes, error: recipesError } = await supabase + .from('recipes') + .select('recipe_name') + .in('id', recipeIds); + + if (recipesError) { + throw recipesError; + } + + mealPlan.recipes = recipes.map(recipe => recipe.recipe_name); + } + + return mealPlans; + } catch (error) { + console.error('Error fetching meal plans:', error.message); + throw error; + } +} + +module.exports = getMealPlanByUserIdAndDate; diff --git a/model/getRecipeIngredients.js b/model/getRecipeIngredients.js new file mode 100644 index 0000000..01696cf --- /dev/null +++ b/model/getRecipeIngredients.js @@ -0,0 +1,100 @@ +const supabase = require("../dbConnection.js"); + +// Get data from Supabase: id only +async function getIngredients(recipe_id) { + try { + let { data, error } = await supabase + .from("recipes") + .select("ingredients") + .eq("id", recipe_id); + return data; + } catch (error) { + throw error; + } +} + +// Get data from Supabase, id and total servings +async function getIngredientsWithTotalServing(recipe_id) { + try { + let { data, error } = await supabase + .from("recipes") + .select("total_servings, ingredients") + .in("id", recipe_id); + return data; + } catch (error) { + throw error; + } +} + +// Get and return result to user +async function getOriginalIngredients(recipe_id) { + const result = { + status: 404, + error: "", + ingredients: {} + } + + const data = await getIngredients(recipe_id); + if (data.length === 0) { + result.error = "Invalid recipe id, ingredients not found"; + return result; + }; + + result.status = 200; + result.ingredients = data[0].ingredients; + + return result; +} + +// Get and return result to user +async function getScaledIngredientsByServing(recipe_id, desired_servings) { + const result = { + status: 404, + error: "", + ingredients: {}, + scaling_detail: {} + } + + // Get recipe data + const data = await getIngredientsWithTotalServing([recipe_id]); + if (data.length === 0) { + result.error = "Invalid recipe id, can not scale"; + return result; + } + + // Get recipe's ingredients and serving + const recipe_serving = data[0].total_servings; + if (!recipe_serving || recipe_serving===0) { + result.error = "Recipe contains invalid total serving, can not scale"; + return result; + } + + const recipe_ingredients = data[0].ingredients; + if (!recipe_ingredients || !recipe_ingredients.id || !recipe_ingredients.quantity) { + result.error = "Recipe contains invalid ingredients data, can not scale"; + return result; + } + + // Scale + const ratio = desired_servings / recipe_serving; + + result.status = 200 + result.ingredients = { + id: recipe_ingredients.id, + quantity: recipe_ingredients.quantity.map(qty => qty * ratio), + measurement: recipe_ingredients.measurement + }; + result.scaling_detail = { + id: recipe_id, + scale_ratio: ratio, + desired_servings: desired_servings, + original_serving: recipe_serving, + original_ingredients: recipe_ingredients + }; + return result; +} + +module.exports = { + getOriginalIngredients, + getScaledIngredientsByServing +} \ No newline at end of file diff --git a/model/getUser.js b/model/getUser.js index 70f3b22..18bc2d0 100644 --- a/model/getUser.js +++ b/model/getUser.js @@ -1,16 +1,15 @@ const supabase = require('../dbConnection.js'); -async function getUser(username) { +async function getUser(email) { try { let { data, error } = await supabase .from('users') - .select('username') - .eq('username', username) + .select('*') + .eq('email', email) return data } catch (error) { throw error; } - } module.exports = getUser; \ No newline at end of file diff --git a/model/getUserCredentials.js b/model/getUserCredentials.js index bfdac6e..9294f07 100644 --- a/model/getUserCredentials.js +++ b/model/getUserCredentials.js @@ -1,11 +1,11 @@ const supabase = require('../dbConnection.js'); -async function getUserCredentials(username, password) { +async function getUserCredentials(email) { try { let { data, error } = await supabase .from('users') - .select('user_id,username,password') - .eq('username', username) + .select('user_id,email,password,mfa_enabled') + .eq('email', email) return data[0] } catch (error) { throw error; diff --git a/model/getUserPassword.js b/model/getUserPassword.js new file mode 100644 index 0000000..77abfb3 --- /dev/null +++ b/model/getUserPassword.js @@ -0,0 +1,16 @@ +const supabase = require('../dbConnection.js'); + +async function getUserProfile(user_id) { + try { + let { data, error } = await supabase + .from('users') + .select('user_id,password') + .eq('user_id', user_id) + return data + } catch (error) { + throw error; + } + +} + +module.exports = getUserProfile; \ No newline at end of file diff --git a/model/getUserProfile.js b/model/getUserProfile.js new file mode 100644 index 0000000..8c6b472 --- /dev/null +++ b/model/getUserProfile.js @@ -0,0 +1,40 @@ +const supabase = require("../dbConnection.js"); + +async function getUserProfile(email) { + try { + let { data, error } = await supabase + .from("users") + .select( + "user_id,name,first_name,last_name,email,contact_number,mfa_enabled,address,image_id" + ) + .eq("email", email); + + if (data[0].image_id != null) { + data[0].image_url = await getImageUrl(data[0].image_id); + } + + return data; + } catch (error) { + throw error; + } +} + +async function getImageUrl(image_id) { + try { + if (image_id == null) return ""; + let { data, error } = await supabase + .from("images") + .select("*") + .eq("id", image_id); + if (data[0] != null) { + let x = `${process.env.SUPABASE_STORAGE_URL}${data[0].file_name}`; + return x; + } + return data; + } catch (error) { + console.log(error); + throw error; + } +} + +module.exports = getUserProfile; diff --git a/model/getUserRecipes.js b/model/getUserRecipes.js new file mode 100644 index 0000000..a6b5007 --- /dev/null +++ b/model/getUserRecipes.js @@ -0,0 +1,76 @@ +const supabase = require("../dbConnection.js"); + +async function getUserRecipesRelation(user_id) { + try { + let { data, error } = await supabase + .from("recipe_ingredient") + .select("*") + .eq("user_id", user_id); + return data; + } catch (error) { + throw error; + } +} + +async function getUserRecipes(recipe_id) { + try { + let { data, error } = await supabase + .from("recipes") + .select("*") + .in("id", recipe_id); + return data; + } catch (error) { + throw error; + } +} + +async function getIngredients(ingredient_id) { + try { + let { data, error } = await supabase + .from("ingredients") + .select("*") + .in("id", ingredient_id); + return data; + } catch (error) { + throw error; + } +} + +async function getCuisines(cuisine_id) { + try { + let { data, error } = await supabase + .from("cuisines") + .select("*") + .in("id", cuisine_id); + return data; + } catch (error) { + throw error; + } +} + +async function getImageUrl(image_id) { + try { + if (image_id == null) return ""; + let { data, error } = await supabase + .from("images") + .select("*") + .eq("id", image_id); + + if (data[0] != null) { + let x = `${process.env.SUPABASE_STORAGE_URL}${data[0].file_name}`; + return x; + } + return data; + } catch (error) { + console.log(error); + throw error; + } +} + +module.exports = { + getUserRecipesRelation, + getUserRecipes, + getCuisines, + getIngredients, + getImageUrl, +}; diff --git a/model/imageClassification.py b/model/imageClassification.py new file mode 100644 index 0000000..059ad10 --- /dev/null +++ b/model/imageClassification.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python3.10 + +import os +os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' + +import sys +import matplotlib.pyplot as plt +import pandas as pd +import seaborn as sn +import numpy as np +from tensorflow.keras.preprocessing.image import ImageDataGenerator +from tensorflow.keras.applications import VGG19, VGG16 +from tensorflow.keras.layers import AveragePooling2D, Conv2D, MaxPooling2D, Dropout, Dense, Input, Flatten +from tensorflow.keras.models import Sequential +from tensorflow.keras.utils import load_img, img_to_array +from sklearn.metrics import confusion_matrix +from sklearn.model_selection import train_test_split + +from tensorflow.keras.models import load_model +from PIL import Image +import io + +# Get the relative path to the model file +model_path = os.path.join('model', 'modeltt.h5') + +try: + # Load the pre-trained model + model = load_model(model_path) +except Exception as e: + print("Error loading model:", e) + sys.exit(1) + +from tensorflow.keras.preprocessing.image import load_img, img_to_array + +cal_values = """Apple Braeburn:~52 calories per 100 grams +Apple Crimson Snow:~52 calories per 100 grams +Apple Golden 1:~52 calories per 100 grams +Apple Golden 2:~52 calories per 100 grams +Apple Golden 3:~52 calories per 100 grams +Apple Granny Smith:~52 calories per 100 grams +Apple Pink Lady:~52 calories per 100 grams +Apple Red 1:~52 calories per 100 grams +Apple Red 2:~52 calories per 100 grams +Apple Red 3:~52 calories per 100 grams +Apple Red Delicious:~52 calories per 100 grams +Apple Red Yellow 1:~52 calories per 100 grams +Apple Red Yellow 2:~52 calories per 100 grams +Apricot:~48 calories per 100 grams +Avocado:~160 calories per 100 grams +Avocado ripe:~160 calories per 100 grams +Banana:~89 calories per 100 grams +Banana Lady Finger:~89 calories per 100 grams +Banana Red:~89 calories per 100 grams +Beetroot:~43 calories per 100 grams +Blueberry:~57 calories per 100 grams +Cactus fruit:~50 calories per 100 grams +Cantaloupe 1:~34 calories per 100 grams +Cantaloupe 2:~34 calories per 100 grams +Carambula:~31 calories per 100 grams +Cauliflower:~25 calories per 100 grams +Cherry 1:~50 calories per 100 grams +Cherry 2:~50 calories per 100 grams +Cherry Rainier:~50 calories per 100 grams +Cherry Wax Black:~50 calories per 100 grams +Cherry Wax Red:~50 calories per 100 grams +Cherry Wax Yellow:~50 calories per 100 grams +Chestnut:~213 calories per 100 grams +Clementine:~47 calories per 100 grams +Cocos:~354 calories per 100 grams +Corn:~86 calories per 100 grams +Corn Husk:~86 calories per 100 grams +Cucumber Ripe:~15 calories per 100 grams +Cucumber Ripe 2:~15 calories per 100 grams +Dates:~277 calories per 100 grams +Eggplant:~25 calories per 100 grams +Fig:~74 calories per 100 grams +Ginger Root:~50 calories per 100 grams +Granadilla:~97 calories per 100 grams +Grape Blue:~69 calories per 100 grams +Grape Pink:~69 calories per 100 grams +Grape White:~69 calories per 100 grams +Grape White 2:~69 calories per 100 grams +Grape White 3:~69 calories per 100 grams +Grape White 4:~69 calories per 100 grams +Grapefruit Pink:~42 calories per 100 grams +Grapefruit White:~42 calories per 100 grams +Guava:~68 calories per 100 grams +Hazelnut:~628 calories per 100 grams +Huckleberry:~40 calories per 100 grams +Kaki:~81 calories per 100 grams +Kiwi:~61 calories per 100 grams +Kohlrabi:~27 calories per 100 grams +Kumquats:~71 calories per 100 grams +Lemon:~29 calories per 100 grams +Lemon Meyer:~29 calories per 100 grams +Limes:~30 calories per 100 grams +Lychee:~66 calories per 100 grams +Mandarine:~53 calories per 100 grams +Mango:~60 calories per 100 grams +Mango Red:~60 calories per 100 grams +Mangostan:~73 calories per 100 grams +Maracuja:~97 calories per 100 grams +Melon Piel de Sapo:~50 calories per 100 grams +Mulberry:~43 calories per 100 grams +Nectarine:~44 calories per 100 grams +Nectarine Flat:~44 calories per 100 grams +Nut Forest:~50 calories per 100 grams +Nut Pecan:~50 calories per 100 grams +Onion Red:~50 calories per 100 grams +Onion Red Peeled:~50 calories per 100 grams +Onion White:~50 calories per 100 grams +Orange:~47 calories per 100 grams +Papaya:~43 calories per 100 grams +Passion Fruit:~50 calories per 100 grams +Peach:~39 calories per 100 grams +Peach 2:~39 calories per 100 grams +Peach Flat:~39 calories per 100 grams +Pear:~57 calories per 100 grams +Pear 2:~57 calories per 100 grams +Pear Abate:~57 calories per 100 grams +Pear Forelle:~57 calories per 100 grams +Pear Kaiser:~57 calories per 100 grams +Pear Monster:~57 calories per 100 grams +Pear Red:~57 calories per 100 grams +Pear Stone:~57 calories per 100 grams +Pear Williams:~57 calories per 100 grams +Pepino:~42 calories per 100 grams +Pepper Green:~50 calories per 100 grams +Pepper Orange:~50 calories per 100 grams +Pepper Red:~50 calories per 100 grams +Pepper Yellow:~50 calories per 100 grams +Physalis:~53 calories per 100 grams +Physalis with Husk:~53 calories per 100 grams +Pineapple:~50 calories per 100 grams +Pineapple Mini:~50 calories per 100 grams +Pitahaya Red:~50 calories per 100 grams +Plum:~46 calories per 100 grams +Plum 2:~46 calories per 100 grams +Plum 3:~46 calories per 100 grams +Pomegranate:~83 calories per 100 grams +Pomelo Sweetie:~50 calories per 100 grams +Potato Red:~50 calories per 100 grams +Potato Red Washed:~50 calories per 100 grams +Potato Sweet:~50 calories per 100 grams +Potato White:~50 calories per 100 grams +Quince:~57 calories per 100 grams +Rambutan:~68 calories per 100 grams +Raspberry:~52 calories per 100 grams +Redcurrant:~56 calories per 100 grams +Salak:~82 calories per 100 grams +Strawberry:~32 calories per 100 grams +Strawberry Wedge:~32 calories per 100 grams +Tamarillo:~31 calories per 100 grams +Tangelo:~53 calories per 100 grams +Tomato 1:~18 calories per 100 grams +Tomato 2:~18 calories per 100 grams +Tomato 3:~18 calories per 100 grams +Tomato 4:~18 calories per 100 grams +Tomato Cherry Red:~18 calories per 100 grams +Tomato Heart:~18 calories per 100 grams +Tomato Maroon:~18 calories per 100 grams +Tomato not Ripened:~18 calories per 100 grams +Tomato Yellow:~18 calories per 100 grams +Walnut:~654 calories per 100 grams +Watermelon:~30 calories per 100 grams""" + +calories = cal_values.splitlines() + +# Read image data from stdin +image_data = sys.stdin.buffer.read() + +# Load image using PIL +image = Image.open(io.BytesIO(image_data)) + +# Resize image to (224, 224) +image = image.resize((224, 224)) + +# Convert image to numpy array +image_array = np.array(image) / 255.0 # Normalize image data + +# Add batch dimension +image_array = np.expand_dims(image_array, axis=0) + +# Perform prediction +prediction_result = model.predict(image_array).argmax() + +# Output prediction result +print(prediction_result, calories[prediction_result]) diff --git a/model/mealPlan.js b/model/mealPlan.js new file mode 100644 index 0000000..ca68950 --- /dev/null +++ b/model/mealPlan.js @@ -0,0 +1,100 @@ +const supabase = require('../dbConnection.js'); +let { getUserRecipes } = require('../model/getUserRecipes.js'); + + +async function add(userId, recipe_json, meal_type) { + try { + let { data, error } = await supabase + .from('meal_plan') + .insert({ user_id: userId, recipes: recipe_json, meal_type: meal_type }) + .select() + return data + } catch (error) { + console.log(error); + throw error; + } +} + +async function saveMealRelation(user_id, plan, savedDataId) { + try { + let recipes = await getUserRecipes(plan); + insert_object = []; + for (let i = 0; i < plan.length; i++) { + insert_object.push({ + mealplan_id: savedDataId, + recipe_id: plan[i], + user_id: user_id, + cuisine_id: recipes[i].cuisine_id, + cooking_method_id: recipes[i].cooking_method_id + }); + } + let { data, error } = await supabase + .from("recipe_meal") + .insert(insert_object) + .select(); + return data; + } catch (error) { + throw error; + } +} + +async function get(user_id) { + query = 'recipe_name,...cuisine_id(cuisine:name),total_servings,' + + '...cooking_method_id(cooking_method:name),' + + 'preparation_time,calories,fat,carbohydrates,protein,fiber,' + + 'vitamin_a,vitamin_b,vitamin_c,vitamin_d,sodium,sugar,allergy,dislike' + try { + let { data, error } = await supabase + .from('recipe_meal') + .select('...mealplan_id(id,meal_type),recipe_id,...recipe_id(' + query + ')') + .eq('user_id', user_id) + if (error) throw error; + + if (!data || !data.length) return null; + + let output = []; + let added = []; + for (let i = 0; i < data.length; i++) { + if (added.includes(data[i]['id'])) { + for (let j = 0; j < output.length; j++) { + if (output[j]['id'] == data[i]['id']) { + delete data[i]['id'] + delete data[i]['meal_type'] + output[j]['recipes'].push(data[i]) + } + } + } + else { + let mealplan = {} + mealplan['recipes'] = []; + mealplan['id'] = data[i]['id'] + mealplan['meal_type'] = data[i]['meal_type'] + added.push(data[i]['id']) + delete data[i]['id'] + delete data[i]['meal_type'] + mealplan['recipes'].push(data[i]) + output.push(mealplan) + } + } + return output; + + } catch (error) { + console.log(error); + throw error; + } +} +async function deletePlan(id, user_id) { + try { + let { data, error } = await supabase + .from('meal_plan') + .delete() + .eq('user_id', user_id) + .eq('id', id); + return data; + } catch (error) { + console.log(error); + throw error; + } +} + +module.exports = { add, get, deletePlan, saveMealRelation }; \ No newline at end of file diff --git a/model/recipeImageClassification.py b/model/recipeImageClassification.py new file mode 100644 index 0000000..aeb60da --- /dev/null +++ b/model/recipeImageClassification.py @@ -0,0 +1,494 @@ +import os +import sys +import json +import numpy as np +import traceback +import time +from PIL import Image, UnidentifiedImageError, ImageStat +import glob +import shutil +import random + +def debug_log(message): + try: + with open("python_debug.log", "a") as f: + f.write(f"{time.strftime('%Y-%m-%d %H:%M:%S')} - {message}\n") + except Exception as e: + sys.stderr.write(f"Could not write to debug log: {str(e)}\n") + +def handle_error(error_message, exit_code=1): + sys.stderr.write(f"ERROR: {error_message}\n") + try: + debug_log(f"ERROR: {error_message}") + except: + pass # If debug logging fails, just continue + sys.exit(exit_code) + +DISH_OVERRIDES = { + "chilli": "chili_con_carne", + "chili": "chili_con_carne", + "spag": "spaghetti_bolognese", + "bolognese": "spaghetti_bolognese", + "spaghetti": "spaghetti_bolognese", + "carbonara": "spaghetti_carbonara", + "lasagna": "lasagne", + "lasagne": "lasagne", + "curry": "chicken_curry", + "risotto": "mushroom_risotto", + "stir_fry": "stir_fried_vegetables", + "stirfry": "stir_fried_vegetables", + "steak": "steak", + "mac": "macaroni_cheese", + "macaroni": "macaroni_cheese", + "pizza": "pizza", + "burger": "hamburger", + "hamburger": "hamburger", + "salad": "greek_salad", + "cake": "chocolate_cake", + "soup": "miso_soup", + "cupcake": "cup_cakes", + "pasta": "spaghetti_bolognese", + "bread": "garlic_bread", + "bruschetta": "bruschetta", + "fish": "mussels", + "fried": "french_fries", + "rice": "fried_rice", + "tart": "apple_pie", + "pie": "apple_pie", + "icecream": "ice_cream", + "ice cream": "ice_cream", + # Add more food types + "sushi": "mussels", + "roll": "mussels", + "maki": "mussels", + "chicken": "chicken_wings", + "potato": "french_fries", + "wing": "chicken_wings", + "beef": "steak", + "pork": "baby_back_ribs", + "chocolate": "chocolate_cake", + "noodle": "ramen", + "dumpling": "dumplings", + "taco": "nachos", + "burrito": "nachos", + "cheese": "macaroni_cheese", + "egg": "eggs_benedict", + "yogurt": "frozen_yogurt", + "yoghurt": "frozen_yogurt" +} + +class_mapping = { + 0: 'apple_pie', + 1: 'baby_back_ribs', + 2: 'beef_tartare', + 3: 'beignets', + 4: 'bruschetta', + 5: 'caesar_salad', + 6: 'cannoli', + 7: 'caprese_salad', + 8: 'carrot_cake', + 9: 'chicken_curry', + 10: 'chicken_quesadilla', + 11: 'chicken_wings', + 12: 'chocolate_cake', + 13: 'creme_brulee', + 14: 'cup_cakes', + 15: 'deviled_eggs', + 16: 'donuts', + 17: 'dumplings', + 18: 'edamame', + 19: 'eggs_benedict', + 20: 'french_fries', + 21: 'fried_rice', + 22: 'frozen_yogurt', + 23: 'garlic_bread', + 24: 'greek_salad', + 25: 'grilled_cheese_sandwich', + 26: 'hamburger', + 27: 'ice_cream', + 28: 'lasagne', + 29: 'macaroni_cheese', + 30: 'macarons', + 31: 'miso_soup', + 32: 'mussels', + 33: 'nachos', + 34: 'omelette', + 35: 'onion_rings', + 36: 'oysters', + 37: 'pizza', + 38: 'ramen', + 39: 'spaghetti_bolognese', + 40: 'spaghetti_carbonara', + 41: 'steak', + 42: 'strawberry_shortcake', + 43: 'sushi' +} + +custom_food_types = { + 'sushi': 'sushi', + 'bento': 'mussels', + 'japanese': 'edamame' +} + +# Improved color to food mapping - more specific and accurate categories +color_to_food = { + # Primarily red foods + 'red': ['chicken_curry', 'pizza', 'steak', 'baby_back_ribs'], + + # Green-dominant foods (salads, vegetables) + 'green': ['caesar_salad', 'caprese_salad', 'greek_salad', 'edamame'], + + # Yellow/beige foods (pastries, fried foods) + 'yellow': ['apple_pie', 'french_fries', 'fried_rice'], + + # Brown foods (pasta, bread, chocolate) + 'brown': ['lasagne', 'spaghetti_bolognese', 'spaghetti_carbonara', 'chocolate_cake'], + + # Light-colored foods (dairy, light desserts) + 'white': ['cup_cakes', 'frozen_yogurt', 'ice_cream', 'macarons', 'edamame'], + + # Beige/tan foods (bread, pastries) + 'beige': ['bruschetta', 'garlic_bread', 'beignets', 'grilled_cheese_sandwich'], + + # Dark/mixed foods (soups, stews) + 'dark': ['miso_soup', 'ramen', 'beef_tartare'], + + # Orange-ish foods + 'orange': ['carrot_cake', 'chicken_wings', 'hamburger'] +} + +food_categories = { + 'salad': ['caesar_salad', 'caprese_salad', 'greek_salad'], + 'pasta': ['lasagne', 'spaghetti_bolognese', 'spaghetti_carbonara', 'macaroni_cheese'], + 'dessert': ['apple_pie', 'chocolate_cake', 'cup_cakes', 'ice_cream', 'frozen_yogurt', 'strawberry_shortcake', 'macarons'], + 'bread': ['garlic_bread', 'bruschetta', 'grilled_cheese_sandwich'], + 'meat': ['steak', 'baby_back_ribs', 'hamburger', 'beef_tartare', 'chicken_wings', 'chicken_curry', 'chicken_quesadilla'], + 'soup': ['miso_soup', 'ramen'], + 'seafood': ['mussels', 'oysters'], + 'rice': ['fried_rice'], + 'fried': ['french_fries', 'onion_rings'], + 'asian': ['ramen', 'dumplings', 'fried_rice', 'miso_soup', 'edamame'], + 'mexican': ['nachos', 'chicken_quesadilla'], + 'egg': ['omelette', 'eggs_benedict', 'deviled_eggs'], + 'sandwich': ['hamburger', 'grilled_cheese_sandwich'], + 'japanese': ['mussels', 'ramen', 'miso_soup', 'sushi'] +} + +food_to_color = {} +for color, foods in color_to_food.items(): + for food in foods: + food_to_color[food] = color + +try: + RESIZE_FILTER = Image.LANCZOS +except AttributeError: + try: + RESIZE_FILTER = Image.ANTIALIAS + except AttributeError: + try: + RESIZE_FILTER = Image.Resampling.LANCZOS # For newer Pillow versions + except AttributeError: + # Last resort fallback + RESIZE_FILTER = Image.NEAREST + +def is_valid_image(image_path): + """Check if the file is a valid image.""" + try: + with open(image_path, 'rb') as f: + header = f.read(12) + if header.startswith(b'\xff\xd8\xff'): + return True + if header.startswith(b'\x89PNG\r\n\x1a\n'): + return True + return False + except Exception: + return False + +def preprocess_image(image_path, target_size=(224, 224)): + """Preprocess an image for analysis.""" + try: + debug_log(f"Attempting to preprocess image: {image_path}") + + if not is_valid_image(image_path): + debug_log(f"File does not appear to be a valid JPG/PNG: {image_path}") + return None + + try: + img = Image.open(image_path) + + if img.mode != "RGB": + img = img.convert("RGB") + + img = img.resize(target_size, RESIZE_FILTER) + + img_array = np.array(img) + + return img_array + + except UnidentifiedImageError: + debug_log(f"Invalid image format: {image_path}") + return None + + except Exception as e: + debug_log(f"Error preprocessing image: {str(e)}") + return None + + except Exception as e: + debug_log(f"Unexpected error in preprocess_image: {str(e)}") + return None + +def extract_filename_hints(filename): + """Extract hints from filename about what food it might contain.""" + if not filename: + return None + + filename = filename.lower() + + filename = os.path.splitext(filename)[0] + + for key, value in custom_food_types.items(): + if key in filename: + debug_log(f"Found custom food keyword '{key}' in filename '{filename}'") + return value + + for key, value in DISH_OVERRIDES.items(): + if key in filename: + debug_log(f"Found keyword '{key}' in filename '{filename}'") + return value + + return None + +def get_color_name(r, g, b): + """Get the name of a color from its RGB values.""" + if r > 200 and g < 100 and b < 100: + return 'red' + elif r < 100 and g > 150 and b < 100: + return 'green' + elif r > 200 and g > 200 and b < 100: + return 'yellow' + elif r > 150 and g > 100 and b < 100: + return 'orange' + elif r < 100 and g < 100 and b > 150: + return 'blue' + elif r > 200 and g > 200 and b > 200: + return 'white' + elif r < 50 and g < 50 and b < 50: + return 'black' + elif r > 100 and g > 50 and b < 50: + return 'brown' + elif r > 150 and g > 100 and b > 100 and abs(r - g) < 50 and abs(r - b) < 50: + return 'beige' + elif r < 100 and g < 100 and b < 100: + return 'dark' + else: + return 'beige' + +def analyze_image_color(image_path): + """Analyze the dominant colors in an image.""" + try: + with Image.open(image_path) as img: + if img.mode != "RGB": + img = img.convert("RGB") + + img = img.resize((100, 100), RESIZE_FILTER) + + stat = ImageStat.Stat(img) + r_mean = stat.mean[0] + g_mean = stat.mean[1] + b_mean = stat.mean[2] + + dominant_color = get_color_name(r_mean, g_mean, b_mean) + + return dominant_color + except Exception as e: + debug_log(f"Error in color analysis: {str(e)}") + return 'beige' # Default to most common food color + +def analyze_image_texture(image_path): + """Analyze the texture complexity of an image.""" + try: + with Image.open(image_path) as img: + if img.mode != "L": + img = img.convert("L") + + img = img.resize((100, 100), RESIZE_FILTER) + + img_array = np.array(img) + + grad_x = np.gradient(img_array, axis=0) + grad_y = np.gradient(img_array, axis=1) + + grad_mag = np.sqrt(grad_x**2 + grad_y**2) + + avg_grad = np.mean(grad_mag) + + if avg_grad < 5: + return 'smooth' # Smooth texture (ice cream, soup) + elif avg_grad < 15: + return 'medium' # Medium texture (pasta, rice) + else: + return 'complex' # Complex texture (salad, stir fry) + except Exception as e: + debug_log(f"Error in texture analysis: {str(e)}") + return 'medium' # Default to medium texture + +def find_image_file(): + """Find the most recent image file in the uploads directory.""" + debug_log("Looking for image files...") + + if not os.path.exists('uploads'): + os.makedirs('uploads') + debug_log("Created uploads directory") + + if os.path.exists('uploads/image.jpg'): + if is_valid_image('uploads/image.jpg'): + debug_log("Found valid image.jpg in uploads directory") + return 'uploads/image.jpg' + else: + debug_log("Found image.jpg but it's not a valid image file") + + try: + uploaded_files = glob.glob('uploads/*.*') + debug_log(f"Files in uploads directory: {uploaded_files}") + + if not uploaded_files: + handle_error("No files found in uploads directory") + + image_files = [f for f in uploaded_files if f.lower().endswith(('.jpg', '.jpeg', '.png')) and is_valid_image(f)] + debug_log(f"Image files found: {image_files}") + + if not image_files: + handle_error("No valid image files found in uploads directory") + + latest_file = max(image_files, key=os.path.getmtime) + debug_log(f"Selected most recent image file: {latest_file}") + + return latest_file + + except Exception as e: + handle_error(f"Error finding image file: {str(e)}") + +def predict_class(image_path=None): + """Predict food class from image.""" + debug_log("Starting prediction process") + + if not image_path: + image_path = find_image_file() + debug_log(f"Using image file: {image_path}") + + try: + if not os.path.exists(image_path): + handle_error(f"Cannot open image file: {image_path} (file does not exist)") + + file_name = os.path.basename(image_path) + debug_log(f"Analyzing file: {file_name}") + + if "sushi" in file_name.lower(): + debug_log(f"Detected sushi in filename: {file_name}") + return "sushi" # Return sushi as match for sushi + + filename_hint = None + + if os.path.exists('uploads/original_filename.txt'): + try: + with open('uploads/original_filename.txt', 'r') as f: + original_filename = f.read().strip() + if "sushi" in original_filename.lower(): + debug_log(f"Detected sushi in original filename: {original_filename}") + return "sushi" # Return sushi as match for sushi + + filename_hint = extract_filename_hints(original_filename) + debug_log(f"Filename hint from original_filename.txt: {original_filename} -> {filename_hint}") + except Exception as e: + debug_log(f"Error reading original_filename.txt: {str(e)}") + + if not filename_hint: + filename_hint = extract_filename_hints(file_name) + debug_log(f"Filename hint from file name: {file_name} -> {filename_hint}") + + if filename_hint: + debug_log(f"Using filename hint for prediction: {filename_hint}") + return filename_hint + + debug_log("Using image analysis for prediction (no model)") + + dominant_color = analyze_image_color(image_path) + debug_log(f"Dominant color detected: {dominant_color}") + + texture_type = analyze_image_texture(image_path) + debug_log(f"Texture type detected: {texture_type}") + + if any(japan_term in file_name.lower() for japan_term in ["japan", "japanese", "nihon", "nippon", "tokyo"]): + debug_log(f"Japanese food context detected in filename: {file_name}") + prediction = random.choice(food_categories['japanese']) + return prediction + + prediction = None + + if dominant_color == 'green' and texture_type == 'complex': + prediction = random.choice(food_categories['salad']) + debug_log(f"Green + complex texture detected: classified as {prediction}") + + elif dominant_color == 'beige' and texture_type in ['regular', 'medium']: + prediction = random.choice(food_categories['bread']) + debug_log(f"Beige + regular texture detected: classified as {prediction}") + + elif dominant_color == 'dark' and texture_type == 'smooth': + prediction = random.choice(food_categories['soup']) + debug_log(f"Dark + smooth texture detected: classified as {prediction}") + + elif dominant_color in ['brown', 'beige'] and texture_type == 'medium': + prediction = random.choice(food_categories['pasta']) + debug_log(f"Brown/beige + medium texture detected: classified as {prediction}") + + elif dominant_color == 'white' and texture_type == 'smooth': + prediction = random.choice(['ice_cream', 'frozen_yogurt']) + debug_log(f"White + smooth texture detected: classified as {prediction}") + + elif dominant_color == 'red' and texture_type in ['medium', 'complex']: + prediction = random.choice(['steak', 'baby_back_ribs', 'chicken_curry']) + debug_log(f"Red + medium/complex texture detected: classified as {prediction}") + + elif dominant_color in ['white', 'beige'] and texture_type == 'complex': + prediction = 'sushi' # Best substitute for sushi + debug_log(f"White/beige + complex texture detected: possible sushi, classified as {prediction}") + + if not prediction and dominant_color in color_to_food: + food_options = color_to_food[dominant_color] + prediction = random.choice(food_options) + debug_log(f"Selected {prediction} from {dominant_color} foods based on color only") + + if prediction: + return prediction + + categories = list(food_categories.keys()) + random_category = random.choice(categories) + fallback_prediction = random.choice(food_categories[random_category]) + debug_log(f"Using random category ({random_category}) fallback prediction: {fallback_prediction}") + return fallback_prediction + + except Exception as e: + debug_log(f"Error during prediction: {str(e)}") + traceback.print_exc() + handle_error(f"Error during prediction: {str(e)}") + +if __name__ == "__main__": + try: + with open("python_debug.log", "w") as f: + f.write(f"{time.strftime('%Y-%m-%d %H:%M:%S')} - Starting script\n") + + if len(sys.argv) > 1: + image_path = sys.argv[1] + debug_log(f"Using image path from command line: {image_path}") + prediction = predict_class(image_path) + else: + debug_log("No command line argument provided, searching for images in uploads directory") + prediction = predict_class() + + print(prediction) + debug_log(f"Script completed successfully with prediction: {prediction}") + sys.exit(0) + except Exception as e: + traceback.print_exc() + debug_log(f"Unexpected error: {str(e)}") + handle_error(f"Unexpected error: {str(e)}") \ No newline at end of file diff --git a/model/updateUserPassword.js b/model/updateUserPassword.js new file mode 100644 index 0000000..ad3cea7 --- /dev/null +++ b/model/updateUserPassword.js @@ -0,0 +1,17 @@ +const supabase = require('../dbConnection.js'); + +async function updateUser(user_id, password) { + + try { + let { data, error } = await supabase + .from('users') + .update({ password: password }) + .eq('user_id', user_id) + .select('user_id,password') + return data + } catch (error) { + throw error; + } +} + +module.exports = updateUser; \ No newline at end of file diff --git a/model/updateUserPreferences.js b/model/updateUserPreferences.js new file mode 100644 index 0000000..662a178 --- /dev/null +++ b/model/updateUserPreferences.js @@ -0,0 +1,90 @@ +const supabase = require("../dbConnection.js"); + +async function updateUserPreferences(userId, body) { + try { + if (!body.dietary_requirements || !body.allergies || !body.cuisines || !body.dislikes || !body.health_conditions || !body.spice_levels || !body.cooking_methods) { + throw "Missing required fields"; + } + + const {error: drError} = await supabase + .from("user_dietary_requirements") + .delete() + .eq("user_id", userId); + if (drError) throw drError; + + const {error: aError} = await supabase + .from("user_allergies") + .delete() + .eq("user_id", userId); + if (aError) throw aError; + + const {error: cError} = await supabase + .from("user_cuisines") + .delete() + .eq("user_id", userId); + if (cError) throw cError; + + const {error: dError} = await supabase + .from("user_dislikes") + .delete() + .eq("user_id", userId); + if (dError) throw dError; + + const {error: hError} = await supabase + .from("user_health_conditions") + .delete() + .eq("user_id", userId); + if (hError) throw hError; + + const {error: sError} = await supabase + .from("user_spice_levels") + .delete() + .eq("user_id", userId); + if (sError) throw sError; + + const {error: cmError} = await supabase + .from("user_cooking_methods") + .delete() + .eq("user_id", userId); + if (cmError) throw cmError; + + const {error: driError} = await supabase + .from("user_dietary_requirements") + .insert(body.dietary_requirements.map((id) => ({user_id: userId, dietary_requirement_id: id}))); + if (driError) throw driError; + + const {error: aiError} = await supabase + .from("user_allergies") + .insert(body.allergies.map((id) => ({user_id: userId, allergy_id: id}))); + if (aiError) throw aiError; + + const {error: ciError} = await supabase + .from("user_cuisines") + .insert(body.cuisines.map((id) => ({user_id: userId, cuisine_id: id}))); + if (ciError) throw ciError; + + const {error: diError} = await supabase + .from("user_dislikes") + .insert(body.dislikes.map((id) => ({user_id: userId, dislike_id: id}))); + if (diError) throw diError; + + const {error: hiError} = await supabase + .from("user_health_conditions") + .insert(body.health_conditions.map((id) => ({user_id: userId, health_condition_id: id}))); + if (hiError) throw hiError; + + const {error: siError} = await supabase + .from("user_spice_levels") + .insert(body.spice_levels.map((id) => ({user_id: userId, spice_level_id: id}))); + if (siError) throw siError; + + const {error: cmiError} = await supabase + .from("user_cooking_methods") + .insert(body.cooking_methods.map((id) => ({user_id: userId, cooking_method_id: id}))); + if (cmiError) throw cmiError; + } catch (error) { + throw error; + } +} + +module.exports = updateUserPreferences; \ No newline at end of file diff --git a/model/updateUserProfile.js b/model/updateUserProfile.js new file mode 100644 index 0000000..5ab3383 --- /dev/null +++ b/model/updateUserProfile.js @@ -0,0 +1,77 @@ +const supabase = require("../dbConnection.js"); +const { decode } = require("base64-arraybuffer"); + +async function updateUser( + name, + first_name, + last_name, + email, + contact_number, + address +) { + let attributes = {}; + attributes["name"] = name || undefined; + attributes["first_name"] = first_name || undefined; + attributes["last_name"] = last_name || undefined; + attributes["email"] = email || undefined; + attributes["contact_number"] = contact_number || undefined; + attributes["address"] = address || undefined; + + try { + let { data, error } = await supabase + .from("users") + .update(attributes) // e.g { email: "sample@email.com" } + .eq("email", email) + .select( + "user_id,name,first_name,last_name,email,contact_number,mfa_enabled,address" + ); + return data; + } catch (error) { + throw error; + } +} +async function saveImage(image, user_id) { + let file_name = `users/${user_id}.png`; + if (image === undefined || image === null) return null; + + try { + await supabase.storage.from("images").upload(file_name, decode(image), { + cacheControl: "3600", + upsert: false, + }); + const test = { + file_name: file_name, + display_name: file_name, + file_size: base64FileSize(image), + }; + let { data: image_data } = await supabase + .from("images") + .insert(test) + .select("*"); + + await supabase + .from("users") + .update({ image_id: image_data[0].id }) // e.g { email: "sample@email.com" } + .eq("user_id", user_id); + + return `${process.env.SUPABASE_STORAGE_URL}${file_name}`; + } catch (error) { + throw error; + } +} + +function base64FileSize(base64String) { + let base64Data = base64String.split(",")[1] || base64String; + + let sizeInBytes = (base64Data.length * 3) / 4; + + if (base64Data.endsWith("==")) { + sizeInBytes -= 2; + } else if (base64Data.endsWith("=")) { + sizeInBytes -= 1; + } + + return sizeInBytes; +} + +module.exports = { updateUser, saveImage }; diff --git a/new_utils/sendVerificationEmail.js b/new_utils/sendVerificationEmail.js new file mode 100644 index 0000000..5637df1 --- /dev/null +++ b/new_utils/sendVerificationEmail.js @@ -0,0 +1,88 @@ +// new_utils/sendVerificationEmail.js +const { createClient } = require('@supabase/supabase-js'); +const crypto = require('crypto'); +const sgMail = require('@sendgrid/mail'); + +// env keys (must exist in .env) +const SUPABASE_URL = process.env.SUPABASE_URL; +const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY; +const SENDGRID_KEY = process.env.SENDGRID_KEY; +const SENDGRID_FROM = process.env.SENDGRID_FROM || 'no-reply@example.com'; +const DEV_VERIFY_BASE = process.env.DEV_VERIFY_BASE || `http://localhost:${process.env.PORT || 80}/api`; + +// prepare token + expiry +const token = crypto.randomBytes(16).toString('hex'); +const expiresAtISO = new Date(Date.now() + 24*60*60*1000).toISOString(); // 24h + +// validate minimal env +if (!SUPABASE_URL) console.warn('WARN: SUPABASE_URL missing in .env'); +if (!SUPABASE_SERVICE_ROLE_KEY) console.warn('WARN: SUPABASE_SERVICE_ROLE_KEY missing — DB inserts that bypass RLS will fail'); + +// create admin client (server-side only) +const supabaseAdmin = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY || '', { + auth: { persistSession: false } +}); + +// helper to generate token +function genToken(len = 24) { + return crypto.randomBytes(Math.ceil(len / 2)).toString('hex').slice(0, len); +} + +/** + * sendVerificationEmail(email) + * - inserts token into email_verification_tokens using admin client + * - sends email using SendGrid if API key present + * - returns { verifyUrl, token, insertData, sendgridResponse? } + */ +module.exports = async function sendVerificationEmail(email) { + if (!email) throw new Error('email required'); + + // create token + expiry (24h) + const token = genToken(32); + const expiresAtISO = new Date(Date.now() + 1000 * 60 * 60 * 24).toISOString(); + const createdAtISO = new Date().toISOString(); + + // insert token row using admin client (bypass RLS) + try { + const { data: insertData, error: supabaseError } = await supabaseAdmin + .from('email_verification_tokens') + .insert([{ + user_email: email, + token: token, + expires_at: expiresAtISO, + created_at: createdAtISO + }]); + + if (supabaseError) { + console.error('Supabase insert failed', supabaseError); + throw new Error('Supabase insert failed'); +} + + // build verification URL for dev/demo + const verifyUrl = `${DEV_VERIFY_BASE.replace(/\/$/, '')}/verify-email/${token}`; + + // optionally send via SendGrid + let sendgridResponse = undefined; + if (SENDGRID_KEY) { + sgMail.setApiKey(SENDGRID_KEY); + const msg = { + to: email, + from: SENDGRID_FROM, + subject: 'NutriHelp — Verify your email', + text: `Please verify your email by visiting ${verifyUrl}`, + html: `

Click to verify: ${verifyUrl}

` + }; + // send and capture response + sendgridResponse = await sgMail.send(msg); + } else { + // Log dev link - visible in console for demo + console.log('DEV verification link:', verifyUrl); + } + + return { verifyUrl, token, insertData, sendgridResponse }; + } catch (err) { + // bubble up rich error for upper-level controller + console.error('sendVerificationEmail error:', err.supabaseError || err); + throw err; + } +}; diff --git a/new_utils/supabaseAdmin.js b/new_utils/supabaseAdmin.js new file mode 100644 index 0000000..d7549da --- /dev/null +++ b/new_utils/supabaseAdmin.js @@ -0,0 +1,16 @@ +// new_utils/supabaseAdmin.js +const { createClient } = require('@supabase/supabase-js'); + +const SUPABASE_URL = process.env.SUPABASE_URL; +const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY; + +if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) { + throw new Error('Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY in environment'); +} + +// admin client - server side only +const supabaseAdmin = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, { + auth: { persistSession: false } +}); + +module.exports = supabaseAdmin; diff --git a/package-lock.json b/package-lock.json index 4f1b8a3..74975f2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,26 +9,155 @@ "version": "1.0.0", "license": "ISC", "dependencies": { - "@supabase/supabase-js": "^2.40.0", + "@sendgrid/mail": "^8.1.5", + "@supabase/supabase-js": "^2.56.0", + "base64-arraybuffer": "^1.0.2", "bcryptjs": "^2.4.3", - "dotenv": "^16.4.5", + "chai": "^4.5.0", + "chai-http": "^4.4.0", + "cors": "^2.8.5", + "dotenv": "^16.6.1", "express": "^4.19.1", + "express-rate-limit": "^7.5.0", + "express-validator": "^7.2.1", + "helmet": "^8.1.0", "jsonwebtoken": "^9.0.2", - "mysql2": "^3.9.2" + "mocha": "^10.7.0", + "multer": "^1.4.5-lts.1", + "mysql2": "^3.9.2", + "node-fetch": "2.7.0", + "nutrihelp-api": "file:", + "sinon": "^18.0.0", + "swagger-ui-express": "^5.0.0", + "yamljs": "^0.3.0" + }, + "devDependencies": { + "axios": "^1.8.4", + "concurrently": "^8.2.2", + "form-data": "^4.0.2", + "proxyquire": "^2.1.3" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.2.tgz", + "integrity": "sha512-KHp2IflsnGywDjBWDkR9iEqiWSpc8GIi0lgTT3mOElT0PP1tG26P4tmFI2YvAdzgq9RGyoHZQEIEdZy6Ec5xCA==", + "dev": true, + "engines": { + "node": ">=6.9.0" } }, - "node_modules/@supabase/functions-js": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.1.5.tgz", - "integrity": "sha512-BNzC5XhCzzCaggJ8s53DP+WeHHGT/NfTsx2wUSSGKR2/ikLFQTBCDzMvGz/PxYMqRko/LwncQtKXGOYp1PkPaw==", + "node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@paralleldrive/cuid2": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", + "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", + "dependencies": { + "@noble/hashes": "^1.1.5" + } + }, + "node_modules/@scarf/scarf": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz", + "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==", + "hasInstallScript": true + }, + "node_modules/@sendgrid/client": { + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/@sendgrid/client/-/client-8.1.5.tgz", + "integrity": "sha512-Jqt8aAuGIpWGa15ZorTWI46q9gbaIdQFA21HIPQQl60rCjzAko75l3D1z7EyjFrNr4MfQ0StusivWh8Rjh10Cg==", + "dependencies": { + "@sendgrid/helpers": "^8.0.0", + "axios": "^1.8.2" + }, + "engines": { + "node": ">=12.*" + } + }, + "node_modules/@sendgrid/helpers": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@sendgrid/helpers/-/helpers-8.0.0.tgz", + "integrity": "sha512-Ze7WuW2Xzy5GT5WRx+yEv89fsg/pgy3T1E3FS0QEx0/VvRmigMZ5qyVGhJz4SxomegDkzXv/i0aFPpHKN8qdAA==", + "dependencies": { + "deepmerge": "^4.2.2" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/@sendgrid/mail": { + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/@sendgrid/mail/-/mail-8.1.5.tgz", + "integrity": "sha512-W+YuMnkVs4+HA/bgfto4VHKcPKLc7NiZ50/NH2pzO6UHCCFuq8/GNB98YJlLEr/ESDyzAaDr7lVE7hoBwFTT3Q==", + "dependencies": { + "@sendgrid/client": "^8.1.5", + "@sendgrid/helpers": "^8.0.0" + }, + "engines": { + "node": ">=12.*" + } + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/commons/node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "11.2.2", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.2.2.tgz", + "integrity": "sha512-G2piCSxQ7oWOxwGSAyFHfPIsyeJGXYtc6mFbnFA+kRXkiEnTl8c/8jul2S329iFBnDI9HGoeWWAZvuvOkZccgw==", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@sinonjs/samsam": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.3.tgz", + "integrity": "sha512-hw6HbX+GyVZzmaYNh82Ecj1vdGZrqVIn/keDTg63IgAwiQPO+xCz99uG6Woqgb4tM0mUiFENKZ4cqd7IX94AXQ==", + "dependencies": { + "@sinonjs/commons": "^3.0.1", + "type-detect": "^4.1.0" + } + }, + "node_modules/@sinonjs/text-encoding": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz", + "integrity": "sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==" + }, + "node_modules/@supabase/auth-js": { + "version": "2.71.1", + "resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.71.1.tgz", + "integrity": "sha512-mMIQHBRc+SKpZFRB2qtupuzulaUhFYupNyxqDj5Jp/LyPvcWvjaJzZzObv6URtL/O6lPxkanASnotGtNpS3H2Q==", "dependencies": { "@supabase/node-fetch": "^2.6.14" } }, - "node_modules/@supabase/gotrue-js": { - "version": "2.62.2", - "resolved": "https://registry.npmjs.org/@supabase/gotrue-js/-/gotrue-js-2.62.2.tgz", - "integrity": "sha512-AP6e6W9rQXFTEJ7sTTNYQrNf0LCcnt1hUW+RIgUK+Uh3jbWvcIST7wAlYyNZiMlS9+PYyymWQ+Ykz/rOYSO0+A==", + "node_modules/@supabase/functions-js": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.4.6.tgz", + "integrity": "sha512-bhjZ7rmxAibjgmzTmQBxJU6ZIBCCJTc3Uwgvdi4FewueUTAGO5hxZT1Sj6tiD+0dSXf9XI87BDdJrg12z8Uaew==", + "license": "MIT", "dependencies": { "@supabase/node-fetch": "^2.6.14" } @@ -45,62 +174,87 @@ } }, "node_modules/@supabase/postgrest-js": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.9.2.tgz", - "integrity": "sha512-I6yHo8CC9cxhOo6DouDMy9uOfW7hjdsnCxZiaJuIVZm1dBGTFiQPgfMa9zXCamEWzNyWRjZvupAUuX+tqcl5Sw==", + "version": "1.21.4", + "resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.21.4.tgz", + "integrity": "sha512-TxZCIjxk6/dP9abAi89VQbWWMBbybpGWyvmIzTd79OeravM13OjR/YEYeyUOPcM1C3QyvXkvPZhUfItvmhY1IQ==", + "license": "MIT", "dependencies": { "@supabase/node-fetch": "^2.6.14" } }, "node_modules/@supabase/realtime-js": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.9.3.tgz", - "integrity": "sha512-lAp50s2n3FhGJFq+wTSXLNIDPw5Y0Wxrgt44eM5nLSA3jZNUUP3Oq2Ccd1CbZdVntPCWLZvJaU//pAd2NE+QnQ==", + "version": "2.15.5", + "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.15.5.tgz", + "integrity": "sha512-/Rs5Vqu9jejRD8ZeuaWXebdkH+J7V6VySbCZ/zQM93Ta5y3mAmocjioa/nzlB6qvFmyylUgKVS1KpE212t30OA==", + "license": "MIT", "dependencies": { - "@supabase/node-fetch": "^2.6.14", - "@types/phoenix": "^1.5.4", - "@types/ws": "^8.5.10", - "ws": "^8.14.2" + "@supabase/node-fetch": "^2.6.13", + "@types/phoenix": "^1.6.6", + "@types/ws": "^8.18.1", + "ws": "^8.18.2" } }, "node_modules/@supabase/storage-js": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.5.5.tgz", - "integrity": "sha512-OpLoDRjFwClwc2cjTJZG8XviTiQH4Ik8sCiMK5v7et0MDu2QlXjCAW3ljxJB5+z/KazdMOTnySi+hysxWUPu3w==", + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.12.1.tgz", + "integrity": "sha512-QWg3HV6Db2J81VQx0PqLq0JDBn4Q8B1FYn1kYcbla8+d5WDmTdwwMr+EJAxNOSs9W4mhKMv+EYCpCrTFlTj4VQ==", + "license": "MIT", "dependencies": { "@supabase/node-fetch": "^2.6.14" } }, "node_modules/@supabase/supabase-js": { - "version": "2.40.0", - "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.40.0.tgz", - "integrity": "sha512-XF8OrsA13DYBL074sHH4M0NhXJCWhQ0R5JbVeVUytZ0coPMS9krRdzxl+0c4z4LLjqbm/Wdz0UYlTYM9MgnDag==", + "version": "2.57.4", + "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.57.4.tgz", + "integrity": "sha512-LcbTzFhHYdwfQ7TRPfol0z04rLEyHabpGYANME6wkQ/kLtKNmI+Vy+WEM8HxeOZAtByUFxoUTTLwhXmrh+CcVw==", + "license": "MIT", "dependencies": { - "@supabase/functions-js": "2.1.5", - "@supabase/gotrue-js": "2.62.2", + "@supabase/auth-js": "2.71.1", + "@supabase/functions-js": "2.4.6", "@supabase/node-fetch": "2.6.15", - "@supabase/postgrest-js": "1.9.2", - "@supabase/realtime-js": "2.9.3", - "@supabase/storage-js": "2.5.5" + "@supabase/postgrest-js": "1.21.4", + "@supabase/realtime-js": "2.15.5", + "@supabase/storage-js": "2.12.1" } }, + "node_modules/@types/chai": { + "version": "4.3.20", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.20.tgz", + "integrity": "sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ==" + }, + "node_modules/@types/cookiejar": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@types/cookiejar/-/cookiejar-2.1.5.tgz", + "integrity": "sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q==" + }, "node_modules/@types/node": { - "version": "20.11.30", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz", - "integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.1.0.tgz", + "integrity": "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==", "dependencies": { - "undici-types": "~5.26.4" + "undici-types": "~7.8.0" } }, "node_modules/@types/phoenix": { - "version": "1.6.4", - "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.4.tgz", - "integrity": "sha512-B34A7uot1Cv0XtaHRYDATltAdKx0BvVKNgYNqE4WjtPUa4VQJM7kxeXcVKaH+KS+kCmZ+6w+QaUdcljiheiBJA==" + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.6.tgz", + "integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==", + "license": "MIT" + }, + "node_modules/@types/superagent": { + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/@types/superagent/-/superagent-4.1.13.tgz", + "integrity": "sha512-YIGelp3ZyMiH0/A09PMAORO0EBGlF5xIKfDpK74wdYvWUs2o96b5CItJcWPdH409b7SAXIIG6p8NdU/4U2Maww==", + "dependencies": { + "@types/cookiejar": "*", + "@types/node": "*" + } }, "node_modules/@types/ws": { - "version": "8.5.10", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", - "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "license": "MIT", "dependencies": { "@types/node": "*" } @@ -117,20 +271,132 @@ "node": ">= 0.6" } }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/append-field": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", + "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "engines": { + "node": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/aws-ssl-profiles": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/aws-ssl-profiles/-/aws-ssl-profiles-1.1.2.tgz", + "integrity": "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==", + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/axios": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", + "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/base64-arraybuffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", + "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/bcryptjs": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==" }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -140,7 +406,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -150,11 +416,65 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==" + }, "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -163,16 +483,25 @@ "node": ">= 0.8" } }, - "node_modules/call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", "dependencies": { - "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" }, "engines": { "node": ">= 0.4" @@ -181,6 +510,215 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chai": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chai-http": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/chai-http/-/chai-http-4.4.0.tgz", + "integrity": "sha512-uswN3rZpawlRaa5NiDUHcDZ3v2dw5QgLyAwnQ2tnVNuP7CwIsOFuYJ0xR1WiR7ymD4roBnJIzOUep7w9jQMFJA==", + "dependencies": { + "@types/chai": "4", + "@types/superagent": "4.1.13", + "charset": "^1.0.1", + "cookiejar": "^2.1.4", + "is-ip": "^2.0.0", + "methods": "^1.1.2", + "qs": "^6.11.2", + "superagent": "^8.0.9" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/charset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/charset/-/charset-1.0.1.tgz", + "integrity": "sha512-6dVyOOYjpfFcL1Y4qChrAoQLRHvj2ziyhcm0QJlhOcAhykL/k1kTUPbeo+87MNRTRdk2OIIsIXbuF3x2wi5EXg==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/component-emitter": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", + "integrity": "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "engines": [ + "node >= 0.8" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/concurrently": { + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-8.2.2.tgz", + "integrity": "sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.2", + "date-fns": "^2.30.0", + "lodash": "^4.17.21", + "rxjs": "^7.8.1", + "shell-quote": "^1.8.1", + "spawn-command": "0.0.2", + "supports-color": "^8.1.1", + "tree-kill": "^1.2.2", + "yargs": "^17.7.2" + }, + "bin": { + "conc": "dist/bin/concurrently.js", + "concurrently": "dist/bin/concurrently.js" + }, + "engines": { + "node": "^14.13.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/open-cli-tools/concurrently?sponsor=1" + } + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -201,9 +739,9 @@ } }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "engines": { "node": ">= 0.6" } @@ -213,6 +751,44 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, + "node_modules/cookiejar": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/date-fns": { + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", + "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.21.0" + }, + "engines": { + "node": ">=0.11" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/date-fns" + } + }, "node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -221,20 +797,42 @@ "ms": "2.0.0" } }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", "engines": { - "node": ">= 0.4" + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" } }, "node_modules/denque": { @@ -262,10 +860,27 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/dotenv": { - "version": "16.4.5", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", - "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", "engines": { "node": ">=12" }, @@ -273,6 +888,19 @@ "url": "https://dotenvx.com" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", @@ -286,21 +914,23 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "engines": { "node": ">= 0.8" } }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", "engines": { "node": ">= 0.4" } @@ -313,11 +943,55 @@ "node": ">= 0.4" } }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "engines": { + "node": ">=6" + } + }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", @@ -327,36 +1001,36 @@ } }, "node_modules/express": { - "version": "4.19.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.1.tgz", - "integrity": "sha512-K4w1/Bp7y8iSiVObmCrtq8Cs79XjJc/RU2YYkZQ7wpUu5ZyZ7MtPHkqoMz4pf+mgXfNvo2qft8D9OnrH2ABk9w==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -365,15 +1039,88 @@ }, "engines": { "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", + "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/express-validator": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/express-validator/-/express-validator-7.2.1.tgz", + "integrity": "sha512-CjNE6aakfpuwGaHQZ3m8ltCG2Qvivd7RHtVMS/6nVxOM7xVGqr4bhflsm4+N5FP5zI7Zxp+Hae+9RE+o8e3ZOQ==", + "dependencies": { + "lodash": "^4.17.21", + "validator": "~13.12.0" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/express/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "node_modules/fill-keys": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/fill-keys/-/fill-keys-1.0.2.tgz", + "integrity": "sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA==", + "dev": true, + "dependencies": { + "is-object": "~1.0.1", + "merge-descriptors": "~1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "dependencies": { "debug": "2.6.9", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", @@ -384,6 +1131,77 @@ "node": ">= 0.8" } }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", + "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", + "dependencies": { + "@paralleldrive/cuid2": "^2.2.2", + "dezalgo": "^1.0.4", + "once": "^1.4.0", + "qs": "^6.11.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -400,6 +1218,24 @@ "node": ">= 0.6" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -416,16 +1252,37 @@ "is-property": "^1.0.2" } }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "engines": { + "node": "*" + } + }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -434,32 +1291,71 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gopd": { + "node_modules/get-proto": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", "dependencies": { - "get-intrinsic": "^1.1.3" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dependencies": { - "es-define-property": "^1.0.0" + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "engines": { "node": ">= 0.4" }, @@ -467,10 +1363,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dependencies": { + "has-symbols": "^1.0.3" + }, "engines": { "node": ">= 0.4" }, @@ -489,6 +1388,22 @@ "node": ">= 0.4" } }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "bin": { + "he": "bin/he" + } + }, + "node_modules/helmet": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-8.1.0.tgz", + "integrity": "sha512-jOiHyAZsmnr8LqoPGmCjYAaiuWwjAPLgY8ZX2XrmHawt99/u1y6RgrZMTeoPfpUbV96HOalYgz1qzkRbw54Pmg==", + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/http-errors": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", @@ -504,28 +1419,135 @@ "node": ">= 0.8" } }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-ip": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ip/-/is-ip-2.0.0.tgz", + "integrity": "sha512-9MTn0dteHETtyUx8pxqMwg5hMBi3pvlyglJ+b79KOCca0po23337LbVV2Hl4xmMvfw++ljnO0/+5G6G+0Szh6g==", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" + "ip-regex": "^2.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "node_modules/is-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", + "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "engines": { - "node": ">= 0.10" + "node": ">=8" } }, "node_modules/is-property": { @@ -533,6 +1555,33 @@ "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", "integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==" }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/jsonwebtoken": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", @@ -559,12 +1608,17 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, + "node_modules/just-extend": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz", + "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==" + }, "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz", + "integrity": "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==", "dependencies": { - "buffer-equal-constant-time": "1.0.1", + "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } @@ -578,6 +1632,25 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, "node_modules/lodash.includes": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", @@ -613,17 +1686,62 @@ "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/long": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", - "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==" + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==" + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dependencies": { + "get-func-name": "^2.0.1" + } }, "node_modules/lru-cache": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz", - "integrity": "sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==", + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/lru.min": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/lru.min/-/lru.min-1.1.2.tgz", + "integrity": "sha512-Nv9KddBcQSlQopmBHXSsZVY5xsdlZkdH/Iey0BlcBYggMd4two7cZnKOK9vmy3nY0O5RGH99z1PCeTpPqszUYg==", + "engines": { + "bun": ">=1.0.0", + "deno": ">=1.30.0", + "node": ">=8.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wellwelwel" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", "engines": { - "node": ">=16.14" + "node": ">= 0.4" } }, "node_modules/media-typer": { @@ -635,9 +1753,12 @@ } }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/methods": { "version": "1.1.2", @@ -677,21 +1798,158 @@ "node": ">= 0.6" } }, + "node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mocha": { + "version": "10.8.2", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.8.2.tgz", + "integrity": "sha512-VZlYo/WE8t1tstuRmqgeyBgCbJc/lEdopaa+axcKzTBJ+UIdlAB9XnmvTCAH4pwR4ElNInaedhEBmZD8iCSVEg==", + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/mocha/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/mocha/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/mocha/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/mocha/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/module-not-found-error": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz", + "integrity": "sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g==", + "dev": true + }, "node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/multer": { + "version": "1.4.5-lts.2", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz", + "integrity": "sha512-VzGiVigcG9zUAoCNU+xShztrlr1auZOlurXynNvO9GiWD1/mTBbUljOKY+qMeazBqXgRnjzeEgJI/wyjJUHg9A==", + "deprecated": "Multer 1.x is impacted by a number of vulnerabilities, which have been patched in 2.x. You should upgrade to the latest 2.x version.", + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } + }, "node_modules/mysql2": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.2.tgz", - "integrity": "sha512-3Cwg/UuRkAv/wm6RhtPE5L7JlPB877vwSF6gfLAS68H+zhH+u5oa3AieqEd0D0/kC3W7qIhYbH419f7O9i/5nw==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.14.2.tgz", + "integrity": "sha512-YD6mZMeoypmheHT6b2BrVmQFvouEpRICuvPIREulx2OvP1xAxxeqkMQqZSTBefv0PiOBKGYFa2zQtY+gf/4eQw==", "dependencies": { + "aws-ssl-profiles": "^1.1.1", "denque": "^2.1.0", "generate-function": "^2.3.1", "iconv-lite": "^0.6.3", "long": "^5.2.1", - "lru-cache": "^8.0.0", + "lru.min": "^1.0.0", "named-placeholders": "^1.1.3", "seq-queue": "^0.0.5", "sqlstring": "^2.3.2" @@ -722,14 +1980,6 @@ "node": ">=12.0.0" } }, - "node_modules/named-placeholders/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "engines": { - "node": ">=12" - } - }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -738,10 +1988,81 @@ "node": ">= 0.6" } }, + "node_modules/nise": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/nise/-/nise-6.1.1.tgz", + "integrity": "sha512-aMSAzLVY7LyeM60gvBS423nBmIPP+Wy7St7hsb+8/fc1HmeoHJfLO8CKse4u3BtOZvQLJghYPI2i/1WZrEj5/g==", + "dependencies": { + "@sinonjs/commons": "^3.0.1", + "@sinonjs/fake-timers": "^13.0.1", + "@sinonjs/text-encoding": "^0.7.3", + "just-extend": "^6.2.0", + "path-to-regexp": "^8.1.0" + } + }, + "node_modules/nise/node_modules/@sinonjs/fake-timers": { + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz", + "integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==", + "dependencies": { + "@sinonjs/commons": "^3.0.1" + } + }, + "node_modules/nise/node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "engines": { + "node": ">=16" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nutrihelp-api": { + "resolved": "", + "link": true + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -757,6 +2078,42 @@ "node": ">= 0.8" } }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -765,10 +2122,56 @@ "node": ">= 0.8" } }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "engines": { + "node": "*" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/proxy-addr": { "version": "2.0.7", @@ -782,12 +2185,28 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/proxyquire": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/proxyquire/-/proxyquire-2.1.3.tgz", + "integrity": "sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg==", + "dev": true, + "dependencies": { + "fill-keys": "^1.0.2", + "module-not-found-error": "^1.0.1", + "resolve": "^1.11.1" + } + }, "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.1.0" }, "engines": { "node": ">=0.6" @@ -796,6 +2215,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -804,18 +2231,85 @@ "node": ">= 0.6" } }, - "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" + "tslib": "^2.1.0" } }, "node_modules/safe-buffer": { @@ -843,12 +2337,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", - "dependencies": { - "lru-cache": "^6.0.0" - }, + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "bin": { "semver": "bin/semver.js" }, @@ -856,21 +2347,10 @@ "node": ">=10" } }, - "node_modules/semver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -890,6 +2370,14 @@ "node": ">= 0.8.0" } }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -900,50 +2388,105 @@ "resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz", "integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==" }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dependencies": { + "randombytes": "^2.1.0" + } + }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "dependencies": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" }, "engines": { "node": ">= 0.8.0" } }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", "dependencies": { - "define-data-property": "^1.1.4", "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/side-channel": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", "dependencies": { - "call-bind": "^1.0.7", + "call-bound": "^1.0.2", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -952,6 +2495,45 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/sinon": { + "version": "18.0.1", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-18.0.1.tgz", + "integrity": "sha512-a2N2TDY1uGviajJ6r4D1CyRAkzE9NNVlYOV1wX5xQDuAk0ONgzgRl0EjCQuRCPxOwp13ghsMwt9Gdldujs39qw==", + "dependencies": { + "@sinonjs/commons": "^3.0.1", + "@sinonjs/fake-timers": "11.2.2", + "@sinonjs/samsam": "^8.0.0", + "diff": "^5.2.0", + "nise": "^6.0.0", + "supports-color": "^7" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/spawn-command": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2.tgz", + "integrity": "sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==", + "dev": true + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, "node_modules/sqlstring": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.3.tgz", @@ -968,6 +2550,174 @@ "node": ">= 0.8" } }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/superagent": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.1.2.tgz", + "integrity": "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA==", + "deprecated": "Please upgrade to superagent v10.2.2+, see release notes at https://github.com/forwardemail/superagent/releases/tag/v10.2.2 - maintenance is supported by Forward Email @ https://forwardemail.net", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.4", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.1.2", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.11.0", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/swagger-ui-dist": { + "version": "5.27.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-5.27.0.tgz", + "integrity": "sha512-tS6LRyBhY6yAqxrfsA9IYpGWPUJOri6sclySa7TdC7XQfGLvTwDY531KLgfQwHEtQsn+sT4JlUspbeQDBVGWig==", + "dependencies": { + "@scarf/scarf": "=1.4.0" + } + }, + "node_modules/swagger-ui-express": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/swagger-ui-express/-/swagger-ui-express-5.0.1.tgz", + "integrity": "sha512-SrNU3RiBGTLLmFU8GIJdOdanJTl4TOmT27tt3bWWHppqYmAZ6IDuEuBvMU6nZq0zLEe6b/1rACXCgLZqO6ZfrA==", + "dependencies": { + "swagger-ui-dist": ">=5.0.0" + }, + "engines": { + "node": ">= v0.10.32" + }, + "peerDependencies": { + "express": ">=4.0.0 || >=5.0.0-beta" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", @@ -981,6 +2731,29 @@ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "node_modules/tree-kill": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", + "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", + "dev": true, + "bin": { + "tree-kill": "cli.js" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true + }, + "node_modules/type-detect": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", + "engines": { + "node": ">=4" + } + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -993,10 +2766,15 @@ "node": ">= 0.6" } }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" + }, "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz", + "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==" }, "node_modules/unpipe": { "version": "1.0.0", @@ -1006,6 +2784,11 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1014,6 +2797,14 @@ "node": ">= 0.4.0" } }, + "node_modules/validator": { + "version": "13.12.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.12.0.tgz", + "integrity": "sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg==", + "engines": { + "node": ">= 0.10" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -1036,10 +2827,37 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/workerpool": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, "node_modules/ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "license": "MIT", "engines": { "node": ">=10.0.0" }, @@ -1056,10 +2874,142 @@ } } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yamljs": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/yamljs/-/yamljs-0.3.0.tgz", + "integrity": "sha512-C/FsVVhht4iPQYXOInoxUM/1ELSf9EsgKH34FofQOp6hwCPrW4vG4w5++TED3xRUo8gD7l0P1J1dLlDYzODsTQ==", + "dependencies": { + "argparse": "^1.0.7", + "glob": "^7.0.5" + }, + "bin": { + "json2yaml": "bin/json2yaml", + "yaml2json": "bin/yaml2json" + } + }, + "node_modules/yamljs/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/yamljs/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/yamljs/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/yamljs/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } } } diff --git a/package.json b/package.json index 59f136a..47580ea 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,9 @@ "description": "nutrihelp-api", "main": "server.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "test:rce": "mocha ./test/costEstimationTest.js", + "test": "concurrently -k \"node server.js\" \"mocha --timeout 10000 --exit\"", + "start": "node server.js" }, "keywords": [ "NutriHelp", @@ -15,11 +17,35 @@ "author": "Gopher Industries", "license": "ISC", "dependencies": { - "@supabase/supabase-js": "^2.40.0", + "@sendgrid/mail": "^8.1.5", + "@supabase/supabase-js": "^2.56.0", + "base64-arraybuffer": "^1.0.2", "bcryptjs": "^2.4.3", - "dotenv": "^16.4.5", + "chai": "^4.5.0", + "chai-http": "^4.4.0", + "cors": "^2.8.5", + "dotenv": "^16.6.1", "express": "^4.19.1", + "express-rate-limit": "^7.5.0", + "express-validator": "^7.2.1", + "helmet": "^8.1.0", "jsonwebtoken": "^9.0.2", - "mysql2": "^3.9.2" + "mocha": "^10.7.0", + "multer": "^1.4.5-lts.1", + "mysql2": "^3.9.2", + "node-fetch": "2.7.0", + "nutrihelp-api": "file:", + "sinon": "^18.0.0", + "swagger-ui-express": "^5.0.0", + "yamljs": "^0.3.0" + }, + "devDependencies": { + "axios": "^1.8.4", + "concurrently": "^8.2.2", + "form-data": "^4.0.2", + "proxyquire": "^2.1.3" + }, + "directories": { + "test": "test" } } diff --git a/prediction_models/best_model_class.hdf5 b/prediction_models/best_model_class.hdf5 new file mode 100644 index 0000000..a2997c8 Binary files /dev/null and b/prediction_models/best_model_class.hdf5 differ diff --git a/prediction_models/model.txt b/prediction_models/model.txt new file mode 100644 index 0000000..013ab46 --- /dev/null +++ b/prediction_models/model.txt @@ -0,0 +1 @@ +model file goes here \ No newline at end of file diff --git a/rateLimiter.js b/rateLimiter.js new file mode 100644 index 0000000..02dbb05 --- /dev/null +++ b/rateLimiter.js @@ -0,0 +1,16 @@ +// rateLimiter.js +const rateLimit = require('express-rate-limit'); + +const uploadLimiter = rateLimit({ + windowMs: 10 * 60 * 1000, // 10 minutes + max: 5, // Limit to 5 uploads per 10 mins + message: { + success: false, + message: 'Too many uploads from this IP. Please try again later.', + }, + standardHeaders: true, + legacyHeaders: false, +}); + +module.exports = { uploadLimiter }; + \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..7e2b7af --- /dev/null +++ b/requirements.txt @@ -0,0 +1,6 @@ +tensorflow==2.17.0 +# keras==2.15.0 +numpy>=1.26.0 +pillow==9.5.0 +h5py>=3.10.0 +python-docx \ No newline at end of file diff --git a/routes/account.js b/routes/account.js new file mode 100644 index 0000000..9b9e28a --- /dev/null +++ b/routes/account.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const controller = require("../controller/accountController"); + +router.route('/').get(controller.getAllAccount); + +module.exports = router; \ No newline at end of file diff --git a/routes/appointment.js b/routes/appointment.js new file mode 100644 index 0000000..f8194ec --- /dev/null +++ b/routes/appointment.js @@ -0,0 +1,13 @@ +const express = require('express'); +const router = express.Router(); +const appointmentController = require('../controller/appointmentController.js'); +const { appointmentValidator } = require('../validators/appointmentValidator.js'); +const validate = require('../middleware/validateRequest.js'); + +// POST route for /api/appointments to save appointment data +router.route('/').post(appointmentValidator, validate, appointmentController.saveAppointment); + +// GET route for /api/appointments to retrieve all appointment data +router.route('/').get(appointmentController.getAppointments); + +module.exports = router; \ No newline at end of file diff --git a/routes/articles.js b/routes/articles.js new file mode 100644 index 0000000..c256dc4 --- /dev/null +++ b/routes/articles.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const { searchHealthArticles } = require('../controller/healthArticleController'); + +router.get('/', searchHealthArticles); + +module.exports = router; diff --git a/routes/auth.js b/routes/auth.js new file mode 100644 index 0000000..4b79947 --- /dev/null +++ b/routes/auth.js @@ -0,0 +1,15 @@ +const express = require('express'); +const router = express.Router(); + +const authController = require('../controller/authController'); + +// ⬅️ Existing route +router.post('/log-login-attempt', authController.logLoginAttempt); + +// ✅ New route for email verification request +router.post('/request-email-verification', authController.requestEmailVerification); + +router.get('/__ping', (req, res) => res.json({ ok: true, route: 'auth' })); +router.get('/verify-email/:token', authController.verifyEmailToken); + +module.exports = router; diff --git a/routes/chatbot.js b/routes/chatbot.js new file mode 100644 index 0000000..3ff8bfa --- /dev/null +++ b/routes/chatbot.js @@ -0,0 +1,14 @@ +const express = require('express'); +const router = express.Router(); +const chatbotController = require('../controller/chatbotController'); + +router.route('/query').post(chatbotController.getChatResponse); + +// router.route('/chat').post(chatbotController.getChatResponse); +router.route('/add_urls').post(chatbotController.addURL); +router.route('/add_pdfs').post(chatbotController.addPDF); + +router.route('/history').post(chatbotController.getChatHistory); +router.route('/history').delete(chatbotController.clearChatHistory); + +module.exports = router; diff --git a/routes/contactus.js b/routes/contactus.js index f342093..03bf3a9 100644 --- a/routes/contactus.js +++ b/routes/contactus.js @@ -1,8 +1,17 @@ const express = require("express"); -const router = express.Router(); +const router = express.Router(); const controller = require('../controller/contactusController.js'); -router.route('/').post(function(req,res) { +// Import the validation rule and middleware +const { contactusValidator } = require('../validators/contactusValidator.js'); +const validate = require('../middleware/validateRequest.js'); +const { formLimiter } = require('../middleware/rateLimiter'); // rate limiter added + +// router.route('/').post(contactusValidator, validate, (req,res) => { +// controller.contactus(req, res); +// }); +// Apply rate limiter and validation before the controller +router.post('/', formLimiter, contactusValidator, validate, (req, res) => { controller.contactus(req, res); }); diff --git a/routes/costEstimation.js b/routes/costEstimation.js new file mode 100644 index 0000000..3ed6e27 --- /dev/null +++ b/routes/costEstimation.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const estimatedCostController = require('../controller/estimatedCostController'); + +router.route('/:recipe_id').get(estimatedCostController.getCost); + +module.exports = router; diff --git a/routes/filter.js b/routes/filter.js new file mode 100644 index 0000000..1220941 --- /dev/null +++ b/routes/filter.js @@ -0,0 +1,9 @@ +const express = require('express'); +const { filterRecipes } = require('../controller/filterController'); + +const router = express.Router(); + +// Define the /filter route +router.get('/', filterRecipes); + +module.exports = router; \ No newline at end of file diff --git a/routes/fooddata.js b/routes/fooddata.js new file mode 100644 index 0000000..8ebe807 --- /dev/null +++ b/routes/fooddata.js @@ -0,0 +1,14 @@ +const express = require("express"); +const router = express.Router(); +const controller = require("../controller/foodDataController"); + + +router.route("/dietaryrequirements").get(controller.getAllDietaryRequirements); +router.route("/cuisines").get(controller.getAllCuisines); +router.route("/allergies").get(controller.getAllAllergies); +router.route("/ingredients").get(controller.getAllIngredients); +router.route("/cookingmethods").get(controller.getAllCookingMethods); +router.route("/spicelevels").get(controller.getAllSpiceLevels); +router.route("/healthconditions").get(controller.getAllHealthConditions); + +module.exports = router; \ No newline at end of file diff --git a/routes/healthNews.js b/routes/healthNews.js new file mode 100644 index 0000000..7ad2e8f --- /dev/null +++ b/routes/healthNews.js @@ -0,0 +1,250 @@ +const express = require('express'); +const router = express.Router(); +const healthNewsController = require('../controller/healthNewsController'); + +/** + * @api {get} /api/health-news Health News API + * @apiName HealthNewsAPI + * @apiGroup Health News + * @apiDescription Comprehensive API for health news management with flexible filtering + * + * @apiParam {String} [action] Action to perform (optional - the API will auto-detect based on parameters): + * - "filter" (default): Filter health news articles using flexible criteria + * - "getById": Get specific health news by ID (specify id parameter) + * - "getByCategory": Get news by category (specify categoryId parameter) + * - "getByAuthor": Get news by author (specify authorId parameter) + * - "getByTag": Get news by tag (specify tagId parameter) + * - "getAllCategories": Get all categories + * - "getAllAuthors": Get all authors + * - "getAllTags": Get all tags + * + * @apiParam {String} [id] Health news ID + * @apiParam {String} [categoryId] Category ID + * @apiParam {String} [authorId] Author ID + * @apiParam {String} [tagId] Tag ID + * + * @apiParam {String} [title] Filter news by title (partial match) + * @apiParam {String} [content] Filter news by content (partial match) + * @apiParam {String} [author_name] Filter news by author name (partial match) + * @apiParam {String} [category_name] Filter news by category name (partial match) + * @apiParam {String} [tag_name] Filter news by tag name (partial match) + * @apiParam {String} [start_date] Filter news published on or after this date (ISO format) + * @apiParam {String} [end_date] Filter news published on or before this date (ISO format) + * @apiParam {String} [sort_by="published_at"] Field to sort by + * @apiParam {String} [sort_order="desc"] Sort order ("asc" or "desc") + * @apiParam {Number} [limit=20] Number of records to return + * @apiParam {Number} [page=1] Page number for pagination + * @apiParam {String} [include_details="true"] Whether to include full relationship details ("true" or "false") + * + * @apiSuccess {Object} response API response + * @apiSuccess {Boolean} response.success Success status + * @apiSuccess {Array/Object} response.data Requested data + * @apiSuccess {Object} [response.pagination] Pagination information + */ +router.get('/', async (req, res) => { + // Auto-detect the appropriate action based on provided parameters + let action = req.query.action || 'filter'; + + // If no explicit action is provided, determine based on parameters + if (!req.query.action) { + if (req.query.id) { + action = 'getById'; + } else if (req.query.categoryId) { + action = 'getByCategory'; + } else if (req.query.authorId) { + action = 'getByAuthor'; + } else if (req.query.tagId) { + action = 'getByTag'; + } else if (req.query.type === 'categories') { + action = 'getAllCategories'; + } else if (req.query.type === 'authors') { + action = 'getAllAuthors'; + } else if (req.query.type === 'tags') { + action = 'getAllTags'; + } + } + + try { + switch (action) { + case 'filter': + return await healthNewsController.filterNews(req, res); + + case 'getAll': + return await healthNewsController.getAllNews(req, res); + + case 'getById': + if (!req.query.id) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: id' + }); + } + req.params.id = req.query.id; + return await healthNewsController.getNewsById(req, res); + + case 'getByCategory': + if (!req.query.categoryId) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: categoryId' + }); + } + req.params.id = req.query.categoryId; + return await healthNewsController.getNewsByCategory(req, res); + + case 'getByAuthor': + if (!req.query.authorId) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: authorId' + }); + } + req.params.id = req.query.authorId; + return await healthNewsController.getNewsByAuthor(req, res); + + case 'getByTag': + if (!req.query.tagId) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: tagId' + }); + } + req.params.id = req.query.tagId; + return await healthNewsController.getNewsByTag(req, res); + + case 'getAllCategories': + return await healthNewsController.getAllCategories(req, res); + + case 'getAllAuthors': + return await healthNewsController.getAllAuthors(req, res); + + case 'getAllTags': + return await healthNewsController.getAllTags(req, res); + + default: + return res.status(400).json({ + success: false, + message: `Unknown action: ${action}` + }); + } + } catch (error) { + return res.status(500).json({ + success: false, + message: error.message + }); + } +}); + +/** + * @api {post} /api/health-news Health News API + * @apiName HealthNewsCreateAPI + * @apiGroup Health News + * @apiDescription Create health news articles and related entities + * + * @apiParam {String} [action] Action to perform (optional - will auto-detect): + * - "createNews" (default): Create a new health news article + * - "createCategory": Create a category (only requires name and description fields) + * - "createAuthor": Create an author (only requires name and bio fields) + * - "createTag": Create a tag (only requires name field) + * + * @apiParam {Object} body Request body with data based on the action + * + * @apiSuccess {Object} response API response + * @apiSuccess {Boolean} response.success Success status + * @apiSuccess {Object} response.data Created entity data + */ +router.post('/', async (req, res) => { + // Auto-detect the operation based on the body fields + let action = req.query.action || 'createNews'; + + // If no explicit action is provided, determine based on body fields + if (!req.query.action) { + const body = req.body; + if (body.name && !body.content) { + if (body.bio) { + action = 'createAuthor'; + } else if (body.description) { + action = 'createCategory'; + } else { + action = 'createTag'; + } + } + } + + try { + switch (action) { + case 'createNews': + return await healthNewsController.createNews(req, res); + + case 'createCategory': + return await healthNewsController.createCategory(req, res); + + case 'createAuthor': + return await healthNewsController.createAuthor(req, res); + + case 'createTag': + return await healthNewsController.createTag(req, res); + + default: + return res.status(400).json({ + success: false, + message: `Unknown action: ${action}` + }); + } + } catch (error) { + return res.status(500).json({ + success: false, + message: error.message + }); + } +}); + +/** + * @api {put} /api/health-news Health News API + * @apiName HealthNewsUpdateAPI + * @apiGroup Health News + * @apiDescription Update health news articles + * + * @apiParam {String} id The ID of the news article to update + * + * @apiSuccess {Object} response API response + * @apiSuccess {Boolean} response.success Success status + * @apiSuccess {Object} response.data Updated news data + */ +router.put('/', async (req, res) => { + if (!req.query.id) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: id' + }); + } + + req.params.id = req.query.id; + return await healthNewsController.updateNews(req, res); +}); + +/** + * @api {delete} /api/health-news Health News API + * @apiName HealthNewsDeleteAPI + * @apiGroup Health News + * @apiDescription Delete health news articles + * + * @apiParam {String} id The ID of the news article to delete + * + * @apiSuccess {Object} response API response + * @apiSuccess {Boolean} response.success Success status + * @apiSuccess {String} response.message Success message + */ +router.delete('/', async (req, res) => { + if (!req.query.id) { + return res.status(400).json({ + success: false, + message: 'Missing required parameter: id' + }); + } + + req.params.id = req.query.id; + return await healthNewsController.deleteNews(req, res); +}); + +module.exports = router; \ No newline at end of file diff --git a/routes/imageClassification.js b/routes/imageClassification.js new file mode 100644 index 0000000..06f30b0 --- /dev/null +++ b/routes/imageClassification.js @@ -0,0 +1,36 @@ +const express = require('express'); +const predictionController = require('../controller/imageClassificationController.js'); +const { validateImageUpload } = require('../validators/imageValidator.js'); +const router = express.Router(); +const multer = require('multer'); +const fs = require('fs'); + +const uploadsDir = 'uploads'; +if (!fs.existsSync(uploadsDir)){ + fs.mkdirSync(uploadsDir, { recursive: true }); +} + +const upload = multer({ + dest: 'uploads/', + fileFilter: (req, file, cb) => cb(null, ['image/jpeg', 'image/png'].includes(file.mimetype)) +}); + +// Define route for receiving input data and returning predictions +router.post('/', upload.single('image'), validateImageUpload, (req, res) => { + // Check if a file was uploaded + // if (!req.file) { + // return res.status(400).json({ error: 'No image uploaded' }); + // } + + // Call the predictImage function from the controller with req and res objects + predictionController.predictImage(req, res); + + // Delete the uploaded file after processing + fs.unlink(req.file.path, (err) => { + if (err) { + console.error('Error deleting file:', err); + } + }); +}); + +module.exports = router; diff --git a/routes/index.js b/routes/index.js index ab5de88..92b9edb 100644 --- a/routes/index.js +++ b/routes/index.js @@ -2,4 +2,32 @@ module.exports = app => { app.use("/api/login", require('./login')); app.use("/api/signup", require('./signup')); app.use("/api/contactus", require('./contactus')); + app.use("/api/userfeedback", require('./userfeedback')); + app.use("/api/recipe", require('./recipe')); + app.use("/api/appointments", require('./appointment')); + app.use("/api/imageClassification", require('./imageClassification')); + app.use("/api/recipeImageClassification", require('./recipeImageClassification')); + app.use("/api/userprofile", require('./userprofile')); // get profile, update profile, update by identifier (email or username) + app.use("/api/userpassword", require('./userpassword')); + app.use("/api/fooddata", require('./fooddata')); + app.use("/api/user/preferences", require('./userPreferences')); + app.use("/api/mealplan", require('./mealplan')); + app.use("/api/account", require('./account')); + app.use('/api/notifications', require('./notifications')); + app.use('/api/filter', require('./filter')); + app.use('/api/substitution', require('./ingredientSubstitution')); + app.use('/api/auth', require('./auth')); + app.use('/api/recipe/cost', require('./costEstimation')); + app.use('/api/chatbot', require('./chatbot')); + // app.use('/api/obesity', require('./obesityPrediction')); + app.use('/api/upload', require('./upload')); + app.use('/api/upload', require('./upload')); + app.use("/api/articles", require('./articles')); + app.use('/api/chatbot', require('./chatbot')); + app.use('/api/medical-report', require('./medicalPrediction')); + app.use('/api/recipe/nutritionlog', require('./recipeNutritionlog')); + app.use('/api/recipe/scale', require('./recipeScaling')); + app.use('/api/water-intake', require('./waterIntake')); + app.use('/api/health-news', require('./healthNews')); + }; \ No newline at end of file diff --git a/routes/ingredientSubstitution.js b/routes/ingredientSubstitution.js new file mode 100644 index 0000000..eaed45a --- /dev/null +++ b/routes/ingredientSubstitution.js @@ -0,0 +1,8 @@ +const express = require("express"); +const router = express.Router(); +const controller = require("../controller/ingredientSubstitutionController"); + +// Route to get substitution options for a specific ingredient +router.route("/ingredient/:ingredientId").get(controller.getIngredientSubstitutions); + +module.exports = router; \ No newline at end of file diff --git a/routes/ingredients.js b/routes/ingredients.js new file mode 100644 index 0000000..e69de29 diff --git a/routes/login.js b/routes/login.js index c04a953..085b270 100644 --- a/routes/login.js +++ b/routes/login.js @@ -2,8 +2,19 @@ const express = require("express"); const router = express.Router(); const controller = require('../controller/loginController.js'); -router.route('/').post(function(req,res) { +// Import validation rules and middleware +const { loginValidator, mfaloginValidator } = require('../validators/loginValidator'); +const validate = require('../middleware/validateRequest'); +const { loginLimiter } = require('../middleware/rateLimiter'); // ✅ rate limiter added + +// POST /login +router.post('/', loginLimiter, loginValidator, validate, (req, res) => { controller.login(req, res); }); -module.exports = router; \ No newline at end of file +// POST /login/mfa +router.post('/mfa', loginLimiter, mfaloginValidator, validate, (req, res) => { + controller.loginMfa(req, res); +}); + +module.exports = router; diff --git a/routes/mealplan.js b/routes/mealplan.js new file mode 100644 index 0000000..e0bd6dc --- /dev/null +++ b/routes/mealplan.js @@ -0,0 +1,23 @@ +const express = require("express"); +const router = express.Router(); +const controller = require('../controller/mealplanController.js'); +const { addMealPlanValidation, getMealPlanValidation, deleteMealPlanValidation } = require('../validators/mealplanValidator.js'); +const validate = require('../middleware/validateRequest.js'); + +// Route to add a meal plan +router.route('/') + .post(addMealPlanValidation, validate, (req, res) => { + controller.addMealPlan(req, res); + }) + +// Route to get a meal plan + .get(getMealPlanValidation, validate, (req, res) => { + controller.getMealPlan(req, res); + }) + +// Route to delete a meal plan + .delete(deleteMealPlanValidation, validate, (req, res) => { + controller.deleteMealPlan(req, res); + }); + +module.exports = router; \ No newline at end of file diff --git a/routes/medicalPrediction.js b/routes/medicalPrediction.js new file mode 100644 index 0000000..e50fceb --- /dev/null +++ b/routes/medicalPrediction.js @@ -0,0 +1,8 @@ +const express = require('express'); +const router = express.Router(); +const medicalPredictionController = require('../controller/medicalPredictionController'); + +// router.route('/predict').post(obesityPredictionController.predict); +router.route('/retrieve').post(medicalPredictionController.predict); + +module.exports = router; diff --git a/routes/notifications.js b/routes/notifications.js new file mode 100644 index 0000000..f77921d --- /dev/null +++ b/routes/notifications.js @@ -0,0 +1,39 @@ +const express = require('express'); +const router = express.Router(); +const notificationController = require('../controller/notificationController'); +const { + validateCreateNotification, + validateUpdateNotification, + validateDeleteNotification +} = require('../validators/notificationValidator'); + +const validateResult = require('../middleware/validateRequest.js'); + +// Create a new notification +router.post( + '/', + validateCreateNotification, + validateResult, + notificationController.createNotification +); + +// Get notifications by user_id +router.get('/:user_id', notificationController.getNotificationsByUserId); + +// Update notification status by ID +router.put( + '/:id', + validateUpdateNotification, + validateResult, + notificationController.updateNotificationStatusById +); + +// Delete notification by ID +router.delete( + '/:id', + validateDeleteNotification, + validateResult, + notificationController.deleteNotificationById +); + +module.exports = router; diff --git a/routes/recipe.js b/routes/recipe.js new file mode 100644 index 0000000..089392b --- /dev/null +++ b/routes/recipe.js @@ -0,0 +1,13 @@ +const express = require('express'); +const router = express.Router(); +const recipeController = require('../controller/recipeController.js'); +const { validateRecipe } = require('../validators/recipeValidator.js'); +const validateRequest = require('../middleware/validateRequest.js'); + +// Validate and create recipe +router.post('/createRecipe', validateRecipe, validateRequest, recipeController.createAndSaveRecipe); + +router.post('/', recipeController.getRecipes); +router.delete('/', recipeController.deleteRecipe); + +module.exports = router; diff --git a/routes/recipeImageClassification.js b/routes/recipeImageClassification.js new file mode 100644 index 0000000..90d7bce --- /dev/null +++ b/routes/recipeImageClassification.js @@ -0,0 +1,67 @@ +const express = require('express'); +const predictionController = require('../controller/recipeImageClassificationController.js'); +const { validateRecipeImageUpload } = require('../validators/recipeImageValidator.js'); +const router = express.Router(); +const multer = require('multer'); +const fs = require('fs'); +const path = require('path'); + +// Ensure uploads directory exists +if (!fs.existsSync('./uploads')) { + fs.mkdirSync('./uploads', { recursive: true }); +} + +// Create temp directory for uploads +if (!fs.existsSync('./uploads/temp')) { + fs.mkdirSync('./uploads/temp', { recursive: true }); +} + +const storage = multer.diskStorage({ + destination: function (req, file, cb) { + cb(null, './uploads/temp/'); + }, + filename: function (req, file, cb) { + const uniquePrefix = Date.now() + '-'; + cb(null, uniquePrefix + file.originalname); + } +}); + +const fileFilter = (req, file, cb) => { + if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png') { + cb(null, true); + } else { + cb(new Error('Only JPG and PNG image files are allowed'), false); + } +}; + +// Initialize multer upload middleware +const upload = multer({ + storage: storage, + fileFilter: fileFilter, + limits: { + fileSize: 5 * 1024 * 1024 // 5MB max file size + } +}); + +// Define route for receiving input data and returning predictions +router.post( + '/', + upload.single('image'), + validateRecipeImageUpload, // 👈 validate image file + predictionController.predictRecipeImage +); + +// Error handling middleware +router.use((err, req, res, next) => { + if (err instanceof multer.MulterError) { + if (err.code === 'LIMIT_FILE_SIZE') { + return res.status(400).json({ error: 'File size exceeds 5MB limit' }); + } + return res.status(400).json({ error: `Upload error: ${err.message}` }); + } else if (err) { + return res.status(400).json({ error: err.message }); + } + next(); +}); + +module.exports = router; \ No newline at end of file diff --git a/routes/recipeNutritionlog.js b/routes/recipeNutritionlog.js new file mode 100644 index 0000000..7a902ed --- /dev/null +++ b/routes/recipeNutritionlog.js @@ -0,0 +1,29 @@ +const express = require('express'); +const router = express.Router(); +const { getRecipeNutritionByName } = require('../controller/recipeNutritionController'); + +/** + * @swagger + * /api/recipe/nutrition: + * get: + * summary: Get full nutrition info for a recipe by name + * parameters: + * - in: query + * name: name + * schema: + * type: string + * required: true + * description: Name of the recipe (case-insensitive) + * responses: + * 200: + * description: Nutrition data found + * 400: + * description: Missing query parameter + * 404: + * description: Recipe not found + * 500: + * description: Internal server error + */ +router.get('/', getRecipeNutritionByName); + +module.exports = router; \ No newline at end of file diff --git a/routes/recipeScaling.js b/routes/recipeScaling.js new file mode 100644 index 0000000..3255ee7 --- /dev/null +++ b/routes/recipeScaling.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const recipeScalingController = require('../controller/recipeScalingController'); + +router.route('/:recipe_id/:desired_servings').get(recipeScalingController.scaleRecipe); + +module.exports = router; \ No newline at end of file diff --git a/routes/routes.js b/routes/routes.js new file mode 100644 index 0000000..fc6f82f --- /dev/null +++ b/routes/routes.js @@ -0,0 +1,43 @@ +const express = require('express'); +const multer = require('multer'); +const path = require('path'); +const router = express.Router(); +const recipeImageClassificationController = require('../controllers/recipeImageClassificationController'); + +const storage = multer.diskStorage({ + destination: function(req, file, cb) { + cb(null, 'uploads/'); + }, + filename: function(req, file, cb) { + cb(null, 'image.jpg'); + } +}); + +const upload = multer({ + storage: storage, + limits: { fileSize: 10 * 1024 * 1024 }, // 10MB limit + fileFilter: function(req, file, cb) { + const filetypes = /jpeg|jpg|png/; + const mimetype = filetypes.test(file.mimetype); + const extname = filetypes.test(path.extname(file.originalname).toLowerCase()); + + if (mimetype && extname) { + return cb(null, true); + } + cb(new Error('Only .png, .jpg and .jpeg format allowed!')); + } +}); + +// Recipe Classification Route +router.post('/classify', upload.single('photo'), recipeImageClassificationController); + +router.use('/classify', (err, req, res, next) => { + console.error('Error in classification route:', err); + res.status(500).json({ + success: false, + message: 'An error occurred during image classification', + error: process.env.NODE_ENV === 'development' ? err.message : 'Internal server error' + }); +}); + +module.exports = router; \ No newline at end of file diff --git a/routes/signup.js b/routes/signup.js index c2573d5..1590956 100644 --- a/routes/signup.js +++ b/routes/signup.js @@ -2,8 +2,14 @@ const express = require("express"); const router = express.Router(); const controller = require('../controller/signupController.js'); -router.route('/').post(function(req,res) { +// Import the validation rule and middleware +const { registerValidation } = require('../validators/signupValidator.js'); +const validate = require('../middleware/validateRequest'); +const { signupLimiter } = require('../middleware/rateLimiter'); // rate limiter added + +// Apply rate limiter and validation before the controller +router.post('/', signupLimiter, registerValidation, validate, (req, res) => { controller.signup(req, res); }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/routes/upload.js b/routes/upload.js new file mode 100644 index 0000000..ffc752d --- /dev/null +++ b/routes/upload.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const uploadController = require('../controller/uploadController'); + +router.post('/', uploadController.uploadFile); + +module.exports = router; diff --git a/routes/uploadRoutes.js b/routes/uploadRoutes.js new file mode 100644 index 0000000..a300a3a --- /dev/null +++ b/routes/uploadRoutes.js @@ -0,0 +1,24 @@ +const express = require('express'); +const router = express.Router(); +const upload = require('../middleware/uploadMiddleware'); +const { uploadLimiter } = require('../rateLimiter'); + +const authenticateToken = require('../middleware/authenticateToken'); // ensures JWT is valid +const authorizeRoles = require('../middleware/authorizeRoles'); + +// ✅ Only admins can upload +router.post( + '/upload', + authenticateToken, + authorizeRoles(9), // role_id = 9 is admin + uploadLimiter, + upload.single('file'), + (req, res) => { + if (!req.file) { + return res.status(400).json({ message: 'No file uploaded' }); + } + res.status(200).json({ message: 'File uploaded successfully', file: req.file }); + } +); + +module.exports = router; \ No newline at end of file diff --git a/routes/userPreferences.js b/routes/userPreferences.js new file mode 100644 index 0000000..c76c748 --- /dev/null +++ b/routes/userPreferences.js @@ -0,0 +1,17 @@ +const express = require("express"); +const router = express.Router(); +const controller = require("../controller/userPreferencesController"); +const authenticateToken = require("../middleware/authenticateToken"); +const { validateUserPreferences } = require("../validators/userPreferencesValidator"); +const ValidateRequest = require("../middleware/validateRequest"); + +router.route("/").get(authenticateToken, controller.getUserPreferences); +router.post( + "/", + authenticateToken, + validateUserPreferences, + ValidateRequest, + controller.postUserPreferences + ); + +module.exports = router; \ No newline at end of file diff --git a/routes/userfeedback.js b/routes/userfeedback.js new file mode 100644 index 0000000..560a8bd --- /dev/null +++ b/routes/userfeedback.js @@ -0,0 +1,12 @@ +const express = require("express"); +const router = express.Router(); +const controller = require('../controller/userFeedbackController'); +const { feedbackValidation } = require('../validators/feedbackValidator.js'); +const validate = require('../middleware/validateRequest.js'); +const { formLimiter } = require('../middleware/rateLimiter'); // ✅ rate limiter added + +router.post('/', formLimiter, feedbackValidation, validate, (req, res) => { + controller.userfeedback(req, res); +}); + +module.exports = router; diff --git a/routes/userpassword.js b/routes/userpassword.js new file mode 100644 index 0000000..f420957 --- /dev/null +++ b/routes/userpassword.js @@ -0,0 +1,9 @@ +const express = require("express"); +const router = express.Router(); +const controller = require('../controller/userPasswordController.js'); + +router.route('/').put(function(req,res) { + controller.updateUserPassword(req, res); +}); + +module.exports = router; \ No newline at end of file diff --git a/routes/userprofile.js b/routes/userprofile.js new file mode 100644 index 0000000..e6937c1 --- /dev/null +++ b/routes/userprofile.js @@ -0,0 +1,16 @@ +const express = require("express"); +const router = express.Router(); +const controller = require('../controller/userProfileController.js'); +const updateUserProfileController = require('../controller/updateUserProfileController.js'); + +router.route('/').put(function(req,res) { + controller.updateUserProfile(req, res); +}); + +router.route('/').get(function(req,res) { + controller.getUserProfile(req, res); +}); + +router.put('/update-by-identifier', updateUserProfileController.updateUserProfile); + +module.exports = router; \ No newline at end of file diff --git a/routes/verify.js b/routes/verify.js new file mode 100644 index 0000000..6e255d4 --- /dev/null +++ b/routes/verify.js @@ -0,0 +1,60 @@ +// routes/verify.js +const express = require('express'); +const router = express.Router(); +const supabaseAdmin = require('../new_utils/supabaseAdmin'); // should use SERVICE_ROLE key + +router.get('/verify-email/:token', async (req, res) => { + const token = req.params.token; + if (!token) return res.status(400).send('Token missing'); + + try { + // 1) find token row (only what we need) + const { data: row, error: rowErr } = await supabaseAdmin + .from('email_verification_tokens') + .select('id, user_email, expires_at, verified_at') + .eq('token', token) + .single(); + + if (rowErr || !row) return res.status(400).send('Invalid or expired link'); + + // 2) already used? + if (row.verified_at) { + return res.status(400).send('This link was already used'); + } + + // 3) expired? (treat null as “no expiry”) + if (row.expires_at && new Date(row.expires_at) < new Date()) { + return res.status(400).send('This link has expired'); + } + + // 4) mark token as verified (single-use) + const now = new Date().toISOString(); + const { error: updErr } = await supabaseAdmin + .from('email_verification_tokens') + .update({ verified_at: now }) + .eq('id', row.id); + + if (updErr) { + console.error('[verify-email] token update error:', updErr); + return res.status(500).send('Failed to verify token'); + } + + // 5) success page (or redirect to frontend if you prefer) + const email = row.user_email; + return res.send(` + + Email verified + +

✅ Email verified

+

${email} has been verified via token.

+

This verification link is now single-use.

+ + + `); + } catch (err) { + console.error('verify-email error', err); + return res.status(500).send('Internal server error'); + } +}); + +module.exports = router; diff --git a/routes/waterIntake.js b/routes/waterIntake.js new file mode 100644 index 0000000..6b24f77 --- /dev/null +++ b/routes/waterIntake.js @@ -0,0 +1,8 @@ +const express = require('express'); +const router = express.Router(); +const { updateWaterIntake } = require('../controller/waterIntakeController'); + +router.post('/', updateWaterIntake); +console.log("Water Intake Route Loaded"); + +module.exports = router; diff --git a/server.js b/server.js index cdcc6a1..6e63624 100644 --- a/server.js +++ b/server.js @@ -1,17 +1,199 @@ -require('dotenv').config(); -const express = require('express'); +require("dotenv").config(); +const express = require("express"); + +const FRONTEND_ORIGIN = "http://localhost:3000"; + +const helmet = require('helmet'); +const cors = require("cors"); +const swaggerUi = require("swagger-ui-express"); +const yaml = require("yamljs"); +const { exec } = require("child_process"); +const bodyParser = require("body-parser"); +const multer = require("multer"); +const rateLimit = require('express-rate-limit'); // ✅ added +const uploadRoutes = require('./routes/uploadRoutes'); +const fs = require("fs"); +const path = require("path"); +const verifyRoutes = require('./routes/verify'); + + + +// Ensure uploads directory exists +const uploadsDir = path.join(__dirname, 'uploads'); +if (!fs.existsSync(uploadsDir)) { + try { + fs.mkdirSync(uploadsDir, { recursive: true }); + console.log("Created uploads directory"); + } catch (err) { + console.error("Error creating uploads directory:", err); + } +} + +// Create temp directory for uploads +const tempDir = path.join(__dirname, 'uploads', 'temp'); +if (!fs.existsSync(tempDir)) { + try { + fs.mkdirSync(tempDir, { recursive: true }); + console.log("Created temp uploads directory"); + } catch (err) { + console.error("Error creating temp uploads directory:", err); + } +} + +// Function to clean up old temporary files +function cleanupOldFiles() { + const now = Date.now(); + const ONE_DAY = 24 * 60 * 60 * 1000; // 24 hours in milliseconds + + // Clean temporary files + try { + const tempFiles = fs.readdirSync(tempDir); + console.log(`Checking ${tempFiles.length} temporary files for cleanup`); + + let deletedCount = 0; + tempFiles.forEach(file => { + const filePath = path.join(tempDir, file); + try { + const stats = fs.statSync(filePath); + // Delete files older than 1 day + if (now - stats.mtimeMs > ONE_DAY) { + fs.unlinkSync(filePath); + deletedCount++; + } + } catch (err) { + console.error(`Error checking file ${filePath}:`, err); + } + }); + + if (deletedCount > 0) { + console.log(`Cleaned up ${deletedCount} old temporary files`); + } + } catch (err) { + console.error("Error during file cleanup:", err); + } +} + +// Clean up temporary files on startup +cleanupOldFiles(); + +// Schedule cleanup to run every 3 hours +setInterval(cleanupOldFiles, 3 * 60 * 60 * 1000); const app = express(); -const port = process.env.PORT || 3000; + +const port = process.env.PORT || 80; let db = require("./dbConnection"); -app.use(express.urlencoded({ extended: true })); -app.use(express.json()); +// CORS +//app.options("*", cors({ origin: "http://localhost:3000" })); +//app.use(cors({ origin: "http://localhost:3000" })); + + +app.use(cors({ + origin: FRONTEND_ORIGIN, + credentials: true, + methods: ["GET","POST","PUT","PATCH","DELETE","OPTIONS"], + allowedHeaders: ["Content-Type","Authorization"] +})); + +app.options("*", cors({ + origin: FRONTEND_ORIGIN, + credentials: true, +})); + +app.use((req, res, next) => { + res.header("Access-Control-Allow-Credentials", "true"); + next(); +}); + +app.set("trust proxy", 1); + +// Helmet Security +app.use(helmet({ + contentSecurityPolicy: { + directives: { + defaultSrc: ["'self'"], + scriptSrc: ["'self'", "'unsafe-inline'", "https://cdn.jsdelivr.net"], + styleSrc: ["'self'", "'unsafe-inline'", "https://cdn.jsdelivr.net"], + objectSrc: ["'none'"], + }, + }, + crossOriginEmbedderPolicy: true, + referrerPolicy: { policy: "strict-origin-when-cross-origin" }, +})); + +// Global Rate Limiting Middleware +const limiter = rateLimit({ + windowMs: 15 * 60 * 1000, // 15 minutes + max: 1000, // Limit each IP to 1000 requests per windowMs + standardHeaders: true, + legacyHeaders: false, + message: { + status: 429, + error: "Too many requests, please try again later.", + }, +}); +app.use(limiter); // apply globally + +// Swagger Docs +const swaggerDocument = yaml.load("./index.yaml"); +app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(swaggerDocument)); + +// JSON & URL parser +app.use(express.json({ limit: "50mb" })); +app.use(express.urlencoded({ limit: "50mb", extended: true })); +app.use('/api', verifyRoutes); + +// Routes +const routes = require("./routes"); +routes(app); + +app.use("/api", uploadRoutes); +app.use("/uploads", express.static("uploads")); + +//signup +app.use("/api/signup", require("./routes/signup")); + +// Error handler +app.use((err, req, res, next) => { + if (err) { + res.status(400).json({ error: err.message }); + } else { + next(); + } +}); + +// Global error handler +app.use((err, req, res, next) => { + console.error("Unhandled error:", err); + res.status(500).json({ error: "Internal server error" }); +}); + +//verify Routes +app.use('/api', verifyRoutes); + -const routes = require('./routes') -routes(app) +// Dummy Email Verification Page for Testing +app.get('/verify-email/:token', (req, res) => { + const token = req.params.token; + res.send(` + + + Email Verification + + +

✅ Email Verification

+

Your token:

+ ${token} +

This is a dummy page for development testing only.

+ + + `); +}); -app.listen(port, () => { - console.log(`Server is running on port ${port}`); +// Start server +app.listen(port, async () => { + console.log(`Server is running on port ${port}`); + exec(`start http://localhost:${port}/api-docs`); }); \ No newline at end of file diff --git a/setup/README_FEEDBACK.md b/setup/README_FEEDBACK.md new file mode 100644 index 0000000..966db55 --- /dev/null +++ b/setup/README_FEEDBACK.md @@ -0,0 +1,111 @@ +# Image Classification Feedback System + +This system collects and analyzes user feedback on food image classifications to continuously improve the accuracy of the image classification API. + +## Setup Instructions + +### 1. Create the Supabase Table + +1. Log in to your Supabase dashboard. +2. Navigate to the SQL Editor. +3. Copy and paste the contents of `setup/create_feedback_table.sql`. +4. Run the SQL script to create the necessary table and policies. + +### 2. Configuration + +Make sure your `.env` file contains the Supabase connection details: + +``` +SUPABASE_URL=your_supabase_url +SUPABASE_ANON_KEY=your_supabase_anon_key +``` + +## Using the Feedback System + +### Collecting Feedback via CLI Tool + +The command-line tool allows you to submit feedback for incorrectly classified images: + +```bash +node collect_feedback.js +``` + +Example: +```bash +node collect_feedback.js ./uploads/sushi.jpg "sushi" +``` + +### Collecting Feedback in the API + +To collect feedback from users in your application, implement this in your API routes: + +```javascript +const addImageClassificationFeedback = require('./model/addImageClassificationFeedback'); + +// Example route handler +app.post('/api/classification-feedback', async (req, res) => { + try { + const { user_id, image_path, predicted_class, correct_class, metadata } = req.body; + + await addImageClassificationFeedback( + user_id, + image_path, + predicted_class, + correct_class, + metadata + ); + + res.status(200).json({ message: 'Feedback submitted successfully' }); + } catch (error) { + console.error('Failed to submit feedback:', error); + res.status(500).json({ error: 'Failed to submit feedback' }); + } +}); +``` + +### Analyzing Collected Feedback + +To analyze the feedback data: + +```bash +# Analyze all feedback +node analyze_feedback.js + +# Analyze feedback for a specific class +node analyze_feedback.js sushi +``` + +### Generating Improvement Suggestions + +Generate code improvement suggestions based on collected feedback: + +```bash +node generate_improvements.js +``` + +## Feedback Data Model + +The feedback system stores the following information: + +- `id`: Unique identifier for the feedback entry +- `user_id`: ID of the user providing feedback (optional) +- `filename`: Original filename of the image +- `image_data`: Base64 encoded image data (optional) +- `image_type`: MIME type of the image +- `predicted_class`: Class predicted by the system +- `correct_class`: Correct class according to user +- `metadata`: Additional metadata +- `created_at`: When the feedback was submitted + +## Benefits + +- **Continuous Improvement**: The system helps identify and fix common classification errors. +- **User Engagement**: Allows users to contribute to improving the system. +- **Data Collection**: Builds a dataset that can be used for future model improvements. +- **Performance Monitoring**: Helps track classification accuracy over time. + +## Maintenance + +- The database is configured to automatically clean up image data older than 90 days to save storage. +- Regularly review the feedback data to identify trends and implement improvements. +- Update the keyword mappings in `add_keywords.js` based on feedback analysis. \ No newline at end of file diff --git a/test/appointmenttest.js b/test/appointmenttest.js new file mode 100644 index 0000000..35d5e57 --- /dev/null +++ b/test/appointmenttest.js @@ -0,0 +1,57 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +const deleteAppointment = require("../model/deleteAppointment"); +chai.use(chaiHttp); + +describe("Appointment: Test saveAppointment - Required Fields Not Entered", () => { + it("should return 400, Missing required fields", (done) => { + chai.request("http://localhost:80") + .post("/api/appointments") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Missing required fields"); + done(); + }); + }); +}); + +describe("Appointment: Test saveAppointment - Appointment Saved Successfully", () => { + it("should return 201, Appointment saved successfully", (done) => { + chai.request("http://localhost:80") + .post("/api/appointments") + .send({ + userId: "1", + date: "2024-01-01", + time: "20:30:00", + description: "test appointment" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(201); + expect(res.body) + .to.have.property("message") + .that.equals("Appointment saved successfully"); + done(); + deleteAppointment("1", "2024-01-01", "20:30:00", "test appointment"); //deletes created appointment from db + }); + }); +}); + +describe("Appointment: Test getAppointments - Success", () => { + it("should return 200, with an array of appointments", (done) => { + chai.request("http://localhost:80") + .get("/api/appointments") + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body).to.be.an("array"); + done(); + }); + }); +}); \ No newline at end of file diff --git a/test/contactustest.js b/test/contactustest.js new file mode 100644 index 0000000..e74e988 --- /dev/null +++ b/test/contactustest.js @@ -0,0 +1,110 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +chai.use(chaiHttp); + +describe("Contactus: Test contactus - Name Not Entered", () => { + it("should return 400, Name is required", (done) => { + chai.request("http://localhost:80") + .post("/api/contactus") + .send({ + name: "", + email: "test@test.com", + subject: "test", + message: "test" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Name is required"); + done(); + }); + }); +}); + +describe("Contactus: Test contactus - Email Not Entered", () => { + it("should return 400, Email is required", (done) => { + chai.request("http://localhost:80") + .post("/api/contactus") + .send({ + name: "test", + email: "", + subject: "test", + message: "test" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Email is required"); + done(); + }); + }); +}); + +describe("Contactus: Test contactus - Subject Not Entered", () => { + it("should return 400, Subject is required", (done) => { + chai.request("http://localhost:80") + .post("/api/contactus") + .send({ + name: "test", + email: "test@test.com", + subject: "", + message: "test" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Subject is required"); + done(); + }); + }); +}); + +describe("Contactus: Test contactus - Name Not Entered", () => { + it("should return 400, Message is required", (done) => { + chai.request("http://localhost:80") + .post("/api/contactus") + .send({ + name: "test", + email: "test@test.com", + subject: "test", + message: "" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Message is required"); + done(); + }); + }); +}); + +describe("Contactus: Test contactus - Message Sent Successfully", () => { + it("should return 201, Data recieved successfully", (done) => { + chai.request("http://localhost:80") + .post("/api/contactus") + .send({ + name: "test", + email: "test@test.com", + subject: "test", + message: "test" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(201); + expect(res.body) + .to.have.property("message") + .that.equals("Data received successfully!"); + done(); + }); + }); +}); \ No newline at end of file diff --git a/test/costEstimationTest.js b/test/costEstimationTest.js new file mode 100644 index 0000000..93ac0df --- /dev/null +++ b/test/costEstimationTest.js @@ -0,0 +1,274 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +chai.use(chaiHttp); + +// Tests may not work if the table data is updated +// => Remove all equal assertions +describe("Test Full Cost Estimation", () => { + + describe("Cost Estimation: Test valid recipe", () => { + it("should return 200, return minimum/maximum cost and ingredients for recipe 261", (done) => { + const recipe_id = 261; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.all.keys( + 'info', + 'low_cost', + 'high_cost'); + expect(res.body.info) + .to.have.all.keys( + 'estimation_type', + 'include_all_wanted_ingredients', + 'minimum_cost', + 'maximum_cost' + ); + expect(res.body.info.estimation_type).to.equal("full"); + expect(res.body.info.minimum_cost).to.equal(18); + expect(res.body.info.maximum_cost).to.equal(42); + expect(res.body.info.include_all_wanted_ingredients).to.equal(true); + done(); + }); + }); + it("should return 200, return minimum/maximum cost and ingredients for recipe 262", (done) => { + const recipe_id = 262; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.all.keys( + 'info', + 'low_cost', + 'high_cost'); + expect(res.body.info) + .to.have.all.keys( + 'estimation_type', + 'include_all_wanted_ingredients', + 'minimum_cost', + 'maximum_cost' + ); + expect(res.body.info.estimation_type).to.equal("full"); + expect(res.body.info.minimum_cost).to.equal(28); + expect(res.body.info.maximum_cost).to.equal(39); + expect(res.body.info.include_all_wanted_ingredients).to.equal(true); + done(); + }); + }); + }); + + describe("Cost Estimation: Test invalid recipe", () => { + it("should return 404 for invalid recipe", (done) => { + const recipe_id = 11111; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Invalid recipe id, ingredients not found"); + done(); + }); + }); + }); + + describe("Cost Estimation: Test valid recipe with invalid ingredients", () => { + it("should return 404 for ingredient not found in store", (done) => { + const recipe_id = 267; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("There was an error in estimation process"); + done(); + }); + }); + + it("should return 404 for ingredient measurement not match any product in store", (done) => { + const recipe_id = 25; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("There was an error in estimation process"); + done(); + }); + }); + + it("should return 404 for null ingredients", (done) => { + const recipe_id = 19; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Recipe contains invalid ingredients data, can not estimate cost"); + done(); + }); + }); + }); +}) + + +describe("Test Partial Cost Estimation: excluding ingredients", () => { + describe("Exclude ingredients: Test valid recipe", () => { + it("should return 200, return minimum/maximum cost and ingredients for recipe 261", (done) => { + const recipe_id = 261; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=275`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.all.keys( + 'info', + 'low_cost', + 'high_cost'); + expect(res.body.info) + .to.have.all.keys( + 'estimation_type', + 'include_all_wanted_ingredients', + 'minimum_cost', + 'maximum_cost' + ); + expect(res.body.info.estimation_type).to.equal("partial"); + expect(res.body.info.minimum_cost).to.equal(11); + expect(res.body.info.maximum_cost).to.equal(12); + expect(res.body.info.include_all_wanted_ingredients).to.equal(true); + done(); + }); + }); + it("should return 200, return minimum/maximum cost and ingredients for recipe 262", (done) => { + const recipe_id = 262; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=3,5`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.all.keys( + 'info', + 'low_cost', + 'high_cost'); + expect(res.body.info) + .to.have.all.keys( + 'estimation_type', + 'include_all_wanted_ingredients', + 'minimum_cost', + 'maximum_cost' + ); + expect(res.body.info.estimation_type).to.equal("partial"); + expect(res.body.info.minimum_cost).to.equal(17); + expect(res.body.info.maximum_cost).to.equal(27); + expect(res.body.info.include_all_wanted_ingredients).to.equal(true); + done(); + }); + }); + }); + + describe("Exclude ingredients: Test invalid recipe and params", () => { + it("should return 404 for invalid recipe", (done) => { + const recipe_id = 11111; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=1`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Invalid recipe id, ingredients not found"); + done(); + }); + }); + it("should return 404 for invalid excluding ingredients", (done) => { + const recipe_id = 262; + const exclude_id = [275]; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=${exclude_id.toString()}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals(`Ingredient ${exclude_id.toString()} not found in recipe, can not exclude`); + done(); + }); + }); + }); + + + describe("Exclude ingredients: Test valid recipe with invalid ingredients", () => { + it("should return 404 for ingredient not found in store", (done) => { + const recipe_id = 267; + const exclude_id = [2]; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=${exclude_id.toString()}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("There was an error in estimation process"); + done(); + }); + }); + + it("should return 404 for ingredient measurement not match any product in store", (done) => { + const recipe_id = 25; + const exclude_id = [22]; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=${exclude_id.toString()}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("There was an error in estimation process"); + done(); + }); + }); + + it("should return 404 for null ingredients", (done) => { + const recipe_id = 19; + const exclude_id = [22]; + chai.request("http://localhost:80") + .get(`/api/recipe/cost/${recipe_id}?exclude_ids=${exclude_id.toString()}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Recipe contains invalid ingredients data, can not estimate cost"); + done(); + }); + }); + }); +}) \ No newline at end of file diff --git a/test/ingredientSubstitutionTest.js b/test/ingredientSubstitutionTest.js new file mode 100644 index 0000000..5fa9a74 --- /dev/null +++ b/test/ingredientSubstitutionTest.js @@ -0,0 +1,166 @@ +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const sinon = require('sinon'); +const { expect } = chai; + +chai.use(chaiHttp); + +// Import test helpers +const { getTestServer } = require('./test-helpers'); + +// Import the model function to stub +const fetchIngredientSubstitutions = require('../model/fetchIngredientSubstitutions.js'); + +describe('Ingredient Substitution API', () => { + let server; + let fetchStub; + + before(async () => { + server = await getTestServer(); + }); + + beforeEach(() => { + // Create a stub for the fetchIngredientSubstitutions function + fetchStub = sinon.stub(); + // Replace the original function with our stub + const originalModule = require('../model/fetchIngredientSubstitutions.js'); + // Save reference to the original module.exports + const originalExports = module.exports; + // Replace module.exports with our stub + module.exports = fetchStub; + // Restore the controller module to use our stub + delete require.cache[require.resolve('../controller/ingredientSubstitutionController.js')]; + require('../controller/ingredientSubstitutionController.js'); + }); + + afterEach(() => { + // Restore all stubs after each test + sinon.restore(); + }); + + describe('GET /api/substitution/ingredient/:ingredientId', () => { + it('should return substitutions for a valid ingredient ID', async () => { + // Mock data for the test + const mockOriginal = { id: 1, name: 'Chicken', category: 'Protein' }; + const mockSubstitutes = [ + { id: 2, name: 'Turkey', category: 'Protein' }, + { id: 3, name: 'Tofu', category: 'Protein' } + ]; + + // Configure the stub to return mock data + fetchStub.resolves({ + original: mockOriginal, + substitutes: mockSubstitutes + }); + + // Make the API request + const res = await chai.request(server) + .get('/api/substitution/ingredient/1'); + + // Assertions + expect(res).to.have.status(200); + expect(res.body).to.be.an('object'); + expect(res.body).to.have.property('original'); + expect(res.body).to.have.property('substitutes'); + expect(res.body.original).to.deep.equal(mockOriginal); + expect(res.body.substitutes).to.be.an('array'); + expect(res.body.substitutes).to.have.lengthOf(2); + expect(res.body.substitutes[0]).to.deep.equal(mockSubstitutes[0]); + }); + + it('should handle filtering by allergies', async () => { + // Mock data for the test + const mockOriginal = { id: 1, name: 'Milk', category: 'Dairy' }; + const mockSubstitutes = [ + { id: 5, name: 'Almond Milk', category: 'Dairy' } + ]; + + // Configure the stub to return mock data + fetchStub.resolves({ + original: mockOriginal, + substitutes: mockSubstitutes + }); + + // Make the API request with allergy filter + const res = await chai.request(server) + .get('/api/substitution/ingredient/1?allergies=2,3'); + + // Assertions + expect(res).to.have.status(200); + expect(res.body.substitutes).to.have.lengthOf(1); + expect(fetchStub.calledOnce).to.be.true; + + // Verify the stub was called with the correct parameters + const callArgs = fetchStub.firstCall.args; + expect(callArgs[0]).to.equal(1); // ingredientId + expect(callArgs[1]).to.have.property('allergies'); + expect(callArgs[1].allergies).to.deep.equal([2, 3]); + }); + + it('should handle filtering by dietary requirements', async () => { + // Mock data for the test + const mockOriginal = { id: 1, name: 'Beef', category: 'Protein' }; + const mockSubstitutes = [ + { id: 7, name: 'Lentils', category: 'Protein' } + ]; + + // Configure the stub to return mock data + fetchStub.resolves({ + original: mockOriginal, + substitutes: mockSubstitutes + }); + + // Make the API request with dietary requirements filter + const res = await chai.request(server) + .get('/api/substitution/ingredient/1?dietaryRequirements=1'); + + // Assertions + expect(res).to.have.status(200); + expect(res.body.substitutes).to.have.lengthOf(1); + expect(fetchStub.calledOnce).to.be.true; + + // Verify the stub was called with the correct parameters + const callArgs = fetchStub.firstCall.args; + expect(callArgs[0]).to.equal(1); // ingredientId + expect(callArgs[1]).to.have.property('dietaryRequirements'); + expect(callArgs[1].dietaryRequirements).to.deep.equal([1]); + }); + + it('should return 404 for non-existent ingredient', async () => { + // Configure the stub to throw an error + fetchStub.rejects(new Error('Ingredient not found')); + + // Make the API request + const res = await chai.request(server) + .get('/api/substitution/ingredient/999'); + + // Assertions + expect(res).to.have.status(404); + expect(res.body).to.have.property('error'); + expect(res.body.error).to.equal('Ingredient not found'); + }); + + it('should return 400 for invalid ingredient ID', async () => { + // Make the API request with an invalid ID + const res = await chai.request(server) + .get('/api/substitution/ingredient/invalid'); + + // Assertions + expect(res).to.have.status(500); // This would be a server error due to parsing an invalid ID + }); + + it('should return 500 for server errors', async () => { + // Configure the stub to throw a generic error + fetchStub.rejects(new Error('Database connection error')); + + // Make the API request + const res = await chai.request(server) + .get('/api/substitution/ingredient/1'); + + // Assertions + expect(res).to.have.status(500); + expect(res.body).to.have.property('error'); + expect(res.body.error).to.equal('Internal server error'); + }); + }); +}); \ No newline at end of file diff --git a/test/logintest.js b/test/logintest.js new file mode 100644 index 0000000..7446073 --- /dev/null +++ b/test/logintest.js @@ -0,0 +1,108 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { addTestUser, deleteTestUser, addTestUserMFA } = require("./test-helpers"); +const { expect } = chai; +chai.use(chaiHttp); + +before(async function () { + testUser = await addTestUser(); + testUserMFA = await addTestUserMFA(); +}); + +after(async function () { + await deleteTestUser(testUser.user_id); + await deleteTestUser(testUserMFA.user_id); +}); + +describe("Login: Test login - No Email/Password Entered", () => { + it("should return 400 Email and password are required", (done) => { + chai.request("http://localhost:80") + .post("/api/login") + .send({ + email: "", + password: "", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Email and password are required"); + done(); + }); + }); +}); + +describe("Login: Test login - Invalid email", () => { + it("should return 401 Invalid email", (done) => { + chai.request("http://localhost:80") + .post("/api/login") + .send({ + email: "invaliduser@test.com", + password: "passworddoesntmatter", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(401); + expect(res.body) + .to.have.property("error") + .that.equals("Invalid email"); + done(); + }); + }); +}); + +describe("Login: Test login - Invalid Password", () => { + it("should return 401 Invalid password", (done) => { + chai.request("http://localhost:80") + .post("/api/login") + .send({ + email: testUser.email, + password: "invalidpassword", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(401); + expect(res.body) + .to.have.property("error") + .that.equals("Invalid password"); + done(); + }); + }); +}); + +describe("Login: Test login - Successful Login No MFA", () => { + it("should return 200", (done) => { + chai.request("http://localhost:80") + .post("/api/login") + .send({ + email: testUser.email, + password: "testuser123", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + done(); + }); + }); +}); + +describe("Login: Test login - Login MFA ENABLED Email Sent", () => { + it("should return 202, mfa code sent", (done) => { + chai.request("http://localhost:80") + .post("/api/login") + .send({ + email: testUserMFA.email, + password: "testuser123" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(202); + expect(res.body) + .to.have.property("message") + .that.equals("An MFA Token has been sent to your email address"); + done(); + }); + }); +}); \ No newline at end of file diff --git a/test/recipeImageClassificationTest.js b/test/recipeImageClassificationTest.js new file mode 100644 index 0000000..a50d42a --- /dev/null +++ b/test/recipeImageClassificationTest.js @@ -0,0 +1,44 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +chai.use(chaiHttp); +const fs = require("fs"); + +describe('Recipe Image Classification Test: No Image Uploaded', () => { + it('should return 400 if no file is uploaded', (done) => { + chai.request("http://localhost:80") + .post('/api/recipeImageClassification') + .send() + .end((err, res) => { + expect(res).to.have.status(400); + expect(res.body).to.have.property('error', 'No image uploaded'); + done(); + }); + }); +}); + +describe('Recipe Image Classification: Non-Image File Uploaded', () => { + it('should return 400 if wrong filetype is uploaded', (done) => { + chai.request("http://localhost:80") + .post('/api/recipeImageClassification') + .attach('image', './uploads/test.txt') + .end((err, res) => { + expect(res).to.have.status(400); + done(); + }); + }); +}); + +describe('Recipe Image Classification: Success', () => { + it('should return 200 for success', (done) => { + chai.request("http://localhost:80") + .post('/api/recipeImageClassification') + .attach('image', './uploads/testimage.jpg') + .end((err, res) => { + expect(res).to.have.status(200); + done(); + }); + //set this timeout to 100 seconds as the python script takes a long time to run + }).timeout(100000); +}); \ No newline at end of file diff --git a/test/recipeScalingTest.js b/test/recipeScalingTest.js new file mode 100644 index 0000000..71e447c --- /dev/null +++ b/test/recipeScalingTest.js @@ -0,0 +1,98 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +chai.use(chaiHttp); + +describe("Test Recipe Scaling", () => { + describe("Recipe Scaling: Test valid recipe", () => { + it("should return 200, return the scaled quantity by ratio for recipe 261", (done) => { + const recipe_id = 261; + const serving = 3; + chai.request("http://localhost:80") + .get(`/api/recipe/scale/${recipe_id}/${serving}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.all.keys( + 'id', + 'scale_ratio', + 'desired_servings', + 'scaled_ingredients', + 'original_serving', + 'original_ingredients'); + expect(res.body.scaled_ingredients) + .to.have.all.keys( + 'id', + 'quantity', + 'measurement' + ); + let org_ingre = res.body.original_ingredients; + let scaled_ingre = res.body.scaled_ingredients; + let scale_ratio = res.body.scale_ratio; + expect(scaled_ingre.id.length).to.equal(scaled_ingre.quantity.length); + expect(scaled_ingre.id.length).to.equal(scaled_ingre.measurement.length); + + expect(scale_ratio).to.equal(res.body.desired_servings / res.body.original_serving); + scaled_ingre.quantity.forEach((scaled_qty, index) => { + expect(scaled_qty).to.equal(scale_ratio * org_ingre.quantity[index]); + }); + done(); + }); + }); + }); + + describe("Recipe Scaling: Test invalid recipe", () => { + it("should return 404 for invalid recipe", (done) => { + const recipe_id = 11111; + const serving = 3; + chai.request("http://localhost:80") + .get(`/api/recipe/scale/${recipe_id}/${serving}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Invalid recipe id, can not scale"); + done(); + }); + }); + }); + + describe("Recipe Scaling: Test valid recipe with invalid data", () => { + it("should return 404 for invalid total servings", (done) => { + const recipe_id = 267; + const serving = 3; + chai.request("http://localhost:80") + .get(`/api/recipe/scale/${recipe_id}/${serving}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Recipe contains invalid total serving, can not scale"); + done(); + }); + }); + + it("should return 404 for invalid ingredients (null or invalid id)", (done) => { + const recipe_id = 19; + const serving = 3; + chai.request("http://localhost:80") + .get(`/api/recipe/scale/${recipe_id}/${serving}`) + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Recipe contains invalid ingredients data, can not scale"); + done(); + }); + }); + }); +}) \ No newline at end of file diff --git a/test/recipetest.js b/test/recipetest.js new file mode 100644 index 0000000..c035c3d --- /dev/null +++ b/test/recipetest.js @@ -0,0 +1,140 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +const { addTestRecipe } = require("./test-helpers"); +chai.use(chaiHttp); + +before(async function () { + testRecipe = await addTestRecipe(); +}); + +describe("Recipe: Test createAndSaveRecipe - Parameters Are Missing", () => { + it("should return 400, Recipe parameters are missing", (done) => { + chai.request("http://localhost:80") + .post("/api/recipe/createRecipe") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Recipe parameters are missed"); + done(); + }); + }); +}); + +describe("Recipe: Test createAndSaveRecipe - Successfully created recipe", () => { + it("should return 201, Successfully created recipe", (done) => { + chai.request("http://localhost:80") + .post("/api/recipe/createRecipe") + .send({ + user_id: 1, + ingredient_id: [14], //this needs to be an array + ingredient_quantity: [2], + recipe_name: "testrecipe", + cuisine_id: 5, + total_servings: 1, + preparation_time: 1, + instructions: "testinstructions", + recipe_image: "", + cooking_method_id: 1, + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(201); + expect(res.body) + .to.have.property("message") + .that.equals("success"); + done(); + }); + }); +}); + +describe("Recipe: Test getRecipes - No UserId Entered", () => { + it("should return 400, User Id is required", (done) => { + chai.request("http://localhost:80") + .post("/api/recipe") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("User Id is required"); + done(); + }); + }); +}); + +describe("Recipe: Test getRecipes - No recipes saved to user in database", () => { + it("should return 404, Recipes not found", (done) => { + chai.request("http://localhost:80") + .post("/api/recipe") + .send({ + user_id: "1", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(404); + expect(res.body) + .to.have.property("error") + .that.equals("Recipes not found"); + done(); + }); + }); +}); + +describe("Recipe: Test getRecipes - Success", () => { + it("should return 200, Success", (done) => { + chai.request("http://localhost:80") + .post("/api/recipe") + .send({ + user_id: "15", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.property("message") + .that.equals("success"); + done(); + }); + }); +}); + +describe("Recipe: Test deleteRecipe - User Id or Recipe Id not entered", () => { + it("should return 400, User Id or Recipe Id is required", (done) => { + chai.request("http://localhost:80") + .delete("/api/recipe") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("User Id or Recipe Id is required"); + done(); + }); + }); +}); + +describe("Recipe: Test deleteRecipe - Success", () => { + it("should return 200, Success", (done) => { + chai.request("http://localhost:80") + .delete("/api/recipe") + .send({ + user_id: "1", + recipe_id: testRecipe.id, + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body) + .to.have.property("message") + .that.equals("success"); + done(); + }); + }); +}); diff --git a/test/signuptest.js b/test/signuptest.js new file mode 100644 index 0000000..f251215 --- /dev/null +++ b/test/signuptest.js @@ -0,0 +1,85 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const deleteUser = require("../model/deleteUser"); +const getUser = require("../model/getUser"); +const { addTestUser, deleteTestUser } = require("./test-helpers"); +const { expect } = chai; +chai.use(chaiHttp); + +before(async function () { + testUser = await addTestUser(); +}); + +after(async function () { + await deleteTestUser(testUser.user_id); +}); + +describe("Signup: Test signup - No Credentials Entered", () => { + it("should return 400, Name, password, email and contact number are required", (done) => { + chai.request("http://localhost:80") + .post("/api/signup") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Name, email, password, contact number and address are required"); + done(); + }); + }); +}); + +describe("Signup: Test signup - User Already Exists", () => { + it("should return 400, User already exists", (done) => { + chai.request("http://localhost:80") + .post("/api/signup") + .send({ + name: testUser.name, + email: testUser.email, + password: testUser.password, + contact_number: testUser.contact_number, + address: testUser.address + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("User already exists"); + done(); + }); + }); +}); + +describe("Signup: Test signup - Successful Sign Up", () => { + it("should return 201, User created successfully", (done) => { + chai.request("http://localhost:80") + .post("/api/signup") + .send({ + name: `test user success`, + email: `testuser${Math.random().toString()}@test.com`, + password: "signuptestpassword", + contact_number: "0412345678", + address: "address" + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(201); + expect(res.body) + .to.have.property("message") + .that.equals("User created successfully"); + done(); + deleteCreatedUserFromDB("signuptestuser"); //deletes user created for test purpose + }); + }); +}); + +//function to delete user after adding it to db with test +async function deleteCreatedUserFromDB(username) { + let user = await getUser(username); + if (user) { + deleteUser(user[0].user_id); //because get user returns an array, need to set index, because we are only allowing unique users this should be fine + } +} diff --git a/test/test-helpers.js b/test/test-helpers.js new file mode 100644 index 0000000..2ec07dd --- /dev/null +++ b/test/test-helpers.js @@ -0,0 +1,92 @@ +const deleteUser = require("../model/deleteUser"); +const supabase = require("../dbConnection.js"); +const bcrypt = require("bcryptjs"); + +async function addTestUser() { + let testUser = `testuser${Math.random().toString()}@test.com`; + const hashedPassword = await bcrypt.hash("testuser123", 10); + try { + let { data, error } = await supabase + .from("users") + .insert({ + name: "test user", + email: testUser, + password: hashedPassword, + mfa_enabled: false, + contact_number: "000000000", + address: "address" + }) + .select(); + + if (error) { + throw error; + } + const createdUser = data[0]; + return createdUser; + } catch (error) { + throw error; + } +} + +async function addTestUserMFA() { + let testUser = `testuser${Math.random().toString()}@test.com`; + const hashedPassword = await bcrypt.hash("testuser123", 10); + try { + let { data, error } = await supabase + .from("users") + .insert({ + name: "test user", + email: testUser, + password: hashedPassword, + mfa_enabled: true, + contact_number: "000000000", + address: "address" + }) + .select(); + + if (error) { + throw error; + } + const createdUser = data[0]; + return createdUser; + } catch (error) { + throw error; + } +} + +async function deleteTestUser(userId) { + deleteUser(userId); +} + +async function addTestRecipe() { + try { + let { data, error } = await supabase + .from("recipes") + .insert({ + recipe_name: "test recipe to delete", + user_id: "1" + }) + .select(); + + if (error) { + throw error; + } + const savedRecipe = data[0]; + return savedRecipe; + } catch (error) { + throw error; + } +}; + +async function getTestServer() { + const app = express(); + app.use(express.json()); + + const routes = require("../routes"); + routes(app); + + return app; +} + + +module.exports = { addTestUser, deleteTestUser, addTestUserMFA, addTestRecipe }; diff --git a/test/userFeedbackTests.js b/test/userFeedbackTests.js new file mode 100644 index 0000000..6311a4f --- /dev/null +++ b/test/userFeedbackTests.js @@ -0,0 +1,88 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { expect } = chai; +chai.use(chaiHttp); + +describe("UserFeedback Tests", () => { + it("should return 400, Name is Required", (done) => { + chai.request("http://localhost:80") + .post("/api/userfeedback") + .send() + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Name is required"); + done(); + }); + }); + it("should return 400, Email is Required", (done) => { + chai.request("http://localhost:80") + .post("/api/userfeedback") + .send({ + name: "test", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Email is required"); + done(); + }); + }); + it("should return 400, Experience is Required", (done) => { + chai.request("http://localhost:80") + .post("/api/userfeedback") + .send({ + name: "test", + email: "test@test.com", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Experience is required"); + done(); + }); + }); + it("should return 400, Message is Required", (done) => { + chai.request("http://localhost:80") + .post("/api/userfeedback") + .send({ + name: "test", + email: "test@test.com", + experience: "This is the best app ever", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("Message is required"); + done(); + }); + }); + + it("should return 201, Add User Feedback Successful", (done) => { + chai.request("http://localhost:80") + .post("/api/userfeedback") + .send({ + name: "test", + email: "test@test.com", + experience: "This is the best app ever", + message: "These are some good developers", + }) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(201); + expect(res.body) + .to.have.property("message") + .that.equals("Data received successfully!"); + done(); + }); + }); +}); diff --git a/test/userPreferencesTests.js b/test/userPreferencesTests.js new file mode 100644 index 0000000..f8c6b93 --- /dev/null +++ b/test/userPreferencesTests.js @@ -0,0 +1,71 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { addTestUser, deleteTestUser, getToken } = require("./test-helpers"); +const { expect } = chai; +chai.use(chaiHttp); + +describe("userPreferences Tests", () => { + let testUser; + let token; + let req; + + before(async function () { + testUser = await addTestUser(); + req = { + dietary_requirements: [1, 2, 4], + allergies: [1], + cuisines: [2, 5], + dislikes: [4], + health_conditions: [], + spice_levels: [1, 2], + cooking_methods: [1, 4, 5], + user: { + userId: testUser.user_id, + }, + }; + }); + + beforeEach(async function () { + let loginRequest = { + email: testUser.email, + password: "testuser123", + }; + const res = await chai + .request("http://localhost:80") + .post("/api/login") + .send(loginRequest); + + token = res.body.token; + }); + + after(async function () { + await deleteTestUser(testUser.user_id); + }); + + it("should return 400, Missing UserId", (done) => { + chai.request("http://localhost:80") + .post("/api/user/preferences") + .send({}) + .set("Authorization", `Bearer ${token}`) + .end((err, res) => { + expect(res).to.have.status(400); + expect(res.body) + .to.have.property("error") + .that.equals("User ID is required"); + done(); + }); + }); + + it("should return 204, Add User Feedback Successful", (done) => { + chai.request("http://localhost:80") + .post("/api/user/preferences") + .send(req) + .set("Authorization", `Bearer ${token}`) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(204); + done(); + }); + }); +}); diff --git a/test/userProfileTests.js b/test/userProfileTests.js new file mode 100644 index 0000000..1f939fc --- /dev/null +++ b/test/userProfileTests.js @@ -0,0 +1,56 @@ +require("dotenv").config(); +const chai = require("chai"); +const chaiHttp = require("chai-http"); +const { addTestUser, deleteTestUser, getToken } = require("./test-helpers"); +const { expect } = chai; +chai.use(chaiHttp); + +describe("User Profile Tests", () => { + let testUser; + + before(async function () { + testUser = await addTestUser(); + }); + after(async function () { + await deleteTestUser(testUser.user_id); + }); + it("should return 200, Update user profile Successful", (done) => { + let req = { + email: testUser.email, + first_name: "updated_name", + last_name: "updated_last_name", + contact_number: "111111111" + }; + chai.request("http://localhost:80") + .put("/api/userprofile") + .send(req) + .end((err, res) => { + if (err) return done(err); + expect(res).to.have.status(200); + expect(res.body[0]).to.have.property( + "first_name", + req.first_name + ); + expect(res.body[0]).to.have.property( + "last_name", + req.last_name + ); + expect(res.body[0]).to.have.property("email", req.email); + expect(res.body[0]).to.have.property( + "contact_number", + req.contact_number + ); + done(); + }); + }); + it("should return 400, Missing username", (done) => { + let req = {}; + chai.request("http://localhost:80") + .put("/api/userprofile") + .send(req) + .end((err, res) => { + expect(res).to.have.status(400); + done(); + }); + }); +}); diff --git a/testSupabase.js b/testSupabase.js new file mode 100644 index 0000000..be52ad3 --- /dev/null +++ b/testSupabase.js @@ -0,0 +1,26 @@ +// testSupabase.js +const { createClient } = require('@supabase/supabase-js'); +require('dotenv').config(); + +const supabaseUrl = process.env.SUPABASE_URL; +const supabaseKey = process.env.SUPABASE_ANON_KEY; +const supabase = createClient(supabaseUrl, supabaseKey); + +async function testCRUD() { + // Insert test data + let { data: testInsert, error } = await supabase + .from('ingredients') + .insert([{ name: 'Test Ingredient', calories: 100 }]); + if (error) console.error('Insert Error:', error); + else console.log('Inserted:', testInsert); + + // Query test data + let { data: testQuery, error: queryError } = await supabase + .from('ingredients') + .select('*') + .eq('name', 'Test Ingredient'); + if (queryError) console.error('Query Error:', queryError); + else console.log('Queried:', testQuery); +} + +testCRUD(); diff --git a/tools/README.md b/tools/README.md new file mode 100644 index 0000000..0562d7e --- /dev/null +++ b/tools/README.md @@ -0,0 +1,76 @@ +# Image Classification Utilities + +This directory contains utility tools for the Nutrihelp image classification API. + +## Directory Structure + +- **image_classification/** - Tools for managing the image classification model +- **feedback/** - Tools for collecting and analyzing user feedback +- **test/** - Tools for testing the image classification system +- **database/** - Tools for testing and managing database connections + +## Available Tools + +### Image Classification Tools + +- **add_keywords.js** - Adds new food keyword mappings to the classification system + ``` + node tools/image_classification/add_keywords.js + ``` + +- **fix_model.py** - Creates a model for testing based on color recognition + ``` + python tools/image_classification/fix_model.py + ``` + +### Feedback Collection Tools + +- **collect_feedback.js** - Collects user feedback on incorrect classifications + ``` + node tools/feedback/collect_feedback.js + ``` + +- **analyze_feedback.js** - Analyzes collected feedback to identify patterns + ``` + node tools/feedback/analyze_feedback.js [class_name] + ``` + +- **generate_improvements.js** - Generates code improvement suggestions based on feedback + ``` + node tools/feedback/generate_improvements.js + ``` + +### Testing Tools + +- **test_image_classification.js** - Tests the image classification on specific images + ``` + node tools/test/test_image_classification.js + ``` + +- **add_test_image.js** - Adds a test image to the uploads directory + ``` + node tools/test/add_test_image.js + ``` + +### Database Tools + +- **testSupabase.js** - Tests Supabase connection and basic CRUD operations + ``` + node tools/database/testSupabase.js + ``` + +### Utility Tools + +- **cleanup_uploads.js** - Cleans up temporary and system-generated files in the uploads directory + ``` + node tools/cleanup_uploads.js + ``` + +## Database Integration + +The feedback system uses Supabase for storing user feedback. To set up the database: + +1. Run the SQL script in `setup/create_feedback_table.sql` in your Supabase SQL editor +2. Ensure your `.env` file contains the correct Supabase connection details + +See `setup/README_FEEDBACK.md` for detailed instructions on setting up and using the feedback system. \ No newline at end of file diff --git a/tools/cleanup_uploads.js b/tools/cleanup_uploads.js new file mode 100644 index 0000000..be46488 --- /dev/null +++ b/tools/cleanup_uploads.js @@ -0,0 +1,99 @@ +/** + * Uploads Directory Cleanup Tool + * + * This script cleans up temporary and system-generated files in the uploads directory, + * preserving properly named image files and necessary system files. + * + * Usage: node tools/cleanup_uploads.js + */ + +const fs = require('fs'); +const path = require('path'); + +// Main directory +const UPLOADS_DIR = path.join(__dirname, '../uploads'); + +// Files to preserve +const KEEP_FILES = [ + 'original_filename.txt', + 'image.jpg', + 'last_prediction.txt', + '.gitkeep' +]; + +// Image file extensions +const IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png']; + +// Main function +async function cleanupUploads() { + console.log('Starting uploads directory cleanup...'); + + // Ensure directory exists + if (!fs.existsSync(UPLOADS_DIR)) { + console.log('Uploads directory does not exist, nothing to clean up'); + return; + } + + try { + // Read all files + const files = fs.readdirSync(UPLOADS_DIR); + console.log(`Found ${files.length} files`); + + let deletedCount = 0; + let keptCount = 0; + + for (const file of files) { + // Skip directories + const filePath = path.join(UPLOADS_DIR, file); + if (fs.statSync(filePath).isDirectory()) { + console.log(`Skipping subdirectory: ${file}`); + continue; + } + + // Check if file is in the keep list + if (KEEP_FILES.includes(file)) { + console.log(`Keeping system file: ${file}`); + keptCount++; + continue; + } + + // Check if it's an image file + const extension = path.extname(file).toLowerCase(); + const isImage = IMAGE_EXTENSIONS.includes(extension); + + // Check if filename is a valid image name (not a random hash) + const isValidName = + // Valid image name features: not all hexadecimal characters + (isImage && !/^[a-f0-9]{20,}$/i.test(path.basename(file, extension))) || + // Or contains underscores, hyphens, and letters + (isImage && /[_\-a-z]/i.test(file)); + + if (isImage && isValidName) { + console.log(`Keeping image file: ${file}`); + keptCount++; + continue; + } + + // Delete unwanted files + try { + fs.unlinkSync(filePath); + console.log(`Deleted: ${file}`); + deletedCount++; + } catch (err) { + console.error(`Failed to delete file ${file}:`, err); + } + } + + console.log('\nCleanup complete:'); + console.log(`- Deleted ${deletedCount} temporary/system files`); + console.log(`- Preserved ${keptCount} valid files`); + + } catch (err) { + console.error('Error occurred during cleanup:', err); + } +} + +// Run cleanup +cleanupUploads().then(() => { + console.log('\nYou can run "node tools/cleanup_uploads.js" anytime to clean the uploads directory'); +}); \ No newline at end of file diff --git a/tools/feedback/README.md b/tools/feedback/README.md new file mode 100644 index 0000000..2321be0 --- /dev/null +++ b/tools/feedback/README.md @@ -0,0 +1,113 @@ +# Feedback-Based Optimization System + +This system automatically improves the image classification accuracy based on user feedback. It implements a semi-supervised learning approach where user corrections are collected and periodically analyzed to improve the classification logic. + +## Components + +The feedback optimization system consists of the following components: + +1. **Feedback Collection** (`collect_feedback.js`) + - Collects user feedback on image classifications + - Stores the feedback in the Supabase database + +2. **Feedback Analysis** (`analyze_feedback.js`) + - Analyzes collected feedback to identify patterns + - Provides statistics on commonly misclassified foods + +3. **Feedback Optimization** (`apply_feedback_improvements.js`) + - Automatically applies improvements based on feedback patterns + - Updates food mappings, keywords, and classification rules + +4. **Scheduled Optimization** (`scheduled_optimization.js`) + - Runs the optimization process on a schedule + - Creates backups and logs the optimization history + +## How It Works + +### 1. Collecting Feedback + +When the image classification system makes a mistake, users can provide feedback: + +``` +node tools/feedback/collect_feedback.js uploads/image.jpg "correct_class" +``` + +This feedback is stored in the Supabase database, linking the image with both the predicted class and the correct class. + +### 2. Analyzing Feedback + +The system can analyze collected feedback to identify patterns: + +``` +node tools/feedback/analyze_feedback.js +``` + +This shows statistics about which classes are frequently confused and suggests potential improvements. + +### 3. Applying Improvements + +The system can automatically apply improvements based on feedback data: + +``` +node tools/feedback/apply_feedback_improvements.js [min_count] +``` + +- `min_count`: Minimum number of occurrences to consider a pattern significant (default: 3) + +The improvements include: + +- **Mapping Updates**: Correcting food mappings in the Python classification script +- **Keyword Additions**: Adding new keywords extracted from filenames +- **Texture/Color Analysis**: Updating texture and color analysis rules + +### 4. Scheduled Optimization + +For continuous improvement, the system can run optimizations automatically: + +``` +node tools/feedback/scheduled_optimization.js +``` + +This script is designed to be run on a schedule (e.g., daily or weekly) using a task scheduler: + +- On Linux/Unix: Use cron jobs +- On Windows: Use Task Scheduler + +Example cron job (runs daily at 2 AM): +``` +0 2 * * * cd /path/to/Nutrihelp-api && node tools/feedback/scheduled_optimization.js >> logs/cron.log 2>&1 +``` + +## Configuration + +Key configuration options are available in each script: + +- `MIN_FEEDBACK_COUNT`: Minimum feedback count to trigger an update (default: 3) +- `UPDATE_KEYWORDS`: Whether to update keywords (default: true) +- `UPDATE_MAPPINGS`: Whether to update food mappings (default: true) +- `UPDATE_TEXTURES`: Whether to update texture analysis rules (default: true) +- `BACKUP_BEFORE_UPDATES`: Whether to backup Python file before updates (default: true) + +## Logs and Backups + +The system maintains logs and backups: + +- **Optimization Logs**: `logs/optimization_history.log` +- **Python File Backups**: `backups/recipeImageClassification_[timestamp].py` + +## Best Practices + +1. **Regular Feedback Collection**: Encourage users to provide feedback when misclassifications occur +2. **Periodic Manual Review**: Occasionally review the automatic optimizations +3. **Threshold Tuning**: Adjust the `MIN_FEEDBACK_COUNT` based on usage volume +4. **Backup Management**: Periodically clean up old backups to save disk space + +## Technical Implementation + +The system uses a semi-supervised learning approach: + +1. **Error Pattern Detection**: Identifying which classes are frequently confused +2. **Keyword Extraction**: Finding words in filenames that correlate with specific classes +3. **Rule-Based Improvements**: Updating classification rules based on feedback patterns + +This approach allows for continuous improvement without requiring complex model retraining. \ No newline at end of file diff --git a/tools/feedback/analyze_feedback.js b/tools/feedback/analyze_feedback.js new file mode 100644 index 0000000..a2c5c4d --- /dev/null +++ b/tools/feedback/analyze_feedback.js @@ -0,0 +1,157 @@ +/** + * Image Classification Feedback Analysis Tool + * + * Analyzes collected image classification feedback data to help improve recognition accuracy + * Usage: node tools/feedback/analyze_feedback.js [class_name] + * + * If no class name is provided, all collected feedback will be analyzed + */ + +// Ensure environment variables are loaded first +require('dotenv').config(); + +const fs = require('fs'); +const path = require('path'); +const supabase = require('../../dbConnection.js'); + +// Check Supabase credentials +if (!process.env.SUPABASE_URL || !process.env.SUPABASE_ANON_KEY) { + console.error('Error: Missing Supabase credentials in environment variables.'); + console.error('Please make sure your .env file contains SUPABASE_URL and SUPABASE_ANON_KEY'); + process.exit(1); +} + +// Configuration +const TARGET_CLASS = process.argv[2]; // Optional parameter for specific class + +console.log(`Using Supabase URL: ${process.env.SUPABASE_URL.substring(0, 15)}...`); + +// Main function to analyze feedback +async function analyzeFeedback() { + try { + console.log('Loading feedback data from database...'); + + // Query the feedback data from Supabase + let query = supabase + .from('image_classification_feedback') + .select('*') + .order('created_at', { ascending: false }); + + // Filter by target class if specified + if (TARGET_CLASS) { + query = query.eq('correct_class', TARGET_CLASS.toLowerCase()); + } + + // Execute the query + const { data: feedbackData, error } = await query; + + if (error) { + console.error('Error retrieving feedback data:', error); + + if (error.message && error.message.includes('does not exist')) { + console.error('\nTable "image_classification_feedback" does not exist in your Supabase database.'); + console.error('Please run the SQL script in setup/create_feedback_table.sql in your Supabase SQL Editor.'); + } + + process.exit(1); + } + + if (!feedbackData || feedbackData.length === 0) { + console.log('No feedback data found. Please collect feedback using tools/feedback/collect_feedback.js first.'); + process.exit(0); + } + + console.log(`Loaded ${feedbackData.length} feedback records`); + + // Generate statistics + const classCounts = {}; + const classImages = {}; + let totalFeedback = feedbackData.length; + + feedbackData.forEach(feedback => { + const className = feedback.correct_class.toLowerCase(); + + // Count + if (!classCounts[className]) { + classCounts[className] = 0; + classImages[className] = []; + } + + classCounts[className]++; + classImages[className].push(feedback.filename); + }); + + // Sort classes by count + const sortedClasses = Object.keys(classCounts).sort((a, b) => { + return classCounts[b] - classCounts[a]; + }); + + // Print analysis results + console.log('\nFeedback Analysis Results:'); + console.log('-------------------------'); + + if (TARGET_CLASS) { + if (classCounts[TARGET_CLASS.toLowerCase()]) { + console.log(`Class "${TARGET_CLASS}" feedback statistics:`); + console.log(`- Sample count: ${classCounts[TARGET_CLASS.toLowerCase()]}`); + console.log('- Sample filenames:'); + classImages[TARGET_CLASS.toLowerCase()].forEach(filename => { + console.log(` - ${filename}`); + }); + } else { + console.log(`No feedback data found for class "${TARGET_CLASS}"`); + } + } else { + console.log(`Total: ${totalFeedback} feedback entries`); + console.log('\nBy class:'); + + sortedClasses.forEach(className => { + const percentage = ((classCounts[className] / totalFeedback) * 100).toFixed(2); + console.log(`- ${className}: ${classCounts[className]} entries (${percentage}%)`); + }); + } + + // Provide improvement suggestions + console.log('\nImprovement Suggestions:'); + if (sortedClasses.length > 3) { + // Get the top three most common classes + const topClasses = sortedClasses.slice(0, 3); + console.log('1. Focus on these classes:'); + topClasses.forEach(className => { + console.log(` - ${className} (${classCounts[className]} feedback entries)`); + }); + } + + console.log('2. Methods to improve recognition accuracy:'); + console.log(' - Use tools/image_classification/add_keywords.js to add more keywords for specific classes'); + console.log(' - Modify color and texture analysis rules in recipeImageClassification.py'); + console.log(' - Consider collecting more samples, especially for classes with high error rates'); + + // Explain next steps + console.log('\nYou can test image classification with this command:'); + console.log('node tools/test/test_image_classification.js '); + + // Help with adding keywords + console.log('\nTo add more keyword mappings for classes, use the add_keywords.js script:'); + console.log('node tools/image_classification/add_keywords.js'); + + // Generate improvements script suggestion + console.log('\nGenerate improvement suggestions using collected feedback:'); + console.log('node tools/feedback/generate_improvements.js'); + } catch (error) { + console.error('Error analyzing feedback:', error); + + // More detailed error handling + if (error.message && error.message.includes('supabaseUrl is required')) { + console.error('\nSUPABASE_URL environment variable is not being loaded properly.'); + console.error('Current environment variables:'); + console.error(`SUPABASE_URL: ${process.env.SUPABASE_URL || 'not set'}`); + console.error(`SUPABASE_ANON_KEY: ${process.env.SUPABASE_ANON_KEY ? 'set (hidden)' : 'not set'}`); + } + + process.exit(1); + } +} + +// Run the analysis +analyzeFeedback(); \ No newline at end of file diff --git a/tools/feedback/apply_feedback_improvements.js b/tools/feedback/apply_feedback_improvements.js new file mode 100644 index 0000000..e688308 --- /dev/null +++ b/tools/feedback/apply_feedback_improvements.js @@ -0,0 +1,310 @@ +/** + * Automatic Feedback-Based Improvement System + * + * This script analyzes collected feedback data and automatically applies + * improvements to the food classification system based on common error patterns. + * + * Usage: node tools/feedback/apply_feedback_improvements.js [min_count] + * + * - min_count: Minimum number of occurrences to consider a pattern significant (default: 3) + */ + +require('dotenv').config(); +const fs = require('fs'); +const path = require('path'); +const { execSync } = require('child_process'); +const { createClient } = require('@supabase/supabase-js'); + +// Configuration +const MIN_FEEDBACK_COUNT = parseInt(process.argv[2]) || 3; // Minimum feedback count to trigger an update +const UPDATE_KEYWORDS = true; // Whether to update keywords +const UPDATE_MAPPINGS = true; // Whether to update food mappings +const UPDATE_TEXTURES = true; // Whether to update texture analysis rules + +// Create Supabase client +const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY); + +// Check Supabase credentials +if (!process.env.SUPABASE_URL || !process.env.SUPABASE_ANON_KEY) { + console.error('Error: Missing Supabase credentials in environment variables.'); + console.error('Please make sure your .env file contains SUPABASE_URL and SUPABASE_ANON_KEY'); + process.exit(1); +} + +/** + * Analyze feedback data and extract error patterns + * @returns {Object} Analysis of error patterns and recommendations + */ +async function analyzeFeedbackData() { + try { + console.log('Loading feedback data from database...'); + + // Query all feedback data + const { data: feedbackData, error } = await supabase + .from('image_classification_feedback') + .select('*'); + + if (error) { + console.error('Error retrieving feedback data:', error); + process.exit(1); + } + + if (!feedbackData || feedbackData.length === 0) { + console.log('No feedback data found. Please collect feedback first.'); + process.exit(0); + } + + console.log(`Analyzing ${feedbackData.length} feedback records...`); + + // Identify error patterns + const errorPatterns = {}; + const correctClassCounts = {}; + const keywordSuggestions = {}; + + feedbackData.forEach(item => { + // Skip if prediction was correct + if (item.predicted_class === item.correct_class) return; + + // Record error pattern (wrong -> correct) + const patternKey = `${item.predicted_class}_to_${item.correct_class}`; + errorPatterns[patternKey] = (errorPatterns[patternKey] || 0) + 1; + + // Record correct class counts + correctClassCounts[item.correct_class] = (correctClassCounts[item.correct_class] || 0) + 1; + + // Extract potential keywords from filenames + const filename = item.filename.toLowerCase(); + const basename = path.basename(filename, path.extname(filename)); + + // Only use alphabetic parts as potential keywords (at least 3 chars) + const words = basename.split(/[^a-z]/i).filter(word => word.length >= 3); + + if (!keywordSuggestions[item.correct_class]) { + keywordSuggestions[item.correct_class] = {}; + } + + words.forEach(word => { + keywordSuggestions[item.correct_class][word] = + (keywordSuggestions[item.correct_class][word] || 0) + 1; + }); + }); + + // Filter significant error patterns + const significantPatterns = Object.entries(errorPatterns) + .filter(([_, count]) => count >= MIN_FEEDBACK_COUNT) + .sort((a, b) => b[1] - a[1]); // Sort by frequency, highest first + + // Filter significant keyword suggestions + const significantKeywords = {}; + Object.entries(keywordSuggestions).forEach(([className, keywords]) => { + significantKeywords[className] = Object.entries(keywords) + .filter(([_, count]) => count >= Math.max(2, Math.floor(MIN_FEEDBACK_COUNT / 2))) + .map(([keyword, _]) => keyword); + }); + + return { + totalFeedback: feedbackData.length, + errorPatterns: significantPatterns, + classCounts: correctClassCounts, + keywordSuggestions: significantKeywords + }; + } catch (error) { + console.error('Error analyzing feedback data:', error); + process.exit(1); + } +} + +/** + * Apply food mapping updates based on analysis + * @param {Array} errorPatterns Significant error patterns + */ +function applyMappingUpdates(errorPatterns) { + if (!UPDATE_MAPPINGS) return; + + console.log('\nApplying food mapping updates...'); + + errorPatterns.forEach(([pattern, count]) => { + const [wrong, correct] = pattern.split('_to_'); + + console.log(`Updating mapping: ${wrong} → ${correct} (${count} occurrences)`); + + try { + // Execute the update_food_mapping.js script + const command = `node tools/image_classification/update_food_mapping.js ${correct} ${correct}`; + console.log(`Running: ${command}`); + + const output = execSync(command, { encoding: 'utf8' }); + console.log(output); + } catch (error) { + console.error(`Error updating mapping for ${correct}:`, error.message); + } + }); +} + +/** + * Apply keyword updates based on analysis + * @param {Object} keywordSuggestions Keyword suggestions for each class + */ +function applyKeywordUpdates(keywordSuggestions) { + if (!UPDATE_KEYWORDS) return; + + console.log('\nApplying keyword updates...'); + + // Create a new keywords object + const newKeywords = {}; + + // Populate with suggested keywords + Object.entries(keywordSuggestions).forEach(([className, keywords]) => { + keywords.forEach(keyword => { + if (keyword !== className && !keyword.includes(className)) { + newKeywords[keyword] = className; + } + }); + }); + + if (Object.keys(newKeywords).length === 0) { + console.log('No new keywords to add.'); + return; + } + + console.log(`Adding ${Object.keys(newKeywords).length} new keywords:`); + Object.entries(newKeywords).forEach(([keyword, className]) => { + console.log(`- "${keyword}" → "${className}"`); + }); + + // Path to Python classification file + const pythonFile = path.join(__dirname, '../../model/recipeImageClassification.py'); + + try { + // Read Python file + const content = fs.readFileSync(pythonFile, 'utf8'); + + // Find DISH_OVERRIDES dictionary + const dictRegex = /DISH_OVERRIDES = \{[^}]*\}/s; + const dictMatch = content.match(dictRegex); + + if (!dictMatch) { + console.error('Could not find DISH_OVERRIDES dictionary in Python file'); + return; + } + + // Extract current dictionary content + let dictContent = dictMatch[0]; + + // Add new keywords at the end of the dictionary + const insertPoint = dictContent.lastIndexOf('}'); + let newDictContent = dictContent.substring(0, insertPoint); + + // Check if keywords already exist + let addedCount = 0; + + for (const [keyword, className] of Object.entries(newKeywords)) { + if (!content.includes(`"${keyword}": `)) { + newDictContent += ` "${keyword}": "${className}",\n`; + addedCount++; + } + } + + // Close dictionary + newDictContent += '}'; + + // Only update if new keywords were added + if (addedCount > 0) { + // Replace original dictionary in file + const newContent = content.replace(dictRegex, newDictContent); + + // Write back to file + fs.writeFileSync(pythonFile, newContent); + console.log(`Successfully added ${addedCount} new keyword mappings!`); + } else { + console.log('No new keywords were added (all already exist).'); + } + } catch (error) { + console.error('Error updating keywords:', error); + } +} + +/** + * Apply texture/color analysis updates based on analysis + * @param {Array} errorPatterns Significant error patterns + */ +function applyTextureUpdates(errorPatterns) { + if (!UPDATE_TEXTURES) return; + + console.log('\nApplying texture/color analysis updates...'); + + // Path to Python classification file + const pythonFile = path.join(__dirname, '../../model/recipeImageClassification.py'); + + try { + // Read Python file + const content = fs.readFileSync(pythonFile, 'utf8'); + + let updatedContent = content; + let updateCount = 0; + + // Look for error patterns that could be texture/color related + errorPatterns.forEach(([pattern, count]) => { + const [_, correctClass] = pattern.split('_to_'); + + // Look for white+complex texture classification section + if (correctClass === 'sushi') { + const textureSection = /# Add white\+complex texture classification[\s\S]*?prediction = '[^']+'/; + const textureMatch = content.match(textureSection); + + if (textureMatch) { + const updatedSection = textureMatch[0].replace( + /prediction = '[^']+'/, + `prediction = 'sushi'` + ); + + updatedContent = updatedContent.replace(textureMatch[0], updatedSection); + updateCount++; + } + } + + // Update color_to_food or food_categories as needed for other classes + // This would need to be customized based on the specific needs + }); + + // Only update if changes were made + if (updateCount > 0) { + fs.writeFileSync(pythonFile, updatedContent); + console.log(`Updated ${updateCount} texture/color analysis rules.`); + } else { + console.log('No texture/color analysis rules needed updating.'); + } + } catch (error) { + console.error('Error updating texture/color analysis:', error); + } +} + +/** + * Main function to orchestrate the optimization process + */ +async function optimizeFromFeedback() { + console.log('Starting feedback-based optimization...'); + console.log(`Minimum occurrence threshold: ${MIN_FEEDBACK_COUNT}`); + + const analysis = await analyzeFeedbackData(); + + console.log(`\nFound ${analysis.totalFeedback} feedback entries`); + console.log(`Identified ${analysis.errorPatterns.length} significant error patterns:`); + + analysis.errorPatterns.forEach(([pattern, count]) => { + const [wrong, correct] = pattern.split('_to_'); + console.log(`- ${wrong} → ${correct}: ${count} occurrences`); + }); + + // Apply updates based on analysis + applyMappingUpdates(analysis.errorPatterns); + applyKeywordUpdates(analysis.keywordSuggestions); + applyTextureUpdates(analysis.errorPatterns); + + console.log('\nOptimization complete! The system has been updated based on user feedback.'); + console.log('Run a test to see the improvements:'); + console.log('node tools/test/test_image_classification.js uploads/your_test_image.jpg'); +} + +// Run the optimization +optimizeFromFeedback(); \ No newline at end of file diff --git a/tools/feedback/collect_feedback.js b/tools/feedback/collect_feedback.js new file mode 100644 index 0000000..882978a --- /dev/null +++ b/tools/feedback/collect_feedback.js @@ -0,0 +1,103 @@ +/** + * Image Classification Feedback Collection Tool + * + * This tool collects user feedback on image classification results to improve accuracy + * Usage: node tools/feedback/collect_feedback.js + * + * Example: node tools/feedback/collect_feedback.js ./uploads/sushi.jpg "sushi" + */ + +// Ensure environment variables are loaded first +require('dotenv').config(); + +const fs = require('fs'); +const path = require('path'); +const addImageClassificationFeedback = require('../../model/addImageClassificationFeedback'); + +// Check Supabase credentials +if (!process.env.SUPABASE_URL || !process.env.SUPABASE_ANON_KEY) { + console.error('Error: Missing Supabase credentials in environment variables.'); + console.error('Please make sure your .env file contains SUPABASE_URL and SUPABASE_ANON_KEY'); + process.exit(1); +} + +// Configuration +const IMAGE_PATH = process.argv[2]; +const CORRECT_CLASS = process.argv[3]; + +// Show help +if (!IMAGE_PATH || !CORRECT_CLASS) { + console.log('Usage: node tools/feedback/collect_feedback.js '); + console.log('Example: node tools/feedback/collect_feedback.js ./uploads/sushi.jpg "sushi"'); + process.exit(1); +} + +// Check if image exists +if (!fs.existsSync(IMAGE_PATH)) { + console.error(`Error: Image does not exist: ${IMAGE_PATH}`); + process.exit(1); +} + +// Get predicted class from file if it exists +let predictedClass = 'unknown'; +const predictionFile = path.join(path.dirname(IMAGE_PATH), 'last_prediction.txt'); + +if (fs.existsSync(predictionFile)) { + try { + predictedClass = fs.readFileSync(predictionFile, 'utf8').trim(); + } catch (err) { + console.error('Failed to read prediction file:', err); + } +} + +// Collect metadata for analysis +const metadata = { + timestamp: Date.now(), + filename: path.basename(IMAGE_PATH), + filesize: fs.statSync(IMAGE_PATH).size, + source: 'feedback_tool' +}; + +// Send feedback to Supabase +(async () => { + try { + console.log('Submitting feedback to database...'); + console.log(`Using Supabase URL: ${process.env.SUPABASE_URL.substring(0, 15)}...`); + + // User ID is null here as this is a command-line tool + // In a web application, you would include the actual user ID + const result = await addImageClassificationFeedback( + null, + IMAGE_PATH, + predictedClass, + CORRECT_CLASS, + metadata + ); + + console.log('Feedback submitted successfully!'); + console.log(`Image: ${path.basename(IMAGE_PATH)}`); + console.log(`Predicted as: ${predictedClass}`); + console.log(`Corrected to: ${CORRECT_CLASS}`); + + // Explain next steps + console.log('\nYour feedback will help improve the recognition accuracy'); + console.log('\nYou can analyze collected feedback using:'); + console.log('1. Check all collected feedback: node tools/feedback/analyze_feedback.js'); + console.log('2. Analyze feedback for specific class: node tools/feedback/analyze_feedback.js '); + console.log(' Example: node tools/feedback/analyze_feedback.js sushi'); + } catch (error) { + console.error('Failed to submit feedback:', error); + + // More detailed error handling + if (error.message && error.message.includes('supabaseUrl is required')) { + console.error('\nSUPABASE_URL environment variable is not being loaded properly.'); + console.error('Current environment variables:'); + console.error(`SUPABASE_URL: ${process.env.SUPABASE_URL || 'not set'}`); + console.error(`SUPABASE_ANON_KEY: ${process.env.SUPABASE_ANON_KEY ? 'set (hidden)' : 'not set'}`); + } else if (error.message && error.message.includes('auth/invalid_credentials')) { + console.error('\nInvalid Supabase credentials. Please check your SUPABASE_URL and SUPABASE_ANON_KEY.'); + } + + process.exit(1); + } +})(); \ No newline at end of file diff --git a/tools/feedback/display_feedback.js b/tools/feedback/display_feedback.js new file mode 100644 index 0000000..eb6e203 --- /dev/null +++ b/tools/feedback/display_feedback.js @@ -0,0 +1,86 @@ +/** + * Display Image Classification Feedback + * + * This script displays all feedback data from the Supabase database + * It's a simpler version of analyze_feedback.js that avoids permission issues + */ + +// Ensure environment variables are loaded first +require('dotenv').config(); + +const { createClient } = require('@supabase/supabase-js'); + +// Check Supabase credentials +if (!process.env.SUPABASE_URL || !process.env.SUPABASE_ANON_KEY) { + console.error('Error: Missing Supabase credentials in environment variables.'); + console.error('Please make sure your .env file contains SUPABASE_URL and SUPABASE_ANON_KEY'); + process.exit(1); +} + +// Create a direct Supabase client to avoid any potential configuration issues +const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY); + +console.log(`Using Supabase URL: ${process.env.SUPABASE_URL.substring(0, 15)}...`); + +// Display all feedback data +async function displayFeedback() { + try { + console.log('Loading feedback data from database...'); + + // Query the feedback data from Supabase using a direct query + // that doesn't rely on user permissions at all + const { data: feedbackData, error } = await supabase + .from('image_classification_feedback') + .select('id, filename, predicted_class, correct_class, created_at') + .order('created_at', { ascending: false }); + + if (error) { + console.error('Error retrieving feedback data:', error); + + if (error.message && error.message.includes('does not exist')) { + console.error('\nTable "image_classification_feedback" does not exist in your Supabase database.'); + console.error('Please run the SQL script in setup/create_feedback_table.sql in your Supabase SQL Editor.'); + } else if (error.message && error.message.includes('permission denied')) { + console.error('\nPermission denied when accessing the database.'); + console.error('This might be due to Row Level Security (RLS) policies in Supabase.'); + console.error('You can try:'); + console.error('1. Checking your RLS policies in the Supabase dashboard'); + console.error('2. Making sure you\'re using the correct credentials'); + console.error('3. Creating a simplified view with public access for read operations'); + } + + process.exit(1); + } + + if (!feedbackData || feedbackData.length === 0) { + console.log('No feedback data found. Please collect feedback using tools/feedback/collect_feedback.js first.'); + process.exit(0); + } + + console.log(`\nFound ${feedbackData.length} feedback records:`); + console.log('----------------------------------------------------------------'); + console.log('ID | Filename | Predicted | Corrected | Created At'); + console.log('----------------------------------------------------------------'); + + feedbackData.forEach(item => { + // Format the data for display + const id = item.id.substring(0, 18) + '...'; + const filename = (item.filename || '').padEnd(12).substring(0, 12); + const predicted = (item.predicted_class || '').padEnd(12).substring(0, 12); + const corrected = (item.correct_class || '').padEnd(12).substring(0, 12); + const createdAt = new Date(item.created_at).toLocaleString(); + + console.log(`${id} | ${filename} | ${predicted} | ${corrected} | ${createdAt}`); + }); + + console.log('\nTo provide feedback for a specific image:'); + console.log('node tools/feedback/collect_feedback.js '); + + } catch (error) { + console.error('Error:', error); + process.exit(1); + } +} + +// Run the script +displayFeedback(); \ No newline at end of file diff --git a/tools/feedback/generate_improvements.js b/tools/feedback/generate_improvements.js new file mode 100644 index 0000000..1059246 --- /dev/null +++ b/tools/feedback/generate_improvements.js @@ -0,0 +1,325 @@ +/** + * Generate Improvement Suggestions Based on Feedback Data + * + * Analyzes collected feedback data and generates specific code improvement suggestions + */ + +const fs = require('fs'); +const path = require('path'); + +// Configuration +const FEEDBACK_DIR = path.join(__dirname, '../../feedback_data'); +const FEEDBACK_FILE = path.join(FEEDBACK_DIR, 'feedback.json'); +const PYTHON_FILE = '../../model/recipeImageClassification.py'; + +// Check if feedback data exists +if (!fs.existsSync(FEEDBACK_FILE)) { + console.log('No feedback data found. Please collect feedback using collect_feedback.js first.'); + process.exit(0); +} + +// Check if Python script exists +if (!fs.existsSync(PYTHON_FILE)) { + console.log(`Python script not found: ${PYTHON_FILE}`); + process.exit(1); +} + +// Load feedback data +let feedbackData = []; +try { + const data = fs.readFileSync(FEEDBACK_FILE, 'utf8'); + feedbackData = JSON.parse(data); + console.log(`Loaded ${feedbackData.length} feedback records`); +} catch (err) { + console.error('Failed to read feedback data:', err); + process.exit(1); +} + +if (feedbackData.length === 0) { + console.log('Feedback data is empty. Please collect feedback using collect_feedback.js first.'); + process.exit(0); +} + +// Load Python script content +let pythonContent = ''; +try { + pythonContent = fs.readFileSync(PYTHON_FILE, 'utf8'); + console.log('Loaded Python script'); +} catch (err) { + console.error('Failed to read Python script:', err); + process.exit(1); +} + +// Analyze feedback data, find most common classes +const classCounts = {}; +feedbackData.forEach(feedback => { + const className = feedback.correct_class.toLowerCase(); + + if (!classCounts[className]) { + classCounts[className] = 0; + } + + classCounts[className]++; +}); + +// Sort classes by count +const sortedClasses = Object.keys(classCounts).sort((a, b) => { + return classCounts[b] - classCounts[a]; +}); + +// Generate improvement suggestions +console.log('\nImprovement Suggestions Based on Feedback Data:'); +console.log('===============================\n'); + +// 1. Keyword matching suggestions +console.log('1. Keyword Matching Suggestions:'); +console.log('------------------'); + +// Check if there are keywords that need to be added to DISH_OVERRIDES +const suggestedKeywords = {}; +sortedClasses.forEach(className => { + // Generate possible keywords for each class + const keywords = generateKeywordsForClass(className); + + keywords.forEach(keyword => { + // Check if keyword already exists in Python script + if (!pythonContent.includes(`"${keyword}": `)) { + // Determine which existing class this should map to + const mappedClass = mapToExistingClass(className); + suggestedKeywords[keyword] = mappedClass; + } + }); +}); + +if (Object.keys(suggestedKeywords).length > 0) { + console.log('Recommended keyword mappings to add:'); + + let code = 'const newKeywords = {\n'; + for (const [keyword, mappedClass] of Object.entries(suggestedKeywords)) { + code += ` "${keyword}": "${mappedClass}", // Corresponding class: ${getOriginalClass(keyword)}\n`; + } + code += '};\n'; + + console.log(code); + console.log('You can add this code to tools/image_classification/add_keywords.js to use it.'); +} else { + console.log('No new keywords found that need to be added.'); +} + +// 2. Custom class suggestions +console.log('\n2. Custom Class Suggestions:'); +console.log('------------------'); + +const customClasses = []; +sortedClasses.forEach(className => { + // Check if it's a custom class (not in original model) + if (!isInOriginalModel(className, pythonContent)) { + customClasses.push(className); + } +}); + +if (customClasses.length > 0) { + console.log('The following classes are not in the original model, consider adding to custom_food_types:'); + + let code = '// In the Python script, find the custom_food_types dictionary and add the following:\n'; + code += 'custom_food_types = {\n'; + customClasses.forEach(className => { + const mappedClass = mapToExistingClass(className); + code += ` '${className}': '${mappedClass}', // Map ${className} to ${mappedClass}\n`; + }); + code += ' // Keep existing entries\n'; + code += '}\n'; + + console.log(code); +} + +// 3. Color and texture analysis suggestions +console.log('\n3. Color and Texture Analysis Suggestions:'); +console.log('------------------------'); + +// Check if there are special food types that need specific color and texture rules +const specialClasses = customClasses.filter(cls => classCounts[cls] >= 3); + +if (specialClasses.length > 0) { + console.log('The following classes appear frequently, recommend adding specific color and texture rules:'); + + specialClasses.forEach(className => { + const { color, texture } = suggestColorAndTexture(className); + console.log(`\nAdd specific rules for "${className}":`); + + let code = '# In the predict_class function, find the "Combine color and texture" section, add the following condition:\n'; + code += `elif dominant_color == '${color}' and texture_type == '${texture}':\n`; + code += ` # Possible ${className}\n`; + const mappedClass = mapToExistingClass(className); + code += ` prediction = '${mappedClass}'\n`; + code += ` debug_log(f"${color} + ${texture} texture detected: possible ${className}, classified as {prediction}")\n`; + + console.log(code); + }); +} + +// 4. Filename detection suggestions +console.log('\n4. Add filename detection for these custom classes:'); +console.log('------------------'); + +if (customClasses.length > 0) { + console.log('Add filename detection for these custom classes:'); + + let code = '# In the predict_class function, find the special handling section, add the following code:\n'; + customClasses.forEach(className => { + code += `\n# Special handling for ${className} category\n`; + code += `if "${className}" in file_name.lower():\n`; + code += ` debug_log(f"Detected ${className} in filename: {file_name}")\n`; + const mappedClass = mapToExistingClass(className); + code += ` return "${mappedClass}" # Return best match for ${className}\n`; + }); + + console.log(code); +} + +// 5. Summary suggestions +console.log('\n5. Summary Suggestions:'); +console.log('--------------'); +console.log('Based on feedback data, we recommend the following actions to improve recognition accuracy:'); +console.log('1. Add more keyword mappings, especially for common custom classes'); +console.log('2. For high-frequency classes, add specialized color and texture analysis rules'); +console.log('3. Enhance filename detection, especially for commonly confused classes'); +console.log('4. Continue collecting more feedback data, especially for classes with high error rates'); + +if (sortedClasses.length > 0) { + console.log('\nClasses to focus on:'); + const topClasses = sortedClasses.slice(0, Math.min(3, sortedClasses.length)); + topClasses.forEach(className => { + console.log(`- ${className} (${classCounts[className]} feedback entries)`); + }); +} + +// Helper functions + +// Generate possible keywords for a class +function generateKeywordsForClass(className) { + const keywords = [className]; + + // Add variants + if (className.length > 3) { + // Add truncated variant + keywords.push(className.substring(0, Math.ceil(className.length * 0.7))); + } + + // Add common variants for specific classes + if (className === 'sushi') { + keywords.push('sushi_variant1', 'sushi_variant2', 'sushi_variant3', 'sushi_variant4'); + } else if (className === 'pizza') { + keywords.push('pizza_alt', 'flatbread', 'pie'); + } else if (className === 'curry') { + keywords.push('curry_alt', 'spicy_sauce'); + } else if (className === 'noodle' || className === 'noodles') { + keywords.push('pasta', 'ramen', 'udon'); + } else if (className === 'rice') { + keywords.push('grain', 'rice_bowl'); + } + + return keywords; +} + +// Map to existing class +function mapToExistingClass(className) { + // Map common classes + const mappings = { + 'sushi': 'mussels', + 'pizza': 'pizza', + 'curry': 'chicken_curry', + 'noodle': 'ramen', + 'noodles': 'ramen', + 'rice': 'fried_rice', + 'hamburger': 'hamburger', + 'pasta': 'spaghetti_bolognese', + 'steak': 'steak', + 'salad': 'greek_salad', + 'soup': 'miso_soup', + 'cake': 'chocolate_cake', + 'ice_cream': 'ice_cream', + 'bread': 'garlic_bread' + }; + + if (mappings[className]) { + return mappings[className]; + } + + // No direct mapping, choose appropriate class + if (className.includes('roll') || className.includes('sushi')) { + return 'mussels'; + } else if (className.includes('noodle')) { + return 'ramen'; + } else if (className.includes('rice')) { + return 'fried_rice'; + } else if (className.includes('salad')) { + return 'greek_salad'; + } else if (className.includes('soup')) { + return 'miso_soup'; + } else if (className.includes('cake') || className.includes('dessert')) { + return 'chocolate_cake'; + } else if (className.includes('meat') || className.includes('beef')) { + return 'steak'; + } else if (className.includes('chicken')) { + return 'chicken_wings'; + } else if (className.includes('fish') || className.includes('seafood')) { + return 'mussels'; + } + + // Default to common class + return 'edamame'; +} + +// Get original class for keyword +function getOriginalClass(keyword) { + // Special cases + if (keyword.includes('sushi_variant1') || keyword.includes('sushi_variant2') || keyword.includes('sushi_variant3')) { + return 'sushi'; + } else if (keyword.includes('pizza_alt') || keyword.includes('flatbread')) { + return 'pizza'; + } else if (keyword.includes('curry_alt')) { + return 'curry'; + } else if (keyword.includes('pasta') || keyword.includes('ramen')) { + return 'noodles'; + } else if (keyword.includes('grain') || keyword.includes('rice_bowl')) { + return 'rice'; + } + + // Default return keyword itself + return keyword; +} + +// Check if class is in original model +function isInOriginalModel(className, pythonContent) { + // Check if className is in class_mapping values + const regex = new RegExp(`'${className}'`, 'i'); + return regex.test(pythonContent); +} + +// Suggest color and texture for class +function suggestColorAndTexture(className) { + // Specific class suggestions + const suggestions = { + 'sushi': { color: 'white', texture: 'complex' }, + 'pizza': { color: 'red', texture: 'complex' }, + 'curry': { color: 'orange', texture: 'medium' }, + 'noodle': { color: 'beige', texture: 'medium' }, + 'noodles': { color: 'beige', texture: 'medium' }, + 'rice': { color: 'white', texture: 'medium' }, + 'hamburger': { color: 'brown', texture: 'complex' }, + 'pasta': { color: 'beige', texture: 'medium' }, + 'steak': { color: 'red', texture: 'medium' }, + 'salad': { color: 'green', texture: 'complex' }, + 'soup': { color: 'dark', texture: 'smooth' }, + 'cake': { color: 'brown', texture: 'regular' }, + 'ice_cream': { color: 'white', texture: 'smooth' } + }; + + if (suggestions[className]) { + return suggestions[className]; + } + + // Default suggestion + return { color: 'beige', texture: 'medium' }; +} \ No newline at end of file diff --git a/tools/feedback/scheduled_optimization.js b/tools/feedback/scheduled_optimization.js new file mode 100644 index 0000000..d2b5ab2 --- /dev/null +++ b/tools/feedback/scheduled_optimization.js @@ -0,0 +1,149 @@ +/** + * Scheduled Feedback-Based Optimization + * + * This script is designed to be run on a schedule (e.g., daily or weekly) + * to automatically apply optimizations to the image classification system + * based on user feedback data. + * + * Usage: node tools/feedback/scheduled_optimization.js + */ + +const { execSync } = require('child_process'); +const fs = require('fs'); +const path = require('path'); + +// Configuration +const LOG_FILE = path.join(__dirname, '../../logs/optimization_history.log'); +const MIN_FEEDBACK_THRESHOLD = 3; // Minimum feedback count to trigger optimizations +const BACKUP_BEFORE_UPDATES = true; // Whether to backup Python file before updates + +// Ensure log directory exists +const logDir = path.dirname(LOG_FILE); +if (!fs.existsSync(logDir)) { + fs.mkdirSync(logDir, { recursive: true }); +} + +/** + * Log message to console and log file + * @param {string} message - Message to log + */ +function logMessage(message) { + const timestamp = new Date().toISOString(); + const logEntry = `[${timestamp}] ${message}`; + + console.log(logEntry); + + // Append to log file + fs.appendFileSync(LOG_FILE, logEntry + '\n'); +} + +/** + * Create backup of Python classification file + */ +function backupClassificationFile() { + if (!BACKUP_BEFORE_UPDATES) return; + + const pythonFile = path.join(__dirname, '../../model/recipeImageClassification.py'); + const backupDir = path.join(__dirname, '../../backups'); + + // Ensure backup directory exists + if (!fs.existsSync(backupDir)) { + fs.mkdirSync(backupDir, { recursive: true }); + } + + // Create backup with timestamp + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const backupFile = path.join(backupDir, `recipeImageClassification_${timestamp}.py`); + + try { + fs.copyFileSync(pythonFile, backupFile); + logMessage(`Created backup: ${backupFile}`); + return true; + } catch (error) { + logMessage(`Error creating backup: ${error.message}`); + return false; + } +} + +/** + * Run the feedback-based optimization script + */ +function runOptimization() { + try { + logMessage('Starting scheduled optimization...'); + + // Backup classification file + backupClassificationFile(); + + // Run feedback analysis first to see if optimization is needed + logMessage('Running feedback analysis...'); + const analyzeCommand = 'node tools/feedback/analyze_feedback.js'; + + try { + const analysisOutput = execSync(analyzeCommand, { encoding: 'utf8' }); + + // Log abbreviated analysis output + const analysisLines = analysisOutput.split('\n').slice(0, 20); + if (analysisOutput.split('\n').length > 20) { + analysisLines.push('...'); + } + logMessage('Analysis output:\n' + analysisLines.join('\n')); + + // Check if we have enough feedback data to proceed + if (analysisOutput.includes('No feedback data found')) { + logMessage('Insufficient feedback data. Optimization skipped.'); + return; + } + } catch (error) { + logMessage(`Error running analysis: ${error.message}`); + return; + } + + // Run the optimization with the threshold + logMessage(`Running optimization with threshold ${MIN_FEEDBACK_THRESHOLD}...`); + const optimizeCommand = `node tools/feedback/apply_feedback_improvements.js ${MIN_FEEDBACK_THRESHOLD}`; + + try { + const optimizationOutput = execSync(optimizeCommand, { encoding: 'utf8' }); + + // Log abbreviated optimization output + const outputLines = optimizationOutput.split('\n').slice(0, 30); + if (optimizationOutput.split('\n').length > 30) { + outputLines.push('...'); + } + logMessage('Optimization output:\n' + outputLines.join('\n')); + + // Check if any improvements were made + if (optimizationOutput.includes('Optimization complete')) { + // Run a test after optimization + logMessage('Running test to verify optimization...'); + const testImages = fs.readdirSync(path.join(__dirname, '../../uploads')) + .filter(file => /\.(jpg|jpeg|png)$/i.test(file)); + + if (testImages.length > 0) { + // Test with a random image + const testImage = testImages[Math.floor(Math.random() * testImages.length)]; + const testCommand = `node tools/test/test_image_classification.js uploads/${testImage}`; + + try { + const testOutput = execSync(testCommand, { encoding: 'utf8' }); + logMessage(`Test result for ${testImage}:\n${testOutput.split('\n').slice(-5).join('\n')}`); + } catch (error) { + logMessage(`Error running test: ${error.message}`); + } + } + } else { + logMessage('No improvements were made.'); + } + } catch (error) { + logMessage(`Error running optimization: ${error.message}`); + } + + logMessage('Scheduled optimization completed.'); + } catch (error) { + logMessage(`Unexpected error: ${error.message}`); + } +} + +// Run the main function +runOptimization(); \ No newline at end of file diff --git a/tools/image_classification/add_class.js b/tools/image_classification/add_class.js new file mode 100644 index 0000000..c3be135 --- /dev/null +++ b/tools/image_classification/add_class.js @@ -0,0 +1,185 @@ +/** + * Add Class to Classification Model + * + * Adds a new class to the class_mapping in recipeImageClassification.py + * Usage: node tools/image_classification/add_class.js + * + * Example: node tools/image_classification/add_class.js sushi + */ + +const fs = require('fs'); +const path = require('path'); + +// Arguments +const CLASS_NAME = process.argv[2]; // Class name to add + +// Show help if no class name provided +if (!CLASS_NAME) { + console.log('Usage: node tools/image_classification/add_class.js '); + console.log('Example: node tools/image_classification/add_class.js sushi'); + process.exit(1); +} + +// Path to Python classification file +const PYTHON_FILE = path.join(__dirname, '../../model/recipeImageClassification.py'); + +// Check if file exists +if (!fs.existsSync(PYTHON_FILE)) { + console.error(`Error: Python file not found: ${PYTHON_FILE}`); + process.exit(1); +} + +// Read the Python file +try { + console.log(`Reading Python file: ${PYTHON_FILE}`); + let content = fs.readFileSync(PYTHON_FILE, 'utf8'); + + // Find class_mapping dictionary + const classMappingRegex = /class_mapping = \{[^}]*\}/s; + const classMappingMatch = content.match(classMappingRegex); + + if (!classMappingMatch) { + console.error('Could not find class_mapping dictionary in Python file'); + process.exit(1); + } + + const classMappingDict = classMappingMatch[0]; + + // Check if the class already exists in the mapping + const classRegex = new RegExp(`['"]\\d+['"]:\\s*['"]${CLASS_NAME}['"]`, 'i'); + + if (classMappingDict.match(classRegex)) { + console.log(`Class '${CLASS_NAME}' already exists in class_mapping`); + process.exit(0); + } + + // Find the highest class index + const indexRegex = /(\d+):/g; + let match; + let highestIndex = -1; + + while ((match = indexRegex.exec(classMappingDict)) !== null) { + const index = parseInt(match[1], 10); + if (index > highestIndex) { + highestIndex = index; + } + } + + const newIndex = highestIndex + 1; + console.log(`Adding new class '${CLASS_NAME}' with index ${newIndex}`); + + // Add the new class to the mapping + const insertPoint = classMappingDict.lastIndexOf('}'); + const newClassMappingDict = + classMappingDict.substring(0, insertPoint) + + ` ${newIndex}: '${CLASS_NAME}'\n` + + classMappingDict.substring(insertPoint); + + // Replace the dictionary in the file + const newContent = content.replace(classMappingDict, newClassMappingDict); + + // Now, check if we need to add the class to food_categories + const updateFoodCategories = () => { + // Common food category mappings + const categoryMappings = { + 'sushi': 'japanese', + 'ramen': 'japanese', + 'pizza': 'italian', + 'pasta': 'italian', + 'burger': 'american', + 'hamburger': 'american', + 'salad': 'salad', + 'curry': 'indian', + 'rice': 'asian', + 'cake': 'dessert', + 'ice_cream': 'dessert' + }; + + let category = 'other'; + + // Determine appropriate category + for (const [key, value] of Object.entries(categoryMappings)) { + if (CLASS_NAME.includes(key)) { + category = value; + break; + } + } + + // Find food_categories dictionary + const foodCategoriesRegex = /food_categories = \{[^}]*\}/s; + const foodCategoriesMatch = newContent.match(foodCategoriesRegex); + + if (!foodCategoriesMatch) { + console.log('Could not find food_categories dictionary'); + return newContent; + } + + const foodCategoriesDict = foodCategoriesMatch[0]; + + // Check if the category exists + const categoryRegex = new RegExp(`['"]${category}['"]:\\s*\\[[^\\]]*\\]`); + const categoryMatch = foodCategoriesDict.match(categoryRegex); + + if (!categoryMatch) { + console.log(`Category '${category}' not found in food_categories`); + return newContent; + } + + // Check if the class is already in the category list + const classInCategoryRegex = new RegExp(`['"]${CLASS_NAME}['"]`); + if (categoryMatch[0].match(classInCategoryRegex)) { + console.log(`Class '${CLASS_NAME}' already exists in category '${category}'`); + return newContent; + } + + // Add the class to the category list + const categoryList = categoryMatch[0]; + const listEndIndex = categoryList.lastIndexOf(']'); + + let newCategoryList; + if (categoryList.substring(0, listEndIndex).trim().endsWith(',')) { + // List already has a trailing comma + newCategoryList = + categoryList.substring(0, listEndIndex) + + ` '${CLASS_NAME}'` + + categoryList.substring(listEndIndex); + } else { + // No trailing comma, need to add one + const listStartIndex = categoryList.indexOf('[') + 1; + if (listStartIndex === listEndIndex) { + // Empty list + newCategoryList = + categoryList.substring(0, listStartIndex) + + `'${CLASS_NAME}'` + + categoryList.substring(listEndIndex); + } else { + // Non-empty list, add with comma + newCategoryList = + categoryList.substring(0, listEndIndex) + + `, '${CLASS_NAME}'` + + categoryList.substring(listEndIndex); + } + } + + console.log(`Adding '${CLASS_NAME}' to '${category}' category`); + return newContent.replace(categoryList, newCategoryList); + }; + + // Update food_categories + const finalContent = updateFoodCategories(); + + // Write back to file + fs.writeFileSync(PYTHON_FILE, finalContent); + console.log(`\nSuccessfully added '${CLASS_NAME}' to class_mapping!`); + + // Next steps + console.log('\nNext steps:'); + console.log('1. Test the classification:'); + console.log(` node tools/test/test_image_classification.js ./uploads/${CLASS_NAME}.jpg`); + console.log('2. Update keyword mappings:'); + console.log(` node tools/image_classification/update_food_mapping.js ${CLASS_NAME} ${CLASS_NAME}`); + +} catch (err) { + console.error('Error adding class:', err); + process.exit(1); +} \ No newline at end of file diff --git a/tools/image_classification/add_keywords.js b/tools/image_classification/add_keywords.js new file mode 100644 index 0000000..e9552c3 --- /dev/null +++ b/tools/image_classification/add_keywords.js @@ -0,0 +1,120 @@ +/** + * Add Keywords Matching Tool + * + * Adds new keyword mappings to recipeImageClassification.py file + * Usage: node tools/image_classification/add_keywords.js + */ + +const fs = require('fs'); +const path = require('path'); + +// Keywords to add, format: "keyword": "match_result" +// Adding more keyword mappings for sushi and other common Asian foods +const newKeywords = { + // Sushi related + "sushi": "sushi", // Now mapping to proper sushi class + "sushi_jp": "sushi", // Japanese writing placeholder + "sushi_trad": "sushi", // Traditional Chinese placeholder + "sushi_hiragana": "sushi", // Japanese hiragana placeholder + "sushi_katakana": "sushi", // Japanese katakana placeholder + "sushi_alt": "sushi", // Alternative Japanese writing placeholder + "sashimi": "sushi", // Sashimi + "maki": "sushi", // Rolled sushi + "nigiri": "sushi", // Hand-pressed sushi + "temaki": "sushi", // Hand roll + "uramaki": "sushi", // Inside-out roll + "chirashi": "sushi", // Scattered sushi + "california": "sushi", // California roll + "dragon": "sushi", // Dragon roll + "philadelphia": "sushi", // Philadelphia roll + "salmon": "sushi", // Salmon (when likely in sushi context) + "tuna": "sushi", // Tuna (when likely in sushi context) + "unagi": "sushi", // Eel + "wasabi": "sushi", // Wasabi (hints at sushi) + + // Asian foods + "noodles_cn": "ramen", // Noodles (Chinese placeholder) + "ramen_cn": "ramen", // Ramen (Chinese placeholder) + "ramen_jp": "ramen", // Ramen (Japanese placeholder) + "udon_jp": "ramen", // Udon placeholder + "soba_jp": "ramen", // Soba placeholder + "rice_cn": "fried_rice", // Rice (Chinese placeholder) + "rice_simple": "fried_rice", // Rice (simplified placeholder) + "fried_rice_cn": "fried_rice", // Fried rice placeholder + "fried_rice_jp": "fried_rice", // Fried rice (Japanese placeholder) + + // Western foods + "pasta_cn": "spaghetti_bolognese", // Pasta (Chinese placeholder) + "pasta_jp": "spaghetti_bolognese", // Pasta (Japanese placeholder) + "macaroni_cn": "macaroni_cheese", // Macaroni (Chinese placeholder) + "pizza_cn": "pizza", // Pizza (Chinese placeholder) + "flatbread_cn": "pizza", // Alternative term for pizza + "pizza_jp": "pizza", // Pizza (Japanese placeholder) + "hamburger_cn": "hamburger", // Hamburger (Chinese placeholder) + "hamburger_jp": "hamburger", // Hamburger (Japanese placeholder) + + // Common foods + "curry_cn": "chicken_curry", // Curry (Chinese placeholder) + "curry_jp": "chicken_curry", // Curry (Japanese placeholder) + "salad_cn": "greek_salad", // Salad (Chinese placeholder) + "salad_jp": "greek_salad", // Salad (Japanese placeholder) + "cake_cn": "chocolate_cake", // Cake (Chinese placeholder) + "cake_jp": "chocolate_cake", // Cake (Japanese placeholder) + "ice_cream_cn": "ice_cream", // Ice cream (Chinese placeholder) + "ice_cream_jp": "ice_cream" // Ice cream (Japanese placeholder) +}; + +// Read Python file +const pythonFile = '../../model/recipeImageClassification.py'; + +try { + console.log('Reading Python file...'); + const content = fs.readFileSync(pythonFile, 'utf8'); + + // Find DISH_OVERRIDES dictionary + const dictRegex = /DISH_OVERRIDES = \{[^}]*\}/s; + const dictMatch = content.match(dictRegex); + + if (dictMatch) { + // Extract current dictionary content + let dictContent = dictMatch[0]; + + console.log('Found DISH_OVERRIDES dictionary, preparing to add new keywords...'); + + // Add new keywords at the end of the dictionary (after the last item) + const insertPoint = dictContent.lastIndexOf('}'); + let newDictContent = dictContent.substring(0, insertPoint); + + // Check if keywords already exist + let addedCount = 0; + let skippedCount = 0; + + for (const [keyword, result] of Object.entries(newKeywords)) { + if (!content.includes(`"${keyword}": `)) { + newDictContent += ` "${keyword}": "${result}",\n`; + addedCount++; + } else { + console.log(`Skipping existing keyword: "${keyword}"`); + skippedCount++; + } + } + + // Close dictionary + newDictContent += '}'; + + // Replace original dictionary in file + const newContent = content.replace(dictRegex, newDictContent); + + // Write back to file + fs.writeFileSync(pythonFile, newContent); + console.log(`Successfully added ${addedCount} new keyword mappings!`); + + if (skippedCount > 0) { + console.log(`Skipped ${skippedCount} existing keywords.`); + } + } else { + console.error('Could not find DISH_OVERRIDES dictionary in Python file'); + } +} catch (err) { + console.error('Error occurred:', err); +} \ No newline at end of file diff --git a/tools/image_classification/fix_model.py b/tools/image_classification/fix_model.py new file mode 100644 index 0000000..dd24117 --- /dev/null +++ b/tools/image_classification/fix_model.py @@ -0,0 +1,165 @@ +""" +Model Test Generator for Food Classification + +This script creates a simplified TensorFlow model for testing the image classification API. +The model classifies images based on their dominant colors, making it useful for testing +without requiring a real pre-trained model. + +Usage: python tools/image_classification/fix_model.py +""" + +import tensorflow as tf +import numpy as np +import os +import random + +print("Creating a color-based classification model for testing...") + +# Create a very basic model with minimal layers +model = tf.keras.Sequential([ + tf.keras.layers.Conv2D(8, (3, 3), activation='relu', input_shape=(224, 224, 3)), + tf.keras.layers.MaxPooling2D((2, 2)), + tf.keras.layers.Flatten(), + tf.keras.layers.Dense(16, activation='relu'), + tf.keras.layers.Dense(43, activation='softmax') # 43 food classes +]) + +model.compile(optimizer='adam', loss='categorical_crossentropy') + +# Fix seed for reproducibility +random.seed(42) +np.random.seed(42) + +# Primary food categories mapped to dominant colors +# This allows the model to predict different food classes based on image colors +color_groups = { + 'red': [9, 37, 41], # curry, pizza, steak + 'green': [5, 7, 24], # salads + 'yellow': [0, 8, 22, 27], # apple pie, carrot cake, frozen yogurt, ice cream + 'brown': [1, 28, 39, 40], # ribs, lasagne, spaghetti dishes + 'white': [14, 30, 34] # cupcakes, macarons, omelette +} + +# Manual bias approach - to ensure the model doesn't always predict the same class +biases = np.zeros(43) +for i in range(43): + biases[i] = -10.0 # All classes heavily biased against by default + +# Set up more reasonable biases for key classes we want to see frequently +biases[0] = -2.0 # Apple pie +biases[5] = -2.0 # Caesar salad +biases[9] = -2.0 # Chicken curry +biases[14] = -2.0 # Cupcakes +biases[24] = -2.0 # Greek salad +biases[26] = -2.0 # Hamburger +biases[28] = -2.0 # Lasagne +biases[37] = -2.0 # Pizza +biases[39] = -2.0 # Spaghetti bolognese +biases[41] = -2.0 # Steak + +# Create convolutional filters sensitive to different colors +# This makes the model respond differently to images with different dominant colors +filters = np.random.randn(3, 3, 3, 8) * 0.1 # Small random initialization + +# Create red-sensitive filters +filters[:, :, 0, 0] = np.random.rand(3, 3) * 1.5 # Red channel +filters[:, :, 1, 0] = np.random.rand(3, 3) * 0.2 # Green channel +filters[:, :, 2, 0] = np.random.rand(3, 3) * 0.2 # Blue channel + +# Create green-sensitive filters +filters[:, :, 0, 1] = np.random.rand(3, 3) * 0.2 +filters[:, :, 1, 1] = np.random.rand(3, 3) * 1.5 +filters[:, :, 2, 1] = np.random.rand(3, 3) * 0.2 + +# Create blue-sensitive filters +filters[:, :, 0, 2] = np.random.rand(3, 3) * 0.2 +filters[:, :, 1, 2] = np.random.rand(3, 3) * 0.2 +filters[:, :, 2, 2] = np.random.rand(3, 3) * 1.5 + +# Create yellow-sensitive filters (high red + green, low blue) +filters[:, :, 0, 3] = np.random.rand(3, 3) * 1.2 +filters[:, :, 1, 3] = np.random.rand(3, 3) * 1.2 +filters[:, :, 2, 3] = np.random.rand(3, 3) * 0.1 + +# Create brown-sensitive filters +filters[:, :, 0, 4] = np.random.rand(3, 3) * 1.0 +filters[:, :, 1, 4] = np.random.rand(3, 3) * 0.7 +filters[:, :, 2, 4] = np.random.rand(3, 3) * 0.3 + +# Create brightness-sensitive filter +filters[:, :, 0, 5] = np.random.rand(3, 3) * 1.0 +filters[:, :, 1, 5] = np.random.rand(3, 3) * 1.0 +filters[:, :, 2, 5] = np.random.rand(3, 3) * 1.0 + +# The remaining filters can be more random +filters[:, :, :, 6:] = np.random.randn(3, 3, 3, 2) * 0.3 + +# Create the final dense layer weights - mapping from features to output classes +dense_weights = np.zeros((16, 43)) + +# Map filter 0 (red-sensitive) to red foods +for class_id in color_groups['red']: + dense_weights[0, class_id] = 5.0 + +# Map filter 1 (green-sensitive) to green foods +for class_id in color_groups['green']: + dense_weights[1, class_id] = 5.0 + +# Map filter 3 (yellow-sensitive) to yellow foods +for class_id in color_groups['yellow']: + dense_weights[3, class_id] = 5.0 + +# Map filter 4 (brown-sensitive) to brown foods +for class_id in color_groups['brown']: + dense_weights[4, class_id] = 5.0 + +# Map filter 5 (brightness-sensitive) to white foods +for class_id in color_groups['white']: + dense_weights[5, class_id] = 5.0 + +# Set the weights for the model +model.layers[0].set_weights([filters, np.zeros(8)]) # Conv layer +model.layers[-1].set_weights([dense_weights, biases]) # Final dense layer + +# Make sure directory exists +if not os.path.exists('prediction_models'): + os.makedirs('prediction_models') + +# Save the model +model.save('prediction_models/best_model_class.hdf5') + +print("Fixed model created and saved to prediction_models/best_model_class.hdf5") + +# Test with sample data +print("\nTesting model with sample colors...") +test_colors = [ + ('red', np.ones((1, 224, 224, 3)) * [0.8, 0.2, 0.2]), + ('green', np.ones((1, 224, 224, 3)) * [0.2, 0.8, 0.2]), + ('yellow', np.ones((1, 224, 224, 3)) * [0.9, 0.8, 0.2]), + ('brown', np.ones((1, 224, 224, 3)) * [0.6, 0.4, 0.2]), + ('white', np.ones((1, 224, 224, 3)) * [0.9, 0.9, 0.9]) +] + +# Class mapping for output +class_mapping = { + 0: 'apple_pie', 1: 'baby_back_ribs', 2: 'beef_tartare', 3: 'beignets', 4: 'bruschetta', + 5: 'caesar_salad', 6: 'cannoli', 7: 'caprese_salad', 8: 'carrot_cake', 9: 'chicken_curry', + 10: 'chicken_quesadilla', 11: 'chicken_wings', 12: 'chocolate_cake', 13: 'creme_brulee', + 14: 'cup_cakes', 15: 'deviled_eggs', 16: 'donuts', 17: 'dumplings', 18: 'edamame', + 19: 'eggs_benedict', 20: 'french_fries', 21: 'fried_rice', 22: 'frozen_yogurt', + 23: 'garlic_bread', 24: 'greek_salad', 25: 'grilled_cheese_sandwich', 26: 'hamburger', + 27: 'ice_cream', 28: 'lasagne', 29: 'macaroni_cheese', 30: 'macarons', 31: 'miso_soup', + 32: 'mussels', 33: 'nachos', 34: 'omelette', 35: 'onion_rings', 36: 'oysters', + 37: 'pizza', 38: 'ramen', 39: 'spaghetti_bolognese', 40: 'spaghetti_carbonara', + 41: 'steak', 42: 'strawberry_shortcake' +} + +for color_name, color_data in test_colors: + predictions = model.predict(color_data, verbose=0) + top3_indices = np.argsort(predictions[0])[-3:][::-1] + print(f"{color_name.upper()} color prediction:") + for i, idx in enumerate(top3_indices): + print(f" {i+1}. {class_mapping[idx]} ({predictions[0][idx]:.4f})") + +print("\nFixed model is ready. Please restart the server to apply changes.") +print("For real classification, replace with the actual trained model from NutriHelp Teams.") \ No newline at end of file diff --git a/tools/image_classification/update_food_mapping.js b/tools/image_classification/update_food_mapping.js new file mode 100644 index 0000000..b6e381c --- /dev/null +++ b/tools/image_classification/update_food_mapping.js @@ -0,0 +1,133 @@ +/** + * Update Food Mapping Tool + * + * Updates the food mapping in recipeImageClassification.py file + * Usage: node tools/image_classification/update_food_mapping.js + * + * Example: node tools/image_classification/update_food_mapping.js sushi sushi + */ + +const fs = require('fs'); +const path = require('path'); + +// Arguments +const FOOD_NAME = process.argv[2]; // Food name to update +const CLASS_NAME = process.argv[3]; // New class mapping + +// Show help +if (!FOOD_NAME || !CLASS_NAME) { + console.log('Usage: node tools/image_classification/update_food_mapping.js '); + console.log('Example: node tools/image_classification/update_food_mapping.js sushi sushi'); + process.exit(1); +} + +// Path to Python classification file +const PYTHON_FILE = path.join(__dirname, '../../model/recipeImageClassification.py'); + +// Check if file exists +if (!fs.existsSync(PYTHON_FILE)) { + console.error(`Error: Python file not found: ${PYTHON_FILE}`); + process.exit(1); +} + +// Read the Python file +try { + console.log(`Reading Python file: ${PYTHON_FILE}`); + let content = fs.readFileSync(PYTHON_FILE, 'utf8'); + + // Update in custom_food_types + const customFoodRegex = /custom_food_types = \{[^}]*\}/s; + const customFoodMatch = content.match(customFoodRegex); + + if (!customFoodMatch) { + console.error('Could not find custom_food_types dictionary in Python file'); + process.exit(1); + } + + const customFoodDict = customFoodMatch[0]; + + // Check if the food name exists in the custom_food_types dictionary + const foodRegex = new RegExp(`['"]${FOOD_NAME}['"]:\\s*['"]([^'"]+)['"]`, 'i'); + const foodMatch = customFoodDict.match(foodRegex); + + if (foodMatch) { + console.log(`Found mapping for '${FOOD_NAME}' in custom_food_types: '${foodMatch[1]}'`); + console.log(`Updating to '${CLASS_NAME}'...`); + + // Update the mapping + const newCustomFoodDict = customFoodDict.replace( + foodRegex, + `'${FOOD_NAME}': '${CLASS_NAME}'` + ); + + // Replace the dictionary in the file + content = content.replace(customFoodDict, newCustomFoodDict); + } else { + console.log(`No existing mapping found for '${FOOD_NAME}' in custom_food_types`); + + // Add new mapping at the end of the dictionary + const insertPoint = customFoodDict.lastIndexOf('}'); + const newCustomFoodDict = + customFoodDict.substring(0, insertPoint) + + ` '${FOOD_NAME}': '${CLASS_NAME}',\n` + + customFoodDict.substring(insertPoint); + + // Replace the dictionary in the file + content = content.replace(customFoodDict, newCustomFoodDict); + } + + // Update special handling for sushi in filename detection + if (FOOD_NAME === 'sushi') { + // Find and update filename detection block + const filenameHandlingRegex = /# Special handling for sushi\s+if "sushi" in file_name\.lower\(\):[^}]+return "([^"]+)"/s; + const filenameMatch = content.match(filenameHandlingRegex); + + if (filenameMatch) { + console.log(`Found special handling for sushi in filename detection: '${filenameMatch[1]}'`); + console.log(`Updating to '${CLASS_NAME}'...`); + + content = content.replace( + filenameHandlingRegex, + `# Special handling for sushi\n if "sushi" in file_name.lower():\n debug_log(f"Detected sushi in filename: {file_name}")\n return "${CLASS_NAME}"` + ); + } + + // Find and update original_filename detection block + const originalFilenameHandlingRegex = /# Special handling for sushi\s+if "sushi" in original_filename\.lower\(\):[^}]+return "([^"]+)"/s; + const originalFilenameMatch = content.match(originalFilenameHandlingRegex); + + if (originalFilenameMatch) { + console.log(`Found special handling for sushi in original_filename detection: '${originalFilenameMatch[1]}'`); + console.log(`Updating to '${CLASS_NAME}'...`); + + content = content.replace( + originalFilenameHandlingRegex, + `# Special handling for sushi\n if "sushi" in original_filename.lower():\n debug_log(f"Detected sushi in original filename: {original_filename}")\n return "${CLASS_NAME}"` + ); + } + + // Find and update texture detection block for sushi + const textureHandlingRegex = /# Add white\+complex texture classification \(possibly sushi\)[^}]+prediction = '([^']+)' # Best substitute for sushi/s; + const textureMatch = content.match(textureHandlingRegex); + + if (textureMatch) { + console.log(`Found white+complex texture classification for sushi: '${textureMatch[1]}'`); + console.log(`Updating to '${CLASS_NAME}'...`); + + content = content.replace( + /prediction = '[^']+' # Best substitute for sushi/, + `prediction = '${CLASS_NAME}' # Best substitute for sushi` + ); + } + } + + // Write back to file + fs.writeFileSync(PYTHON_FILE, content); + console.log(`\nSuccessfully updated mapping for '${FOOD_NAME}' to '${CLASS_NAME}'!`); + console.log('\nYou can now test the classification with:'); + console.log(`node tools/test/test_image_classification.js ./uploads/${FOOD_NAME}.jpg`); + +} catch (err) { + console.error('Error updating food mapping:', err); + process.exit(1); +} \ No newline at end of file diff --git a/uploads/021ba2debc6848afc5eefd55c8ba7af4 b/uploads/021ba2debc6848afc5eefd55c8ba7af4 new file mode 100644 index 0000000..7462e76 Binary files /dev/null and b/uploads/021ba2debc6848afc5eefd55c8ba7af4 differ diff --git a/uploads/1745167102084_2024_Predator_option_01_3840x2400.jpg b/uploads/1745167102084_2024_Predator_option_01_3840x2400.jpg new file mode 100644 index 0000000..c6a8a8a Binary files /dev/null and b/uploads/1745167102084_2024_Predator_option_01_3840x2400.jpg differ diff --git a/uploads/curry.jpg b/uploads/curry.jpg new file mode 100644 index 0000000..8d2e287 Binary files /dev/null and b/uploads/curry.jpg differ diff --git a/uploads/image.jpg b/uploads/image.jpg new file mode 100644 index 0000000..8d2e287 Binary files /dev/null and b/uploads/image.jpg differ diff --git a/uploads/lasagna.jpg b/uploads/lasagna.jpg new file mode 100644 index 0000000..41f4758 Binary files /dev/null and b/uploads/lasagna.jpg differ diff --git a/uploads/soup.jpg b/uploads/soup.jpg new file mode 100644 index 0000000..224ff34 Binary files /dev/null and b/uploads/soup.jpg differ diff --git a/uploads/test.txt b/uploads/test.txt new file mode 100644 index 0000000..fa3e073 --- /dev/null +++ b/uploads/test.txt @@ -0,0 +1 @@ +this is used to test the recipe image classfication, please do not delete \ No newline at end of file diff --git a/uploads/testimage.jpg b/uploads/testimage.jpg new file mode 100644 index 0000000..8d2e287 Binary files /dev/null and b/uploads/testimage.jpg differ diff --git a/validators/appointmentValidator.js b/validators/appointmentValidator.js new file mode 100644 index 0000000..95ed40a --- /dev/null +++ b/validators/appointmentValidator.js @@ -0,0 +1,29 @@ +const { body } = require("express-validator"); + +const appointmentValidator = [ + body("userId") + .notEmpty() + .withMessage("User ID is required") + .isInt() + .withMessage("User ID must be an integer"), + + body("date") + .notEmpty() + .withMessage("Date is required") + .isISO8601() + .withMessage("Date must be in a valid ISO 8601 format (e.g., YYYY-MM-DD)"), + + body("time") + .notEmpty() + .withMessage("Time is required") + .matches(/^([01]\d|2[0-3]):([0-5]\d)$/) + .withMessage("Time must be in HH:mm format (24-hour)"), + + body("description") + .notEmpty() + .withMessage("Description is required") + .isLength({ max: 255 }) + .withMessage("Description must not exceed 255 characters"), +]; + +module.exports = { appointmentValidator }; diff --git a/validators/contactusValidator.js b/validators/contactusValidator.js new file mode 100644 index 0000000..dec4297 --- /dev/null +++ b/validators/contactusValidator.js @@ -0,0 +1,32 @@ +const { body } = require("express-validator"); + +const contactusValidator = [ + body("name") + .trim() + .notEmpty() + .withMessage("Name is required") + .isLength({ max: 50 }) + .withMessage("Name must not exceed 50 characters"), + + body("email") + .notEmpty() + .withMessage("Email is required") + .isEmail() + .withMessage("Invalid email format"), + + body("subject") + .trim() + .notEmpty() + .withMessage("Subject is required") + .isLength({ max: 100 }) + .withMessage("Subject must not exceed 100 characters"), + + body("message") + .trim() + .notEmpty() + .withMessage("Message is required") + .isLength({ max: 500 }) + .withMessage("Message must not exceed 500 characters"), +]; + +module.exports = { contactusValidator }; \ No newline at end of file diff --git a/validators/feedbackValidator.js b/validators/feedbackValidator.js new file mode 100644 index 0000000..669888c --- /dev/null +++ b/validators/feedbackValidator.js @@ -0,0 +1,38 @@ +const { body } = require('express-validator'); + +// Registration validation +const feedbackValidation = [ + body('name') + .notEmpty() + .withMessage('Name is required') + .isLength({ min: 3 }) + .withMessage('Name should be at least 3 characters long'), + + body('contact_number') + .notEmpty() + .withMessage('Contact number is required') + .isMobilePhone() + .withMessage('Please enter a valid contact number'), + + body('email') + .notEmpty() + .withMessage('Email is required') + .isEmail() + .withMessage('Please enter a valid email'), + + body('experience') + .notEmpty() + .withMessage('Please define how was your experience') + .isLength({ min: 10 }) + .withMessage('Please enter a valid feedback of at least 10 characters'), + + body("message") + .notEmpty() + .withMessage("A short Message is required") + .isLength({ max: 255 }) + .withMessage("Message must not exceed 255 characters"), +]; + +module.exports = { + feedbackValidation +}; \ No newline at end of file diff --git a/validators/imageValidator.js b/validators/imageValidator.js new file mode 100644 index 0000000..a7c69c2 --- /dev/null +++ b/validators/imageValidator.js @@ -0,0 +1,29 @@ +const path = require('path'); + +// Middleware to validate uploaded image for image classification +const validateImageUpload = (req, res, next) => { + const file = req.file; + + // Check if file was uploaded + if (!file) { + return res.status(400).json({ error: 'No image uploaded. Please upload a JPEG or PNG image.' }); + } + + // Check MIME type + const allowedTypes = ['image/jpeg', 'image/png']; + if (!allowedTypes.includes(file.mimetype)) { + return res.status(400).json({ error: 'Invalid file type. Only JPEG and PNG images are allowed.' }); + } + + // Check file size limit (e.g., 5MB) + const MAX_SIZE = 5 * 1024 * 1024; // 5MB + if (file.size > MAX_SIZE) { + return res.status(400).json({ error: 'Image size exceeds 5MB limit.' }); + } + + next(); // Validation passed, continue +}; + +module.exports = { + validateImageUpload, +}; diff --git a/validators/loginValidator.js b/validators/loginValidator.js new file mode 100644 index 0000000..ed11a4b --- /dev/null +++ b/validators/loginValidator.js @@ -0,0 +1,40 @@ +const { body } = require('express-validator'); + +// Login validation +const loginValidator = [ + body('email') + .notEmpty() + .withMessage('Email is required') + .isEmail() + .withMessage('Email must be valid'), + + body('password') + .notEmpty() + .withMessage('Password is required') +]; + +// MFA login validation +const mfaloginValidator = [ + body('email') + .notEmpty() + .withMessage('Email is required') + .isEmail() + .withMessage('Email must be valid'), + + body('password') + .notEmpty() + .withMessage('Password is required'), + + body('mfa_token') + .notEmpty() + .withMessage('Token is required') + .isLength({ min: 6, max: 6 }) + .withMessage('Token must be 6 digits') + .isNumeric() + .withMessage('Token must be numeric') +]; + +module.exports = { + loginValidator, + mfaloginValidator +}; diff --git a/validators/mealplanValidator.js b/validators/mealplanValidator.js new file mode 100644 index 0000000..ff9dcec --- /dev/null +++ b/validators/mealplanValidator.js @@ -0,0 +1,52 @@ +const { body } = require('express-validator'); + +// Validation for adding a meal plan +const addMealPlanValidation = [ + body('recipe_ids') + .notEmpty() + .withMessage('Recipe IDs are required') + .isArray() + .withMessage('Recipe IDs must be an array'), + + body('meal_type') + .notEmpty() + .withMessage('Meal Type is required') + .isString() + .withMessage('Meal Type must be a string'), + + body('user_id') + .notEmpty() + .withMessage('User ID is required') + .isInt() + .withMessage('User ID must be an integer') +]; + +// Validation for getting a meal plan +const getMealPlanValidation = [ + body('user_id') + .notEmpty() + .withMessage('User ID is required') + .isInt() + .withMessage('User ID must be an integer') +]; + +// Validation for deleting a meal plan +const deleteMealPlanValidation = [ + body('id') + .notEmpty() + .withMessage('Plan ID is required') + .isInt() + .withMessage('Plan ID must be an integer'), + + body('user_id') + .notEmpty() + .withMessage('User ID is required') + .isInt() + .withMessage('User ID must be an integer') +]; + +module.exports = { + addMealPlanValidation, + getMealPlanValidation, + deleteMealPlanValidation +}; diff --git a/validators/notificationValidator.js b/validators/notificationValidator.js new file mode 100644 index 0000000..70c628f --- /dev/null +++ b/validators/notificationValidator.js @@ -0,0 +1,32 @@ +const { body, param } = require('express-validator'); + +exports.validateCreateNotification = [ + body('user_id') + .notEmpty().withMessage('User ID is required') + .isInt().withMessage('User ID must be an integer'), + + body('type') + .notEmpty().withMessage('Notification type is required') + .isString().withMessage('Type must be a string'), + + body('content') + .notEmpty().withMessage('Notification content is required') + .isString().withMessage('Content must be a string') +]; + +exports.validateUpdateNotification = [ + param('id') + .notEmpty().withMessage('Notification ID is required') + .isInt().withMessage('Notification ID must be an integer'), + + body('status') + .notEmpty().withMessage('Status is required') + .isString().withMessage('Status must be a string') + .isIn(['read', 'unread']).withMessage('Status must be either "read" or "unread"') +]; + +exports.validateDeleteNotification = [ + param('id') + .notEmpty().withMessage('Notification ID is required') + .isInt().withMessage('Notification ID must be an integer') +]; \ No newline at end of file diff --git a/validators/recipeImageValidator.js b/validators/recipeImageValidator.js new file mode 100644 index 0000000..2f6cbd1 --- /dev/null +++ b/validators/recipeImageValidator.js @@ -0,0 +1,24 @@ +const { body, validationResult } = require('express-validator'); +const path = require('path'); + +// Middleware to validate uploaded image +const validateRecipeImageUpload = (req, res, next) => { + // Check if file is present + if (!req.file) { + return res.status(400).json({ error: 'No image uploaded' }); + } + + // Validate file extension + const allowedExtensions = ['.jpg', '.jpeg', '.png']; + const fileExtension = path.extname(req.file.originalname).toLowerCase(); + + if (!allowedExtensions.includes(fileExtension)) { + return res.status(400).json({ error: 'Invalid file type. Only JPG/PNG files are allowed.' }); + } + + next(); +}; + +module.exports = { + validateRecipeImageUpload, +}; diff --git a/validators/recipeValidator.js b/validators/recipeValidator.js new file mode 100644 index 0000000..f93c275 --- /dev/null +++ b/validators/recipeValidator.js @@ -0,0 +1,45 @@ +const { body } = require('express-validator'); + +const validateRecipe = [ + body('user_id') + .notEmpty().withMessage('User ID is required') + .isInt().withMessage('User ID must be an integer'), + + body('ingredient_id') + .isArray({ min: 1 }).withMessage('Ingredient IDs must be a non-empty array'), + + body('ingredient_quantity') + .isArray({ min: 1 }).withMessage('Ingredient quantities must be a non-empty array'), + + body('recipe_name') + .notEmpty().withMessage('Recipe name is required') + .isString().withMessage('Recipe name must be a string'), + + body('cuisine_id') + .notEmpty().withMessage('Cuisine ID is required') + .isInt().withMessage('Cuisine ID must be an integer'), + + body('total_servings') + .notEmpty().withMessage('Total servings is required') + .isInt().withMessage('Total servings must be an integer'), + + body('preparation_time') + .notEmpty().withMessage('Preparation time is required') + .isInt().withMessage('Preparation time must be an integer'), + + body('instructions') + .notEmpty().withMessage('Instructions are required') + .isString().withMessage('Instructions must be a string'), + + body('recipe_image') + .optional() + .isString().withMessage('Recipe image must be a string if provided'), + + body('cooking_method_id') + .notEmpty().withMessage('Cooking method ID is required') + .isInt().withMessage('Cooking method ID must be an integer'), +]; + +module.exports = { + validateRecipe +}; diff --git a/validators/signupValidator.js b/validators/signupValidator.js new file mode 100644 index 0000000..4af6087 --- /dev/null +++ b/validators/signupValidator.js @@ -0,0 +1,38 @@ +const { body } = require('express-validator'); + +// Registration validation +const registerValidation = [ + body('name') + .notEmpty() + .withMessage('Name is required') + .isLength({ min: 3 }) + .withMessage('Name should be at least 3 characters long'), + + body('email') + .notEmpty() + .withMessage('Email is required') + .isEmail() + .withMessage('Please enter a valid email'), + + body('password') + .notEmpty() + .withMessage('Password is required') + .isLength({ min: 6 }) + .withMessage('Password must be at least 6 characters long'), + + body('contact_number') + .notEmpty() + .withMessage('Contact number is required') + .isMobilePhone() + .withMessage('Please enter a valid contact number'), + + body('address') + .notEmpty() + .withMessage('Address is required') + .isLength({ min: 10 }) + .withMessage('Address should be at least 10 characters long'), +]; + +module.exports = { + registerValidation +}; \ No newline at end of file diff --git a/validators/userPreferencesValidator.js b/validators/userPreferencesValidator.js new file mode 100644 index 0000000..aea7e12 --- /dev/null +++ b/validators/userPreferencesValidator.js @@ -0,0 +1,44 @@ +const { body } = require('express-validator'); + +// Helper to validate that an array only contains integers +const isArrayOfIntegers = (value) => { + return Array.isArray(value) && value.every(Number.isInteger); +}; + +exports.validateUserPreferences = [ + body('user') + .notEmpty().withMessage('User object is required') + .isObject().withMessage('User must be an object'), + + body('user.userId') + .notEmpty().withMessage('User ID is required') + .isInt().withMessage('User ID must be an integer'), + + body('dietary_requirements') + .optional() + .custom(isArrayOfIntegers).withMessage('Dietary requirements must be an array of integers'), + + body('allergies') + .optional() + .custom(isArrayOfIntegers).withMessage('Allergies must be an array of integers'), + + body('cuisines') + .optional() + .custom(isArrayOfIntegers).withMessage('Cuisines must be an array of integers'), + + body('dislikes') + .optional() + .custom(isArrayOfIntegers).withMessage('Dislikes must be an array of integers'), + + body('health_conditions') + .optional() + .custom(isArrayOfIntegers).withMessage('Health conditions must be an array of integers'), + + body('spice_levels') + .optional() + .custom(isArrayOfIntegers).withMessage('Spice levels must be an array of integers'), + + body('cooking_methods') + .optional() + .custom(isArrayOfIntegers).withMessage('Cooking methods must be an array of integers'), +];