diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 75e947dd3..2c8924df4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,7 @@ jobs: strategy: matrix: #TODO: pypy3 has problems compiling lxml - python-version: [ '3.9', '3.10', '3.11', '3.12' ] + python-version: [ '3.10', '3.11', '3.12' ] name: CI job (python ${{ matrix.python-version }}) steps: @@ -46,6 +46,7 @@ jobs: run: | pip install --upgrade pip pip install wheel coverage + pip install -r requirements_dev.txt pip install -r requirements_prod.txt pip install -e . diff --git a/extras/docker/development/settings.py b/extras/docker/development/settings.py index 19dfc3276..08a17ca7d 100644 --- a/extras/docker/development/settings.py +++ b/extras/docker/development/settings.py @@ -98,6 +98,7 @@ WGER_SETTINGS["SYNC_EXERCISES_CELERY"] = env.bool("SYNC_EXERCISES_CELERY", False) WGER_SETTINGS["SYNC_EXERCISE_IMAGES_CELERY"] = env.bool("SYNC_EXERCISE_IMAGES_CELERY", False) WGER_SETTINGS["SYNC_EXERCISE_VIDEOS_CELERY"] = env.bool("SYNC_EXERCISE_VIDEOS_CELERY", False) +WGER_SETTINGS["SYNC_INGREDIENTS_CELERY"] = env.bool("SYNC_INGREDIENTS_CELERY", False) WGER_SETTINGS["USE_CELERY"] = env.bool("USE_CELERY", False) # Cache diff --git a/wger/exercises/sync.py b/wger/exercises/sync.py index 15372cc15..acdb9e3fc 100644 --- a/wger/exercises/sync.py +++ b/wger/exercises/sync.py @@ -58,6 +58,7 @@ WorkoutLog, ) from wger.utils.requests import ( + get_all_paginated, get_paginated, wger_headers, ) @@ -73,9 +74,7 @@ def sync_exercises( print_fn('*** Synchronizing exercises...') url = make_uri(EXERCISE_ENDPOINT, server_url=remote_url, query={'limit': 100}) - result = get_paginated(url, headers=wger_headers()) - - for data in result: + for data in get_paginated(url, headers=wger_headers()): uuid = data['uuid'] created = data['created'] @@ -166,8 +165,8 @@ def sync_languages( print_fn('*** Synchronizing languages...') headers = wger_headers() url = make_uri(LANGUAGE_ENDPOINT, 
server_url=remote_url) - result = get_paginated(url, headers=headers) - for data in result: + + for data in get_all_paginated(url, headers=headers): short_name = data['short_name'] full_name = data['full_name'] @@ -190,8 +189,8 @@ def sync_licenses( """Synchronize the licenses from the remote server""" print_fn('*** Synchronizing licenses...') url = make_uri(LICENSE_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=wger_headers()) - for data in result: + + for data in get_all_paginated(url, headers=wger_headers()): short_name = data['short_name'] full_name = data['full_name'] license_url = data['url'] @@ -219,8 +218,8 @@ def sync_categories( print_fn('*** Synchronizing categories...') url = make_uri(CATEGORY_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=wger_headers()) - for data in result: + + for data in get_all_paginated(url, headers=wger_headers()): category_id = data['id'] category_name = data['name'] @@ -244,9 +243,8 @@ def sync_muscles( print_fn('*** Synchronizing muscles...') url = make_uri(MUSCLE_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=wger_headers()) - for data in result: + for data in get_all_paginated(url, headers=wger_headers()): muscle_id = data['id'] muscle_name = data['name'] muscle_is_front = data['is_front'] @@ -280,9 +278,8 @@ def sync_equipment( print_fn('*** Synchronizing equipment...') url = make_uri(EQUIPMENT_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=wger_headers()) - for data in result: + for data in get_all_paginated(url, headers=wger_headers()): equipment_id = data['id'] equipment_name = data['name'] @@ -303,7 +300,6 @@ def handle_deleted_entries( style_fn=lambda x: x, ): if not print_fn: - def print_fn(_): return None @@ -311,9 +307,8 @@ def print_fn(_): print_fn('*** Deleting exercise data that was removed on the server...') url = make_uri(DELETION_LOG_ENDPOINT, server_url=remote_url, query={'limit': 100}) - result = 
get_paginated(url, headers=wger_headers()) - for data in result: + for data in get_paginated(url, headers=wger_headers()): uuid = data['uuid'] replaced_by_uuid = data['replaced_by'] model_type = data['model_type'] @@ -382,7 +377,6 @@ def download_exercise_images( ): headers = wger_headers() url = make_uri(IMAGE_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=headers) print_fn('*** Processing images ***') @@ -396,7 +390,7 @@ def download_exercise_images( if deleted: print_fn(f'Deleted {deleted} images without associated image files') - for image_data in result: + for image_data in get_paginated(url, headers=headers): image_uuid = image_data['uuid'] print_fn(f'Processing image {image_uuid}') @@ -426,11 +420,10 @@ def download_exercise_videos( ): headers = wger_headers() url = make_uri(VIDEO_ENDPOINT, server_url=remote_url) - result = get_paginated(url, headers=headers) print_fn('*** Processing videos ***') - for video_data in result: + for video_data in get_paginated(url, headers=headers): video_uuid = video_data['uuid'] print_fn(f'Processing video {video_uuid}') diff --git a/wger/nutrition/api/endpoints.py b/wger/nutrition/api/endpoints.py index 6f225e199..4e0fe8e49 100644 --- a/wger/nutrition/api/endpoints.py +++ b/wger/nutrition/api/endpoints.py @@ -1 +1,2 @@ IMAGE_ENDPOINT = "ingredient-image" +INGREDIENTS_ENDPOINT = "ingredient" diff --git a/wger/nutrition/management/commands/sync-ingredients.py b/wger/nutrition/management/commands/sync-ingredients.py new file mode 100644 index 000000000..0965c222b --- /dev/null +++ b/wger/nutrition/management/commands/sync-ingredients.py @@ -0,0 +1,65 @@ +# This file is part of wger Workout Manager. +# +# wger Workout Manager is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# wger Workout Manager is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License + +# Django +from django.conf import settings +from django.core.exceptions import ValidationError +from django.core.management.base import ( + BaseCommand, + CommandError, +) +from django.core.validators import URLValidator + +# wger +from wger.exercises.sync import ( + handle_deleted_entries, + sync_categories, + sync_equipment, + sync_exercises, + sync_languages, + sync_licenses, + sync_muscles, +) +from wger.nutrition.sync import sync_ingredients + + +class Command(BaseCommand): + """ + Synchronizes ingredient data from a wger instance to the local database + """ + remote_url = settings.WGER_SETTINGS['WGER_INSTANCE'] + + help = """Synchronizes ingredient data from a wger instance to the local database""" + + def add_arguments(self, parser): + parser.add_argument( + '--remote-url', + action='store', + dest='remote_url', + default=settings.WGER_SETTINGS['WGER_INSTANCE'], + help=f'Remote URL to fetch the ingredients from (default: WGER_SETTINGS' + f'["WGER_INSTANCE"] - {settings.WGER_SETTINGS["WGER_INSTANCE"]})' + ) + + def handle(self, **options): + remote_url = options['remote_url'] + + try: + val = URLValidator() + val(remote_url) + self.remote_url = remote_url + except ValidationError: + raise CommandError('Please enter a valid URL') + + sync_ingredients(self.stdout.write, self.remote_url, self.style.SUCCESS) diff --git a/wger/nutrition/sync.py b/wger/nutrition/sync.py index ffc0b2599..6b675e160 100644 --- a/wger/nutrition/sync.py +++ b/wger/nutrition/sync.py @@ -25,19 +25,23 @@ import requests # wger -from wger.nutrition.api.endpoints import IMAGE_ENDPOINT +from wger.nutrition.api.endpoints import ( + IMAGE_ENDPOINT, + 
INGREDIENTS_ENDPOINT, +) from wger.nutrition.models import ( Image, Ingredient, Source, ) from wger.utils.constants import ( + API_MAX_ITEMS, CC_BY_SA_3_LICENSE_ID, DOWNLOAD_INGREDIENT_OFF, DOWNLOAD_INGREDIENT_WGER, ) from wger.utils.requests import ( - get_paginated_generator, + get_paginated, wger_headers, ) from wger.utils.url import make_uri @@ -162,30 +166,69 @@ def download_ingredient_images( headers = wger_headers() url = make_uri(IMAGE_ENDPOINT, server_url=remote_url, query={'limit': 100}) - print_fn('*** Processing images ***') - for result in get_paginated_generator(url, headers=headers): + print_fn('*** Processing ingredient images ***') + for image_data in get_paginated(url, headers=headers): + image_uuid = image_data['uuid'] + print_fn(f'Processing image {image_uuid}') - for image_data in result: - image_uuid = image_data['uuid'] + try: + ingredient = Ingredient.objects.get(uuid=image_data['ingredient_uuid']) + except Ingredient.DoesNotExist: + print_fn(' Remote ingredient not found in local DB, skipping...') + continue - print_fn(f'Processing image {image_uuid}') + if hasattr(ingredient, 'image'): + continue - try: - ingredient = Ingredient.objects.get(uuid=image_data['ingredient_uuid']) - except Ingredient.DoesNotExist: - print_fn(' Remote ingredient not found in local DB, skipping...') - continue + try: + Image.objects.get(uuid=image_uuid) + print_fn(' Image already present locally, skipping...') + continue + except Image.DoesNotExist: + print_fn(' Image not found in local DB, creating now...') + retrieved_image = requests.get(image_data['image'], headers=headers) + Image.from_json(ingredient, retrieved_image, image_data) - if hasattr(ingredient, 'image'): - continue + print_fn(style_fn(' successfully saved')) - try: - Image.objects.get(uuid=image_uuid) - print_fn(' Image already present locally, skipping...') - continue - except Image.DoesNotExist: - print_fn(' Image not found in local DB, creating now...') - retrieved_image = 
requests.get(image_data['image'], headers=headers) - Image.from_json(ingredient, retrieved_image, image_data) - print_fn(style_fn(' successfully saved')) +def sync_ingredients( + print_fn, + remote_url=settings.WGER_SETTINGS['WGER_INSTANCE'], + style_fn=lambda x: x, +): + """Synchronize the ingredients from the remote server""" + print_fn('*** Synchronizing ingredients...') + + url = make_uri(INGREDIENTS_ENDPOINT, server_url=remote_url, query={'limit': API_MAX_ITEMS}) + for data in get_paginated(url, headers=wger_headers()): + uuid = data['uuid'] + name = data['name'] + + ingredient, created = Ingredient.objects.update_or_create( + uuid=uuid, + defaults={ + 'name': name, + 'code': data['code'], + 'language_id': data['language'], + 'created': data['created'], + 'license_id': data['license'], + 'license_object_url': data['license_object_url'], + 'license_author': data['license_author'], + 'license_author_url': data['license_author_url'], + 'license_title': data['license_title'], + 'license_derivative_source_url': data['license_derivative_source_url'], + 'energy': data['energy'], + 'carbohydrates': data['carbohydrates'], + 'carbohydrates_sugar': data['carbohydrates_sugar'], + 'fat': data['fat'], + 'fat_saturated': data['fat_saturated'], + 'protein': data['protein'], + 'fibres': data['fibres'], + 'sodium': data['sodium'], + }, + ) + + print_fn(f"{'created' if created else 'updated'} ingredient {uuid} - {name}") + + print_fn(style_fn('done!\n')) diff --git a/wger/nutrition/tasks.py b/wger/nutrition/tasks.py index 40352ee3c..8e3b569c0 100644 --- a/wger/nutrition/tasks.py +++ b/wger/nutrition/tasks.py @@ -14,15 +14,22 @@ # Standard Library import logging +import random + +# Django +from django.conf import settings + +# Third Party +from celery.schedules import crontab # wger from wger.celery_configuration import app from wger.nutrition.sync import ( download_ingredient_images, fetch_ingredient_image, + sync_ingredients, ) - logger = logging.getLogger(__name__) @@ 
-44,3 +51,25 @@ def fetch_all_ingredient_images_task(): Returns the image if it is already present in the DB """ download_ingredient_images(logger.info) + + +@app.task +def sync_all_ingredients_task(): + """ + Fetches the current ingredients from the default wger instance + """ + sync_ingredients(logger.info) + + +@app.on_after_finalize.connect +def setup_periodic_tasks(sender, **kwargs): + if settings.WGER_SETTINGS['SYNC_INGREDIENTS_CELERY']: + sender.add_periodic_task( + crontab( + hour=random.randint(0, 23), + minute=random.randint(0, 59), + day_of_week=random.randint(0, 6), + ), + sync_all_ingredients_task.s(), + name='Sync ingredients', + ) diff --git a/wger/nutrition/tests/test_sync.py b/wger/nutrition/tests/test_sync.py new file mode 100644 index 000000000..1d474f6e2 --- /dev/null +++ b/wger/nutrition/tests/test_sync.py @@ -0,0 +1,127 @@ +# This file is part of wger Workout Manager. +# +# wger Workout Manager is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# wger Workout Manager is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Affero General Public License + +# Standard Library +from decimal import Decimal +from unittest.mock import patch +from uuid import UUID + +# wger +from wger.core.tests.base_testcase import WgerTestCase +from wger.nutrition.models import Ingredient +from wger.nutrition.sync import sync_ingredients +from wger.utils.requests import wger_headers + + +class MockIngredientResponse: + + def __init__(self): + self.status_code = 200 + self.content = b'1234' + + # yapf: disable + @staticmethod + def json(): + return { + "count": 2, + "next": None, + "previous": None, + "results": [ + { + "id": 1, + "uuid": "7908c204-907f-4b1e-ad4e-f482e9769ade", + "code": "0013087245950", + "name": "Gâteau double chocolat", + "created": "2020-12-20T01:00:00+01:00", + "last_update": "2020-12-20T01:00:00+01:00", + "energy": 360, + "protein": "5.000", + "carbohydrates": "45.000", + "carbohydrates_sugar": "27.000", + "fat": "18.000", + "fat_saturated": "4.500", + "fibres": "2.000", + "sodium": "0.356", + "license": 5, + "license_title": " Gâteau double chocolat ", + "license_object_url": "", + "license_author": "Open Food Facts", + "license_author_url": "", + "license_derivative_source_url": "", + "language": 2 + }, + { + "id": 22634, + "uuid": "582f1b7f-a8bd-4951-9edd-247bc68b28f4", + "code": "3181238941963", + "name": "Maxi Hot Dog New York Style", + "created": "2020-12-20T01:00:00+01:00", + "last_update": "2020-12-20T01:00:00+01:00", + "energy": 256, + "protein": "11.000", + "carbohydrates": "27.000", + "carbohydrates_sugar": "5.600", + "fat": "11.000", + "fat_saturated": "4.600", + "fibres": None, + "sodium": "0.820", + "license": 5, + "license_title": " Maxi Hot Dog New York Style", + "license_object_url": "", + "license_author": "Open Food Facts", + "license_author_url": "", + "license_derivative_source_url": "", + "language": 3 + }, + ] + } + # yapf: enable + + +class TestSyncMethods(WgerTestCase): + + @patch('requests.get', 
return_value=MockIngredientResponse()) + def test_ingredient_sync(self, mock_request): + # Arrange + ingredient = Ingredient.objects.get(pk=1) + + self.assertEqual(Ingredient.objects.count(), 14) + self.assertEqual(ingredient.name, 'Test ingredient 1') + self.assertEqual(ingredient.energy, 176) + self.assertAlmostEqual(ingredient.protein, Decimal(25.63), 2) + self.assertEqual(ingredient.code, '1234567890987654321') + + # Act + sync_ingredients(lambda x: x) + mock_request.assert_called_with( + 'https://wger.de/api/v2/ingredient/?limit=999', + headers=wger_headers(), + ) + + # Assert + self.assertEqual(Ingredient.objects.count(), 15) + + ingredient = Ingredient.objects.get(pk=1) + self.assertEqual(ingredient.name, 'Gâteau double chocolat') + self.assertEqual(ingredient.energy, 360) + self.assertAlmostEqual(ingredient.protein, Decimal(5), 2) + self.assertEqual(ingredient.code, '0013087245950') + self.assertEqual(ingredient.license.pk, 5) + self.assertEqual(ingredient.uuid, UUID('7908c204-907f-4b1e-ad4e-f482e9769ade')) + + new_ingredient = Ingredient.objects.get(uuid='582f1b7f-a8bd-4951-9edd-247bc68b28f4') + self.assertEqual(new_ingredient.name, 'Maxi Hot Dog New York Style') + self.assertEqual(new_ingredient.energy, 256) + self.assertAlmostEqual(new_ingredient.protein, Decimal(11), 2) + self.assertEqual(new_ingredient.code, '3181238941963') diff --git a/wger/settings_global.py b/wger/settings_global.py index 2b2374128..2cf80e560 100644 --- a/wger/settings_global.py +++ b/wger/settings_global.py @@ -536,6 +536,7 @@ 'SYNC_EXERCISES_CELERY': False, 'SYNC_EXERCISE_IMAGES_CELERY': False, 'SYNC_EXERCISE_VIDEOS_CELERY': False, + 'SYNC_INGREDIENTS_CELERY': False, 'TWITTER': False, 'MASTODON': 'https://fosstodon.org/@wger', 'USE_CELERY': False, diff --git a/wger/utils/constants.py b/wger/utils/constants.py index 58d4fa47b..ce9242860 100644 --- a/wger/utils/constants.py +++ b/wger/utils/constants.py @@ -59,8 +59,16 @@ # Possible values for ingredient image download 
DOWNLOAD_INGREDIENT_WGER = 'WGER' DOWNLOAD_INGREDIENT_OFF = 'OFF' -DOWNLOAD_INGREDIENT_OPTIONS = (DOWNLOAD_INGREDIENT_WGER, DOWNLOAD_INGREDIENT_OFF, None) +DOWNLOAD_INGREDIENT_NONE = 'None' +DOWNLOAD_INGREDIENT_OPTIONS = ( + DOWNLOAD_INGREDIENT_WGER, + DOWNLOAD_INGREDIENT_OFF, + DOWNLOAD_INGREDIENT_NONE, +) # OFF Api OFF_SEARCH_PRODUCT_FOUND = 1 OFF_SEARCH_PRODUCT_NOT_FOUND = 0 + +# API +API_MAX_ITEMS = 999 diff --git a/wger/utils/requests.py b/wger/utils/requests.py index abcf11834..d59020e63 100644 --- a/wger/utils/requests.py +++ b/wger/utils/requests.py @@ -27,7 +27,7 @@ def wger_headers(): return {'User-agent': wger_user_agent()} -def get_paginated(url: str, headers=None): +def get_all_paginated(url: str, headers=None): """ Fetch all results from a paginated endpoint. @@ -49,7 +49,7 @@ def get_paginated(url: str, headers=None): return results -def get_paginated_generator(url: str, headers=None): +def get_paginated(url: str, headers=None): """ Generator that iterates over a paginated endpoint @@ -62,7 +62,9 @@ def get_paginated_generator(url: str, headers=None): while True: response = requests.get(url, headers=headers).json() - yield response['results'] + + for result in response['results']: + yield result url = response['next'] if not url: