Skip to content

Commit

Permalink
Fixed qrcode size and removed duplicate code
Browse files Browse the repository at this point in the history
  • Loading branch information
jlucaspains committed Jan 12, 2025
1 parent d88487d commit 9a57e28
Show file tree
Hide file tree
Showing 4 changed files with 77 additions and 120 deletions.
61 changes: 2 additions & 59 deletions api/functions/parse_recipe.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,13 @@
import logging
import json
import re

import azure.functions as func
from contextlib import suppress

from pint import UnitRegistry
from uuid import uuid4
from time import perf_counter

from .util import parse_recipe_ingredient, parse_recipe_instruction, get_recipe_image, get_html
from .util import get_recipe_from_scraper, get_html

ureg = UnitRegistry()
bp = func.Blueprint()

@bp.route(route="parse-recipe", methods=["POST"])
Expand All @@ -26,28 +22,7 @@ def parse_recipe(req: func.HttpRequest) -> func.HttpResponse:
logging.info(f"processing parse request id {correlation_id} for url: {url}")
scraper = get_html(url)

lang = scraper.language() or "en"

ingredients = map(lambda x: parse_recipe_ingredient(x, lang, ureg), scraper.ingredients())
instructions = map(lambda x: parse_recipe_instruction(x, lang), scraper.instructions_list())
yields, yields_description = parse_yields(scraper.yields())
result = {
"title": scraper.title(),
"totalTime": scraper.total_time(),
"yields": yields,
"yieldsDescription": yields_description,
"ingredients": list(ingredients),
"steps": list(instructions),
"image": scraper.image(),
"host": scraper.host(),
"language": scraper.language()
}

# since nutrients are not always available, we need to suppress the exception
with suppress(NotImplementedError):
result["nutrients"] = parse_nutrients(scraper.nutrients())

result["image"] = get_recipe_image(result["image"]) if download_image else result["image"]
result = get_recipe_from_scraper(scraper, download_image)

return func.HttpResponse(json.dumps(result), status_code=200, mimetype="application/json")
except Exception as e:
Expand All @@ -57,35 +32,3 @@ def parse_recipe(req: func.HttpRequest) -> func.HttpResponse:
finally:
end = perf_counter()
logging.info(f"Finished processing parse request id {correlation_id}. Time taken: {end - start:0.4f}s")

def parse_nutrients(nutrients: dict):
    """Convert raw scraper nutrient fields into the API's nutrient schema.

    Args:
        nutrients (dict): raw nutrient strings keyed by scraper field names
    Returns:
        dict: nutrient values keyed by API field names, parsed to floats
    """
    # API field name -> scraper field name
    field_map = {
        "calories": "calories",
        "totalFat": "fatContent",
        "saturatedFat": "saturatedFatContent",
        "unsaturatedFat": "unsaturatedFatContent",
        "transFat": "transFatContent",
        "carbohydrates": "carbohydrateContent",
        "sugar": "sugarContent",
        "cholesterol": "cholesterolContent",
        "sodium": "sodiumContent",
        "protein": "proteinContent",
        "fiber": "fiberContent",
    }

    return {api_name: parse_nutrient_value(nutrients.get(source_name))
            for api_name, source_name in field_map.items()}

def parse_yields(yields: str):
    """Split a scraper yields string (e.g. "4 servings") into amount and description.

    Args:
        yields (str): raw yields text from the scraper; may be empty or None
    Returns:
        tuple: (float amount, str description); (0, "") for empty input,
        (0, original text) when the text does not start with a number
    """
    if not yields:
        return 0, ""

    # partition keeps the full remainder as the description ("1 dozen cookies"
    # -> "dozen cookies"); the old split()[1] dropped everything after one word
    amount, _, description = yields.partition(" ")

    try:
        return float(amount), description
    except ValueError:
        # e.g. "Serves 4" — no leading number; fall back instead of raising
        return 0, yields

def parse_nutrient_value(value: str) -> float:
    """Extract the leading numeric quantity from a nutrient string (e.g. "25 g" -> 25.0).

    Args:
        value (str): raw nutrient text; may be None or empty
    Returns:
        float: leading number, or 0 when the text is missing or non-numeric
    """
    if not value:
        return 0

    # The previous pattern raised AttributeError when the text did not start
    # with a digit (search() returns None); also accept a decimal fraction so
    # "2.5 g" is not truncated to 2.0.
    match = re.search(r"^(?P<Value>\d{1,5}(?:\.\d+)?)", value)

    return float(match.group("Value")) if match else 0
61 changes: 2 additions & 59 deletions api/functions/parse_recipe_html.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,15 @@
import logging
import json
import re

import azure.functions as func
from contextlib import suppress

from pint import UnitRegistry
from uuid import uuid4
from time import perf_counter

from recipe_scrapers import scrape_html, AbstractScraper

from .util import parse_recipe_ingredient, parse_recipe_instruction, get_recipe_image, get_html
from .util import get_recipe_from_scraper

ureg = UnitRegistry()
bp = func.Blueprint()

@bp.route(route="parse-recipe-html", methods=["POST"])
Expand All @@ -30,28 +26,7 @@ def parse_recipe_html(req: func.HttpRequest) -> func.HttpResponse:
for file in req.files.values():
scraper = scrape_html(file.stream.read(), url, wild_mode=True)

lang = scraper.language() or "en"

ingredients = map(lambda x: parse_recipe_ingredient(x, lang, ureg), scraper.ingredients())
instructions = map(lambda x: parse_recipe_instruction(x, lang), scraper.instructions_list())
yields, yields_description = parse_yields(scraper.yields())
result = {
"title": scraper.title(),
"totalTime": scraper.total_time(),
"yields": yields,
"yieldsDescription": yields_description,
"ingredients": list(ingredients),
"steps": list(instructions),
"image": scraper.image(),
"host": scraper.host(),
"language": scraper.language()
}

# since nutrients are not always available, we need to suppress the exception
with suppress(NotImplementedError):
result["nutrients"] = parse_nutrients(scraper.nutrients())

result["image"] = get_recipe_image(result["image"]) if download_image else result["image"]
result = get_recipe_from_scraper(scraper, download_image)

return func.HttpResponse(json.dumps(result), status_code=200, mimetype="application/json")
except Exception as e:
Expand All @@ -61,35 +36,3 @@ def parse_recipe_html(req: func.HttpRequest) -> func.HttpResponse:
finally:
end = perf_counter()
logging.info(f"Finished processing parse request id {correlation_id}. Time taken: {end - start:0.4f}s")

def parse_nutrients(nutrients: dict):
    """Convert raw scraper nutrient fields into the API's nutrient schema.

    Args:
        nutrients (dict): raw nutrient strings keyed by scraper field names
    Returns:
        dict: nutrient values keyed by API field names, parsed to floats
    """
    # API field name -> scraper field name
    field_map = {
        "calories": "calories",
        "totalFat": "fatContent",
        "saturatedFat": "saturatedFatContent",
        "unsaturatedFat": "unsaturatedFatContent",
        "transFat": "transFatContent",
        "carbohydrates": "carbohydrateContent",
        "sugar": "sugarContent",
        "cholesterol": "cholesterolContent",
        "sodium": "sodiumContent",
        "protein": "proteinContent",
        "fiber": "fiberContent",
    }

    return {api_name: parse_nutrient_value(nutrients.get(source_name))
            for api_name, source_name in field_map.items()}

def parse_yields(yields: str):
    """Split a scraper yields string (e.g. "4 servings") into amount and description.

    Args:
        yields (str): raw yields text from the scraper; may be empty or None
    Returns:
        tuple: (float amount, str description); (0, "") for empty input,
        (0, original text) when the text does not start with a number
    """
    if not yields:
        return 0, ""

    # partition keeps the full remainder as the description ("1 dozen cookies"
    # -> "dozen cookies"); the old split()[1] dropped everything after one word
    amount, _, description = yields.partition(" ")

    try:
        return float(amount), description
    except ValueError:
        # e.g. "Serves 4" — no leading number; fall back instead of raising
        return 0, yields

def parse_nutrient_value(value: str) -> float:
    """Extract the leading numeric quantity from a nutrient string (e.g. "25 g" -> 25.0).

    Args:
        value (str): raw nutrient text; may be None or empty
    Returns:
        float: leading number, or 0 when the text is missing or non-numeric
    """
    if not value:
        return 0

    # The previous pattern raised AttributeError when the text did not start
    # with a digit (search() returns None); also accept a decimal fraction so
    # "2.5 g" is not truncated to 2.0.
    match = re.search(r"^(?P<Value>\d{1,5}(?:\.\d+)?)", value)

    return float(match.group("Value")) if match else 0
70 changes: 69 additions & 1 deletion api/functions/util.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from contextlib import suppress
import io
from zipfile import ZipFile
from fractions import Fraction
Expand All @@ -10,6 +11,8 @@
import pillow_avif
from recipe_scrapers import scrape_html, AbstractScraper

ureg = UnitRegistry()

def parse_recipe_ingredients(text: str, ureg: UnitRegistry):
"""Parses a recipe collection of ingredientes that are formatted in a single string separated by \n
Args:
Expand Down Expand Up @@ -186,4 +189,69 @@ def get_recipe_image(image_url: str):
def get_html(url: str) -> AbstractScraper:
    """Download the page at *url* and return a recipe scraper over its HTML.

    Args:
        url (str): address of the recipe page to fetch
    Returns:
        AbstractScraper: scraper built with wild_mode so unknown sites still parse
    """
    html = requests.get(url, headers=request_headers).content

    # A duplicated, unreachable second return statement was removed here.
    return scrape_html(html, url, wild_mode=True)

def get_recipe_from_scraper(scraper: AbstractScraper, download_image: bool = False):
    """Build the API recipe payload from a recipe_scrapers scraper.

    Args:
        scraper (AbstractScraper): scraper object for the recipe page
        download_image (bool): whether to download the image or not, default is False
    Returns:
        dict: dictionary with recipe information
    """
    language = scraper.language() or "en"

    servings, servings_text = parse_yields(scraper.yields())
    recipe = {
        "title": scraper.title(),
        "totalTime": scraper.total_time(),
        "yields": servings,
        "yieldsDescription": servings_text,
        "ingredients": [parse_recipe_ingredient(item, language, ureg)
                        for item in scraper.ingredients()],
        "steps": [parse_recipe_instruction(step, language)
                  for step in scraper.instructions_list()],
        "image": scraper.image(),
        "host": scraper.host(),
        "language": scraper.language()
    }

    # since nutrients are not always available, we need to suppress the exception
    with suppress(NotImplementedError):
        recipe["nutrients"] = parse_nutrients(scraper.nutrients())

    if download_image:
        recipe["image"] = get_recipe_image(recipe["image"])

    return recipe

def parse_nutrients(nutrients: dict):
    """Convert raw scraper nutrient fields into the API's nutrient schema.

    Args:
        nutrients (dict): raw nutrient strings keyed by scraper field names
    Returns:
        dict: nutrient values keyed by API field names, parsed to floats
    """
    # API field name -> scraper field name
    field_map = {
        "calories": "calories",
        "totalFat": "fatContent",
        "saturatedFat": "saturatedFatContent",
        "unsaturatedFat": "unsaturatedFatContent",
        "transFat": "transFatContent",
        "carbohydrates": "carbohydrateContent",
        "sugar": "sugarContent",
        "cholesterol": "cholesterolContent",
        "sodium": "sodiumContent",
        "protein": "proteinContent",
        "fiber": "fiberContent",
    }

    return {api_name: parse_nutrient_value(nutrients.get(source_name))
            for api_name, source_name in field_map.items()}

def parse_yields(yields: str):
    """Split a scraper yields string (e.g. "4 servings") into amount and description.

    Args:
        yields (str): raw yields text from the scraper; may be empty or None
    Returns:
        tuple: (float amount, str description); (0, "") for empty input,
        (0, original text) when the text does not start with a number
    """
    if not yields:
        return 0, ""

    # partition keeps the full remainder as the description ("1 dozen cookies"
    # -> "dozen cookies"); the old split()[1] dropped everything after one word
    amount, _, description = yields.partition(" ")

    try:
        return float(amount), description
    except ValueError:
        # e.g. "Serves 4" — no leading number; fall back instead of raising
        return 0, yields

def parse_nutrient_value(value: str) -> float:
    """Extract the leading numeric quantity from a nutrient string (e.g. "25 g" -> 25.0).

    Args:
        value (str): raw nutrient text; may be None or empty
    Returns:
        float: leading number, or 0 when the text is missing or non-numeric
    """
    if not value:
        return 0

    # The previous pattern raised AttributeError when the text did not start
    # with a digit (search() returns None); also accept a decimal fraction so
    # "2.5 g" is not truncated to 2.0.
    match = re.search(r"^(?P<Value>\d{1,5}(?:\.\d+)?)", value)

    return float(match.group("Value")) if match else 0
5 changes: 4 additions & 1 deletion src/pages/recipe/[id]/index.vue
Original file line number Diff line number Diff line change
Expand Up @@ -420,7 +420,10 @@ async function shareOnline() {
const result = await response.json();
shareCode.value = result.id;
shareQRCode.value = result.qr_code;
shareQRCode.value = result.qr_code
// HACK: remove width and height from QR Code as library that generates it
// doesn't have an option to remove it
.replace("width=\"29mm\"", "").replace("height=\"29mm\"", "");
isShareOptionsModalOpen.value = true;
Expand Down

0 comments on commit 9a57e28

Please sign in to comment.