diff --git a/.gitignore b/.gitignore
index 462824c..5f9a035 100644
--- a/.gitignore
+++ b/.gitignore
@@ -199,7 +199,7 @@ poetry.toml
# ruff
.ruff_cache/
-
+pgdata
# LSP config files
pyrightconfig.json
diff --git a/README.md b/README.md
index 1f162aa..3bdaee6 100644
--- a/README.md
+++ b/README.md
@@ -253,6 +253,32 @@ docker-compose logs django # Только Django
- [Docker Documentation](https://docs.docker.com/)
- [PostgreSQL Documentation](https://www.postgresql.org/docs/)
+
+**Build and start the containers:**
+```bash
+docker-compose build --no-cache
+docker-compose up # builds and starts the services
+```
+
+## Running the task in Celery
+
+**Commands to run the task:**
+
+Start Redis first:
+```bash
+docker run -d -p 6379:6379 --name redis redis:alpine
+```
+
+Then start a Celery worker (which executes tasks) and Celery Beat (which only schedules them; without a running worker nothing gets executed):
+```bash
+celery -A config worker -l info
+celery -A config beat -l info
+```
+
+- `-A config` points to the Celery application (the `config` package)
+- `worker` starts a worker process that consumes and executes tasks from the queue
+- `beat` starts Celery Beat, the component that periodically sends tasks to the queue
+- `-l info` sets the logging level (DEBUG, INFO, WARNING, ERROR)
+
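+To trigger the scraping task once by hand, a minimal sketch (assumes Redis and a worker are already running):
+```python
+# enqueue parse_books_task; a running Celery worker picks it up from the broker
+from apps.books.tasks import parse_books_task
+
+parse_books_task.delay()
+```
+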
## 📄 Лицензия
Этот проект лицензирован под MIT License - см. файл [LICENSE](LICENSE) для деталей.
@@ -266,4 +292,5 @@ docker-compose logs django # Только Django
---
-**Разработано с ❤️ используя современные инструменты Python**
\ No newline at end of file
+**Разработано с ❤️ используя современные инструменты Python**
+
diff --git a/apps/books/management/__init__.py b/apps/books/management/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/books/management/commands/__init__.py b/apps/books/management/commands/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/books/management/commands/parse_books.py b/apps/books/management/commands/parse_books.py
new file mode 100644
index 0000000..3cb2238
--- /dev/null
+++ b/apps/books/management/commands/parse_books.py
@@ -0,0 +1,83 @@
+import asyncio
+from asgiref.sync import sync_to_async
+from django.core.management.base import BaseCommand
+from urllib.parse import urljoin
+
+from apps.books.models import Book, Author, Publisher
+from apps.books.services.book_saver import BookSaver
+from apps.books.scrapers.piter_publ.book_parser import BookParser
+from apps.books.scrapers.piter_publ.piter_scraper import PiterScraper
+from apps.books.scrapers.base_scraper import BaseScraper
+from apps.books.services.author_service import AuthorService
+from apps.books.services.publisher_service import PublisherService
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+author_service = AuthorService(Author)
+publisher_service = PublisherService(Publisher)
+book_saver = BookSaver(Book, publisher_service, author_service, logger)
+
+
+class AsyncBookFetcher(BaseScraper):
+ def __init__(self, base_domain: str, delay: float = 2.0, max_concurrent: int = 3):
+ super().__init__(delay)
+ self.base_domain = base_domain
+ self.semaphore = asyncio.Semaphore(max_concurrent)
+ self._last_request_time = 0
+
+ async def scrape_book(self, url: str):
+ async with self.semaphore:
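+            # best-effort throttle: keep roughly self.delay between requests, on
+            # top of the semaphore's concurrency cap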
+            loop = asyncio.get_running_loop()
+            elapsed = loop.time() - self._last_request_time
+            if elapsed < self.delay:
+                await asyncio.sleep(self.delay - elapsed)
+            self._last_request_time = loop.time()
+
+ if url.startswith("/"):
+ url = urljoin(self.base_domain, url)
+
+ logger.info(f"fetching book: {url}")
+ html = await self.fetch(url)
+ if not html:
+ logger.warning(f"failed to fetch {url}")
+ return None
+
+ parser = BookParser(html)
+ book_data = {
+ "url": url,
+ "book_title": parser.extract_book_name().get("book_title", ""),
+ "author": parser.extract_authors(),
+ "price": parser.extract_price(),
+ "details": parser.extract_all_params(),
+ "description": parser.extract_description().get("description", ""),
+ "cover": parser.extract_cover_image(),
+ }
+ logger.debug(f"parsed book data for: {book_data['book_title']}")
+ return book_data
+
+
+class Command(BaseCommand):
+ help = "Парсит книги с сайта Piter и сохраняет в базу данных"
+
+ def handle(self, *args, **kwargs):
+ logger.info("starting book import from Piter")
+ asyncio.run(self.import_books())
+ logger.info("book import finished")
+
+ async def import_books(self):
+ piter = PiterScraper()
+ book_scraper = AsyncBookFetcher(base_domain="https://www.piter.com")
+
+ tasks = []
+ async for link in piter.scrape_book_links():
+ logger.debug(f"found book link: {link}")
+ task = asyncio.create_task(book_scraper.scrape_book(link))
+ tasks.append(task)
+
+ for coro in asyncio.as_completed(tasks):
+ book = await coro
+ if book:
+ await self.save_book(book)
+
+ async def save_book(self, item: dict):
+ await sync_to_async(book_saver.save_book)(item)
diff --git a/apps/books/scrapers/__init__.py b/apps/books/scrapers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/books/scrapers/base_scraper.py b/apps/books/scrapers/base_scraper.py
new file mode 100644
index 0000000..e47d582
--- /dev/null
+++ b/apps/books/scrapers/base_scraper.py
@@ -0,0 +1,24 @@
+import asyncio
+import httpx
+
+from bs4 import BeautifulSoup
+
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+
+
+class BaseScraper:
+ def __init__(self, delay: float = 1.0):
+ self.headers = {"User-Agent": "Mozilla/5.0"}
+ self.delay = delay
+
+ async def fetch(self, url):
+ try:
+ await asyncio.sleep(self.delay)
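+            # a fresh AsyncClient per request keeps this simple; reuse a single
+            # client if connection pooling ever matters for throughput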
+ async with httpx.AsyncClient(timeout=10.0) as client:
+ response = await client.get(url, headers=self.headers)
+ response.raise_for_status()
+ return response.text
+        except httpx.HTTPError as e:
+            logger.warning(f"request failed: {url}, error: {str(e)}")
+ return None
+
+ def parse(self, html) -> BeautifulSoup:
+ return BeautifulSoup(html, "html.parser")
diff --git a/apps/books/scrapers/piter_publ/__init__.py b/apps/books/scrapers/piter_publ/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/apps/books/scrapers/piter_publ/book_parser.py b/apps/books/scrapers/piter_publ/book_parser.py
new file mode 100644
index 0000000..74f5883
--- /dev/null
+++ b/apps/books/scrapers/piter_publ/book_parser.py
@@ -0,0 +1,148 @@
+from bs4 import BeautifulSoup
+from typing import List, Dict
+from urllib.parse import urljoin
+
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+
+
+class BookParser:
+ def __init__(self, html: str, base_domain: str = "https://www.piter.com"):
+ self.soup = BeautifulSoup(html, "lxml")
+ self.base_domain = base_domain
+
+ def extract_book_name(self):
+ try:
+ title = self.soup.select_one("div.product-info h1")
+ if not title:
+ logger.warning("book title not found in HTML")
+ return {"book_title": ""}
+
+ result = {"book_title": title.get_text(strip=True) if title else ""}
+ logger.debug(f"extracted book title: {result['book_title']}")
+ return result
+ except Exception as e:
+ logger.error(f"book title extraction failed {str(e)}")
+ return {"book_title": ""}
+
+ def extract_description(self):
+ try:
+ description = self.soup.find("div", id="tab-1")
+ if not description:
+ logger.warning("book description not found in HTML")
+ return {"description": ""}
+ full_text = description.get_text(separator="\n", strip=True)
+ logger.debug(f"extracted book description: {full_text}")
+ return {"description": full_text}
+ except Exception as e:
+ logger.error(f"description extraction failed {str(e)}")
+ return {"description": ""}
+
+ def extract_all_params(self):
+ result = {}
+ items = self.soup.select("div.params li")
+
+ for li in items:
+ label = li.select_one("span.grid-5")
+ value = li.select_one("span.grid-7")
+ if label and value:
+ key = label.get_text(strip=True).rstrip(":")
+ val = value.get_text(strip=True)
+ result[key] = val
+ return result
+
+ def extract_cover_image(self):
+ try:
+ container = self.soup.select_one('div.photo, div[class*="photo"]')
+ if container:
+ img = container.select_one("img")
+ if img and img.get("src"):
+ src = img["src"].strip()
+ logger.debug(f"extracted cover image from container: {src}")
+ return {"cover_image": urljoin("https://www.piter.com", src)}
+
+ img = self.soup.select_one("img")
+ if img and img.get("src"):
+ src = img["src"].strip()
+ logger.debug(f"extracted cover image from img: {src}")
+ return {"cover_image": urljoin("https://www.piter.com", src)}
+
+ return {"cover_image": ""}
+ except Exception as e:
+ logger.error(f"cover extraction failed {str(e)}")
+ return {"cover": ""}
+
+ def extract_authors(self) -> List[Dict[str, str]]:
+ try:
+ authors = []
+ author_blocks = self.soup.select("#tab-2 .autor-wrapper")
+
+ for block in author_blocks:
+ name_tag = block.select_one("h2")
+ if name_tag:
+ full_name = name_tag.get_text(strip=True)
+ parts = full_name.split()
+ if len(parts) == 1:
+ last_name = parts[0]
+ first_name = ""
+ elif len(parts) == 2:
+ last_name, first_name = parts
+ elif len(parts) >= 3:
+ first_name = parts[1]
+ last_name = " ".join([parts[0]] + parts[2:])
+ else:
+ last_name = ""
+ first_name = ""
+ logger.warning(f"empty author name: {full_name}")
+
+ description_block = name_tag.parent
+ bio_parts = []
+ for bio in description_block.contents:
+ if bio != name_tag and isinstance(bio, str):
+ bio_parts.append(bio.strip())
+ bio = " ".join(bio_parts).strip()
+
+ authors.append(
+ {
+ "first_name": first_name.strip("."),
+ "last_name": last_name,
+ "bio": bio,
+ }
+ )
+
+ logger.info(f"parsed {len(authors)} authors from tab-2")
+ return authors
+ except Exception as e:
+ logger.error(f"failed to parse authors from tab-2: {str(e)}")
+ logger.exception("tab-2 author parsing error details")
+ return []
+
+ def extract_author_bio(self) -> str:
+ try:
+ block = self.soup.select_one("div.author-wrapper div.grid-9.s-grid-12")
+ if not block:
+ logger.warning("block with author_bio not found in HTML")
+ return ""
+ name_tag = block.select_one("h2")
+ name_text = name_tag.get_text(strip=True) if name_tag else ""
+ full_text = block.get_text(separator=" ", strip=True)
+ logger.debug(f"extracted author_bio: {full_text}")
+ return full_text.replace(name_text, "", 1).strip()
+ except Exception as e:
+ logger.error(f"author_bio extraction failed {str(e)}")
+ return {"author_bio": ""}
+
+ def extract_price(self):
+ try:
+ price = self.soup.select("div.price.color")
+            if len(price) >= 2:
+                logger.debug(f"extracted prices: {[p.text.strip() for p in price]}")
+                return {
+                    "price": price[0].text.strip(),
+                    "electronic_price": price[1].text.strip(),
+                }
+            logger.warning("price blocks not found in HTML")
+            return {"price": "", "electronic_price": ""}
+ except Exception as e:
+ logger.error(f"price extraction failed {str(e)}")
+ return {"price": "", "electronic_price": ""}
diff --git a/apps/books/scrapers/piter_publ/link_extractor.py b/apps/books/scrapers/piter_publ/link_extractor.py
new file mode 100644
index 0000000..843d202
--- /dev/null
+++ b/apps/books/scrapers/piter_publ/link_extractor.py
@@ -0,0 +1,38 @@
+from bs4 import BeautifulSoup
+from urllib.parse import urljoin
+from typing import List
+
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+
+
+class LinkExtractor:
+ def __init__(
+ self, base_domain: str, expected_prefix: str = "/collection/all/product/"
+ ):
+ self.base_domain = base_domain
+ self.expected_prefix = expected_prefix
+
+ def extract_links(self, soup: BeautifulSoup) -> List[str]:
+ """
+ извлекает полные ссылки на книги
+ """
+ try:
+ links = []
+
+ container = soup.find("div", class_="products-list")
+            if not container:
+                logger.warning("products-list container not found in HTML")
+                return []
+
+ for tag in container.find_all("a"):
+ href = tag.get("href")
+ if href and href.startswith(self.expected_prefix):
+ full_url = urljoin(self.base_domain, href)
+ links.append(full_url)
+
+ logger.info(f"collected links: {links}")
+ return links
+ except Exception as e:
+ logger.error(f"failed to parse links: {str(e)}")
+ return []
diff --git a/apps/books/scrapers/piter_publ/paginator.py b/apps/books/scrapers/piter_publ/paginator.py
new file mode 100644
index 0000000..55fcf62
--- /dev/null
+++ b/apps/books/scrapers/piter_publ/paginator.py
@@ -0,0 +1,32 @@
+import re
+from bs4 import BeautifulSoup
+from urllib.parse import urljoin
+
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+
+
+class Paginator:
+ def __init__(self, base_domain: str):
+ self.base_domain = base_domain
+
+ def get_next_page(self, soup: BeautifulSoup):
+ """
+ возвращает полный URL следующей страницы, если она существует
+ """
+ next_button = soup.find("a", string="Следующая")
+ if not next_button:
+ logger.info("no next page")
+ return None
+
+ href = next_button.get("href")
+ if not href:
+ return None
+
+        match = re.search(r"page=(\d+)", href)
+        if match:
+            next_url = urljoin(self.base_domain, href)
+            logger.success(f"found next page: {next_url}")
+            return next_url
+
+ return None
diff --git a/apps/books/scrapers/piter_publ/piter_scraper.py b/apps/books/scrapers/piter_publ/piter_scraper.py
new file mode 100644
index 0000000..a5035c1
--- /dev/null
+++ b/apps/books/scrapers/piter_publ/piter_scraper.py
@@ -0,0 +1,50 @@
+import asyncio
+
+from ..base_scraper import BaseScraper
+from .paginator import Paginator
+from .link_extractor import LinkExtractor
+from logger.books.log import get_logger
+
+logger = get_logger(__name__)
+BASE_DOMAIN = "https://www.piter.com"
+BASE_URL = "https://www.piter.com/collection/all?q=python"
+
+
+class PiterScraper(BaseScraper):
+ def __init__(self, base_url=None, delay=1.0, paginator=None, link_extractor=None):
+ super().__init__(delay)
+ self.base_url = base_url or BASE_URL
+ self.paginator = paginator or Paginator(BASE_DOMAIN)
+ self.link_extractor = link_extractor or LinkExtractor(BASE_DOMAIN)
+
+ async def scrape_book_links(self, url=None):
+ current_url = url or self.base_url
+ page_number = 1
+
+ while current_url:
+ logger.info(f"loading page {page_number}: {current_url}")
+ html = await self.fetch(current_url)
+ if not html:
+ logger.warning(f"empty html at page {page_number}, stopping")
+ break
+
+ soup = self.parse(html)
+ links = self.link_extractor.extract_links(soup)
+
+ if not links:
+ logger.warning(f"no book links found on page {page_number}, stopping")
+ break
+
+ logger.debug(f"found {len(links)} links on page {page_number}")
+
+ for link in links:
+ yield link
+
+ next_page = self.paginator.get_next_page(soup)
+ if next_page:
+ current_url = next_page
+ page_number += 1
+ else:
+ logger.info("no next page found, pagination ended")
+ break
diff --git a/apps/books/services/author_service.py b/apps/books/services/author_service.py
new file mode 100644
index 0000000..6109dee
--- /dev/null
+++ b/apps/books/services/author_service.py
@@ -0,0 +1,22 @@
+from typing import List
+
+from apps.books.models import Author
+from apps.books.validators.validators import AuthorInput
+
+
+class AuthorService:
+ def __init__(self, AuthorModel):
+ self.Author = AuthorModel
+
+ def get_or_create_authors(self, authors_data: List[AuthorInput]) -> List[Author]:
+ authors = []
+ for data in authors_data:
+ if not data.first_name and not data.last_name:
+ continue
+ author_obj, _ = self.Author.objects.get_or_create(
+ first_name=data.first_name,
+ last_name=data.last_name,
+ bio=data.bio,
+ )
+ authors.append(author_obj)
+ return authors
diff --git a/apps/books/services/book_saver.py b/apps/books/services/book_saver.py
new file mode 100644
index 0000000..25dbfde
--- /dev/null
+++ b/apps/books/services/book_saver.py
@@ -0,0 +1,77 @@
+from datetime import datetime
+from django.db import transaction
+from pydantic import ValidationError
+
+from apps.books.validators.validators import BookInput
+
+
+class BookSaver:
+ def __init__(self, BookModel, publisher_service, author_service, logger):
+ self.Book = BookModel
+ self.Publisher = publisher_service
+ self.author_service = author_service
+ self.logger = logger
+
+ @transaction.atomic
+ def save_book(self, item: dict):
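+        # validate the raw scraped dict first; @transaction.atomic above rolls
+        # back every write below if saving fails partway through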
+ try:
+ book_input = BookInput(**item)
+ except ValidationError as e:
+ self.logger.warning(f"Invalid book input: {e}")
+ return
+ isbn = book_input.details.isbn.strip()
+ raw_year = book_input.details.year or str(datetime.now().year)
+
+ try:
+ published_at = datetime.strptime(raw_year, "%Y").date()
+ except ValueError:
+ logger.warning(f"invalid year format '{raw_year}', defaulting to 2024")
+ published_at = datetime.strptime("2024", "%Y").date()
+
+ publisher = self.Publisher.get_or_create_publisher("Издательство Питер")
+
+        book = self.Book.objects.filter(isbn_code=isbn).first()
+ if book:
+ logger.info(f"updating book: {book_input.book_title} ({isbn})")
+ book.title = book_input.book_title
+ book.description = book_input.description
+ book.published_at = published_at
+ book.total_pages = book_input.details.pages
+ book.cover_image = book_input.cover.cover_image or ""
+ book.language = "Русский"
+ book.publisher = publisher
+ book.save()
+ else:
+ logger.info(f"creating new book: {book_input.book_title} ({isbn})")
+ book = Book.objects.create(
+ isbn_code=isbn,
+ title=book_input.book_title,
+ description=book_input.description,
+ published_at=published_at,
+ total_pages=book_input.details.pages,
+ cover_image=book_input.cover.cover_image or "",
+ language="Русский",
+ publisher=publisher,
+ )
+
+        authors = self.author_service.get_or_create_authors(book_input.author)
+
+ book.author.set(authors)
+ logger.debug(f"saved book with authors: {book_input.book_title}")
diff --git a/apps/books/services/publisher_service.py b/apps/books/services/publisher_service.py
new file mode 100644
index 0000000..a7bc7f9
--- /dev/null
+++ b/apps/books/services/publisher_service.py
@@ -0,0 +1,10 @@
+from apps.books.models import Publisher
+
+
+class PublisherService:
+ def __init__(self, PublisherModel):
+ self.Publisher = PublisherModel
+
+ def get_or_create_publisher(self, name: str) -> Publisher:
+ publisher, _ = self.Publisher.objects.get_or_create(name=name)
+ return publisher
diff --git a/apps/books/tasks.py b/apps/books/tasks.py
new file mode 100644
index 0000000..c043edf
--- /dev/null
+++ b/apps/books/tasks.py
@@ -0,0 +1,35 @@
+import asyncio
+
+from asgiref.sync import sync_to_async
+from celery import shared_task
+
+from apps.books.scrapers.piter_publ.piter_scraper import PiterScraper
+from apps.books.management.commands.parse_books import (
+ AsyncBookFetcher,
+ book_saver,
+ logger,
+)
+
+
+@shared_task
+def parse_books_task():
+ """
+ celery-задача для запуска парсинга книг
+ """
+
+ async def import_books():
+ piter = PiterScraper()
+ book_scraper = AsyncBookFetcher(base_domain="https://www.piter.com")
+
+ tasks = []
+ async for link in piter.scrape_book_links():
+ logger.debug(f"found book link: {link}")
+ task = asyncio.create_task(book_scraper.scrape_book(link))
+ tasks.append(task)
+
+ for coro in asyncio.as_completed(tasks):
+ book = await coro
+ if book:
+ await sync_to_async(book_saver.save_book)(book)
+
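+    # the Celery task itself is synchronous; asyncio.run() drives the async scraper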
+ asyncio.run(import_books())
diff --git a/apps/books/validators/validators.py b/apps/books/validators/validators.py
new file mode 100644
index 0000000..9dfb915
--- /dev/null
+++ b/apps/books/validators/validators.py
@@ -0,0 +1,39 @@
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+from typing import List, Optional
+
+
+class AuthorInput(BaseModel):
+ first_name: str = ""
+ last_name: str = ""
+ bio: str = ""
+
+
+class CoverInput(BaseModel):
+ cover_image: Optional[str] = ""
+
+
+class BookDetails(BaseModel):
+ isbn: str = Field(alias="ISBN")
+ year: Optional[str] = Field(alias="Год", default=None)
+ pages: int = Field(default=0, alias="Страниц")
+
+    model_config = ConfigDict(populate_by_name=True)
+
+
+class BookInput(BaseModel):
+ book_title: str
+ description: str
+ author: List[AuthorInput]
+ cover: CoverInput
+ details: BookDetails
+
+ @field_validator("details")
+ @classmethod
+ def ensure_isbn(cls, v: BookDetails) -> BookDetails:
+ if not v.isbn.strip():
+ raise ValueError("ISBN is required")
+ return v
+
+    model_config = ConfigDict(populate_by_name=True)
diff --git a/config/__init__.py b/config/__init__.py
index e69de29..6d300d0 100644
--- a/config/__init__.py
+++ b/config/__init__.py
@@ -0,0 +1,3 @@
+from .celery_app import app as celery_app
+
+__all__ = ("celery_app",)
diff --git a/config/celery_app.py b/config/celery_app.py
new file mode 100644
index 0000000..58f5eaa
--- /dev/null
+++ b/config/celery_app.py
@@ -0,0 +1,10 @@
+import os
+from celery import Celery
+
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
+
+
+app = Celery("pythonbooks")
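+# read CELERY_*-prefixed options from Django settings and discover tasks.py
+# modules across installed apps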
+app.config_from_object("django.conf:settings", namespace="CELERY")
+app.autodiscover_tasks()
diff --git a/config/settings.py b/config/settings.py
index 32012cf..63b4a7e 100644
--- a/config/settings.py
+++ b/config/settings.py
@@ -1,6 +1,7 @@
import os
from pathlib import Path
+from celery.schedules import crontab
import environ
@@ -99,7 +100,7 @@
("en", "English"),
("ru", "Russian"),
]
-TIME_ZONE = "UTC"
+TIME_ZONE = "Europe/Moscow"
USE_I18N = True
USE_L10N = True
USE_TZ = True
@@ -119,3 +120,15 @@
# DEFAULT PRIMARY KEY
# ====================
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
+
+# ====================
+# CELERY SETTINGS
+# ====================
+# inside Docker, override these via .env (e.g. redis://redis:6379/0)
+CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
+CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")
+CELERY_BEAT_SCHEDULE = {
+ "parse-books-every-night": {
+ "task": "apps.books.tasks.parse_books_task",
+ "schedule": crontab(hour=1, minute=11),
+ }
+}
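+# NOTE: beat evaluates crontab() in Celery's own timezone (UTC by default, since
+# Django's TIME_ZONE is not CELERY_-prefixed); set CELERY_TIMEZONE = TIME_ZONE
+# to schedule in Europe/Moscow.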
diff --git a/docker-compose.yml b/docker-compose.yml
index 9d4e2d2..7582312 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -39,6 +39,38 @@ services:
restart: unless-stopped
networks:
- pythonbooks-network
+  redis:
+    image: redis:alpine
+    ports:
+      - "6379:6379"
+    networks:
+      - pythonbooks-network
+  celery:
+    build: .
+    container_name: celery_worker
+    command: celery -A config worker --loglevel=info
+    volumes:
+      - ./apps:/app/apps
+      - ./config:/app/config
+      - ./manage.py:/app/manage.py
+      - /app/.venv  # anonymous volume so bind mounts don't mask the image's venv
+    env_file:
+      - .env
+    depends_on:
+      - redis
+      - django
+    networks:
+      - pythonbooks-network
+  celery-beat:
+    build: .
+    container_name: celery_beat
+    command: celery -A config beat --loglevel=info
+    volumes:
+      - ./apps:/app/apps
+      - ./config:/app/config
+      - ./manage.py:/app/manage.py
+      - /app/.venv
+    env_file:
+      - .env
+    depends_on:
+      - redis
+      - django
+    networks:
+      - pythonbooks-network
volumes:
postgres_data:
diff --git a/entrypoint.sh b/entrypoint.sh
index a67b272..054d62d 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -3,5 +3,10 @@ set -e
uv run python manage.py migrate
+if [ "$1" = "celery" ]; then
+ shift
+ exec uv run celery "$@"
+fi
+
# Transfer control to docker-compose "command"
exec "$@"
diff --git a/logger/books/log.py b/logger/books/log.py
new file mode 100644
index 0000000..6911294
--- /dev/null
+++ b/logger/books/log.py
@@ -0,0 +1,5 @@
+from logger.logger import setup_logger
+
+
+def get_logger(name=None):
+ return setup_logger(module_name=name or __name__, log_dir="logs/scrapers")
diff --git a/logger/logger.py b/logger/logger.py
new file mode 100644
index 0000000..d167f5b
--- /dev/null
+++ b/logger/logger.py
@@ -0,0 +1,49 @@
+import sys
+
+from pathlib import Path
+from loguru import logger
+
+BASE_DIR = Path(__file__).resolve().parent.parent
+
+
+def setup_logger(module_name: str, log_dir: str = "logs"):
+ log_path = BASE_DIR / log_dir
+ log_path.mkdir(parents=True, exist_ok=True)
+
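+    # loguru's logger is process-global: clear previously added sinks so repeated
+    # setup_logger() calls don't duplicate handlers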
+ logger.remove()
+
+ file_format = "{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{line} | {message}"
+
+ console_format = (
+ "{time:HH:mm:ss} | "
+ "{level: <8} | "
+ "{name}:{line} | "
+ "{message}"
+ )
+
+ logger.add(
+ log_path / "debug.log",
+ rotation="10 MB",
+ format=file_format,
+ level="DEBUG",
+ enqueue=True,
+ )
+
+ logger.add(
+ log_path / "errors.log",
+ rotation="10 MB",
+ retention="3 months",
+ format=file_format,
+ level="ERROR",
+ enqueue=True,
+ )
+
+ logger.add(
+ sys.stderr,
+ format=console_format,
+ colorize=True,
+ level="DEBUG",
+ backtrace=False,
+ )
+
+ return logger.bind(module=module_name)
diff --git a/pyproject.toml b/pyproject.toml
index b7bbc1b..6b4d526 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,4 +13,9 @@ dependencies = [
"psycopg2-binary>=2.9.10",
"sorl-thumbnail==12.11.0",
"python-dotenv>=1.0.0",
+ "celery==5.5.3",
+ "billiard==4.2.1",
+ "kombu==5.5.4",
+ "vine==5.1.0",
+ "redis>=6.2.0",
]
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index 0fa99d8..73ac833 100644
--- a/uv.lock
+++ b/uv.lock
@@ -2,6 +2,18 @@ version = 1
revision = 2
requires-python = ">=3.13"
+[[package]]
+name = "amqp"
+version = "5.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "vine" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" },
+]
+
[[package]]
name = "asgiref"
version = "3.8.1"
@@ -20,6 +32,92 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" },
]
+[[package]]
+name = "billiard"
+version = "4.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" },
+]
+
+[[package]]
+name = "celery"
+version = "5.5.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "billiard" },
+ { name = "click" },
+ { name = "click-didyoumean" },
+ { name = "click-plugins" },
+ { name = "click-repl" },
+ { name = "kombu" },
+ { name = "python-dateutil" },
+ { name = "vine" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144, upload-time = "2025-06-01T11:08:12.563Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775, upload-time = "2025-06-01T11:08:09.94Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
+]
+
+[[package]]
+name = "click-didyoumean"
+version = "0.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" },
+]
+
+[[package]]
+name = "click-plugins"
+version = "1.1.1.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" },
+]
+
+[[package]]
+name = "click-repl"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "prompt-toolkit" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
[[package]]
name = "django"
version = "5.1.7"
@@ -133,6 +231,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" },
]
+[[package]]
+name = "kombu"
+version = "5.5.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "amqp" },
+ { name = "packaging" },
+ { name = "tzdata" },
+ { name = "vine" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992, upload-time = "2025-06-01T10:19:22.281Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
[[package]]
name = "pillow"
version = "11.2.1"
@@ -163,6 +285,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/67/32/32dc030cfa91ca0fc52baebbba2e009bb001122a1daa8b6a79ad830b38d3/pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", size = 2417234, upload-time = "2025-04-12T17:49:08.399Z" },
]
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.51"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wcwidth" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
+]
+
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
@@ -182,6 +316,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" },
]
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
[[package]]
name = "python-dotenv"
version = "1.1.0"
@@ -196,28 +342,38 @@ name = "pythonbooks"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
+ { name = "billiard" },
+ { name = "celery" },
{ name = "django" },
{ name = "django-ckeditor-5" },
{ name = "django-environ" },
{ name = "django-extensions" },
{ name = "djangorestframework" },
{ name = "drf-spectacular" },
+ { name = "kombu" },
{ name = "psycopg2-binary" },
{ name = "python-dotenv" },
+ { name = "redis" },
{ name = "sorl-thumbnail" },
+ { name = "vine" },
]
[package.metadata]
requires-dist = [
+ { name = "billiard", specifier = "==4.2.1" },
+ { name = "celery", specifier = "==5.5.3" },
{ name = "django", specifier = "==5.1.7" },
{ name = "django-ckeditor-5", specifier = "==0.2.17" },
{ name = "django-environ", specifier = ">=0.12.0" },
{ name = "django-extensions", specifier = ">=4.1" },
{ name = "djangorestframework", specifier = ">=3.16.0" },
{ name = "drf-spectacular", specifier = ">=0.28.0" },
+ { name = "kombu", specifier = "==5.5.4" },
{ name = "psycopg2-binary", specifier = ">=2.9.10" },
{ name = "python-dotenv", specifier = ">=1.0.0" },
+ { name = "redis", specifier = ">=6.2.0" },
{ name = "sorl-thumbnail", specifier = "==12.11.0" },
+ { name = "vine", specifier = "==5.1.0" },
]
[[package]]
@@ -237,6 +393,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
]
+[[package]]
+name = "redis"
+version = "6.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ea/9a/0551e01ba52b944f97480721656578c8a7c46b51b99d66814f85fe3a4f3e/redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977", size = 4639129, upload-time = "2025-05-28T05:01:18.91Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/67/e60968d3b0e077495a8fee89cf3f2373db98e528288a48f1ee44967f6e8c/redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e", size = 278659, upload-time = "2025-05-28T05:01:16.955Z" },
+]
+
[[package]]
name = "referencing"
version = "0.36.2"
@@ -285,6 +450,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b6/97/5a4b59697111c89477d20ba8a44df9ca16b41e737fa569d5ae8bff99e650/rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763", size = 232218, upload-time = "2025-05-21T12:44:40.512Z" },
]
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
[[package]]
name = "sorl-thumbnail"
version = "12.11.0"
@@ -320,3 +494,21 @@ sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356, upload-time = "2021-10-13T11:15:12.316Z" },
]
+
+[[package]]
+name = "vine"
+version = "5.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
+]