diff --git a/.all-contributorsrc b/.all-contributorsrc
index 855bc358f6..ddbacfcd34 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -1669,6 +1669,25 @@
"code",
"test"
]
+ },
+ {
+ "login": "haryle",
+ "name": "harryle",
+ "avatar_url": "https://avatars.githubusercontent.com/u/64817481?v=4",
+ "profile": "https://github.com/haryle",
+ "contributions": [
+ "code",
+ "test"
+ ]
+ },
+ {
+ "login": "ubernostrum",
+ "name": "James Bennett",
+ "avatar_url": "https://avatars.githubusercontent.com/u/12384?v=4",
+ "profile": "http://www.b-list.org/",
+ "contributions": [
+ "bug"
+ ]
}
],
"contributorsPerLine": 7,
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 0fadc789d3..c96b5e05f5 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -6,6 +6,8 @@ on:
push:
branches:
- main
+ - develop
+ - v3.0
jobs:
docs:
@@ -22,7 +24,7 @@ jobs:
- uses: pdm-project/setup-pdm@v4
name: Set up PDM
with:
- python-version: "3.11"
+ python-version: "3.12"
allow-python-prereleases: false
cache: true
cache-dependency-path: |
@@ -38,9 +40,17 @@ jobs:
run: pdm run python tools/build_docs.py docs-build
if: github.event_name == 'release'
- - name: Build dev docs
- run: pdm run python tools/build_docs.py docs-build --version dev
- if: github.event_name == 'push'
+ - name: Build docs (main branch)
+ run: pdm run python tools/build_docs.py docs-build --version main
+ if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+
+ - name: Build docs (develop branch)
+ run: pdm run python tools/build_docs.py docs-build --version develop
+ if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
+
+ - name: Build docs (v3.0 branch)
+ run: pdm run python tools/build_docs.py docs-build --version 3-dev
+ if: github.event_name == 'push' && github.ref == 'refs/heads/v3.0'
- name: Deploy
uses: JamesIves/github-pages-deploy-action@v4
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2bfc75418a..abdb4aac6d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -23,7 +23,7 @@ repos:
- id: unasyncd
additional_dependencies: ["ruff"]
- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: "v0.3.2"
+ rev: "v0.3.4"
hooks:
- id: ruff
args: ["--fix"]
@@ -42,7 +42,7 @@ repos:
exclude: "test*|examples*|tools"
args: ["--use-tuple"]
- repo: https://github.com/ariebovenberg/slotscheck
- rev: v0.17.3
+ rev: v0.19.0
hooks:
- id: slotscheck
exclude: "test_*|docs|.github"
diff --git a/README.md b/README.md
index 0d24f547fe..fc277654d9 100644
--- a/README.md
+++ b/README.md
@@ -550,6 +550,10 @@ see [the contribution guide](CONTRIBUTING.rst).
Hugo van Kemenade 📖 |
Michael Gerbig 📖 |
CrisOG 🐛 💻 ⚠️ |
+ harryle 💻 ⚠️ |
+
+
+ James Bennett 🐛 |
diff --git a/docs/_static/versions.json b/docs/_static/versions.json
index 4eb21fef78..b98d2d9024 100644
--- a/docs/_static/versions.json
+++ b/docs/_static/versions.json
@@ -1 +1 @@
-{ "versions": ["1", "2", "dev"], "latest": "2" }
+{ "versions": ["1", "2", "main", "develop", "3-dev"], "latest": "2" }
diff --git a/docs/examples/testing/test_health_check_async.py b/docs/examples/testing/test_health_check_async.py
index 54356b0226..3806c22b61 100644
--- a/docs/examples/testing/test_health_check_async.py
+++ b/docs/examples/testing/test_health_check_async.py
@@ -1,3 +1,5 @@
+from typing import AsyncIterator
+
import pytest
from litestar import Litestar, MediaType, get
@@ -21,12 +23,12 @@ async def test_health_check() -> None:
@pytest.fixture(scope="function")
-def test_client() -> AsyncTestClient:
- return AsyncTestClient(app=app)
+async def test_client() -> AsyncIterator[AsyncTestClient[Litestar]]:
+ async with AsyncTestClient(app=app) as client:
+ yield client
-async def test_health_check_with_fixture(test_client: AsyncTestClient) -> None:
- async with test_client as client:
- response = await client.get("/health-check")
- assert response.status_code == HTTP_200_OK
- assert response.text == "healthy"
+async def test_health_check_with_fixture(test_client: AsyncTestClient[Litestar]) -> None:
+ response = await test_client.get("/health-check")
+ assert response.status_code == HTTP_200_OK
+ assert response.text == "healthy"
diff --git a/docs/examples/testing/test_health_check_sync.py b/docs/examples/testing/test_health_check_sync.py
index 87ea546180..e7886ff938 100644
--- a/docs/examples/testing/test_health_check_sync.py
+++ b/docs/examples/testing/test_health_check_sync.py
@@ -1,3 +1,5 @@
+from typing import Iterator
+
import pytest
from litestar import Litestar, MediaType, get
@@ -21,12 +23,12 @@ def test_health_check() -> None:
@pytest.fixture(scope="function")
-def test_client() -> TestClient:
- return TestClient(app=app)
+def test_client() -> Iterator[TestClient[Litestar]]:
+ with TestClient(app=app) as client:
+ yield client
-def test_health_check_with_fixture(test_client: TestClient) -> None:
- with test_client as client:
- response = client.get("/health-check")
- assert response.status_code == HTTP_200_OK
- assert response.text == "healthy"
+def test_health_check_with_fixture(test_client: TestClient[Litestar]) -> None:
+ response = test_client.get("/health-check")
+ assert response.status_code == HTTP_200_OK
+ assert response.text == "healthy"
diff --git a/docs/release-notes/changelog.rst b/docs/release-notes/changelog.rst
index 3dbbe2da95..75caa3a854 100644
--- a/docs/release-notes/changelog.rst
+++ b/docs/release-notes/changelog.rst
@@ -3,6 +3,121 @@
2.x Changelog
=============
+.. changelog:: 2.7.1
+ :date: 2024-03-22
+
+ .. change:: add default encoders for `Enums` and `EnumMeta`
+ :type: bugfix
+ :pr: 3193
+
+        This addresses an issue with serializing ``Enums`` that was reported on Discord.
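+
+        A minimal sketch of the behaviour this enables (the ``Color`` enum is illustrative, not part of the PR):
+
+        .. code-block:: python
+
+            from enum import Enum
+
+            from litestar.serialization import encode_json
+
+
+            class Color(Enum):
+                RED = "red"
+
+
+            # enum members are serialized via their ``value`` by the default encoders
+            assert encode_json(Color.RED) == b'"red"'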
+
+ .. change:: replace TestClient.__enter__ return type with Self
+ :type: bugfix
+ :pr: 3194
+
+        ``TestClient.__enter__`` and ``AsyncTestClient.__enter__`` now return ``Self``.
+        If you subclass ``TestClient``, its ``__enter__`` method should return an instance of the derived class
+        unless the method is overridden. ``Self`` is the more flexible return type here.
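+
+        A minimal sketch of what this enables for subclasses (``MyTestClient`` and the empty app are illustrative):
+
+        .. code-block:: python
+
+            from litestar import Litestar
+            from litestar.testing import TestClient
+
+
+            class MyTestClient(TestClient):
+                """A test client subclass with project-specific helpers."""
+
+
+            with MyTestClient(app=Litestar(route_handlers=[])) as client:
+                # with ``Self`` as the return type, type checkers infer ``MyTestClient`` here
+                assert isinstance(client, MyTestClient)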
+
+ .. change:: use the full path for fetching openapi.json
+ :type: bugfix
+ :pr: 3196
+ :issue: 3047
+
+        This specifies the ``spec-url`` of Rapidoc and the ``apiDescriptionUrl`` of Stoplight Elements as absolute
+        paths relative to the root of the site.
+
+        This ensures that both of them send the request for the OpenAPI schema JSON to the right endpoint.
+
+ .. change:: JSON schema ``examples`` were OpenAPI formatted
+ :type: bugfix
+ :pr: 3224
+ :issue: 2849
+
+ The generated ``examples`` in *JSON schema* objects were formatted as:
+
+ .. code-block:: json
+
+ "examples": {
+ "some-id": {
+ "description": "Lorem ipsum",
+ "value": "the real beef"
+ }
+ }
+
+        However, the above is the OpenAPI example format, which must not be used in JSON schema
+        objects. Schema objects follow a different format:
+
+ .. code-block:: json
+
+ "examples": [
+ "the real beef"
+ ]
+
+ * Explained in `APIs You Won't Hate blog post `_.
+ * `Schema objects spec `_
+ * `OpenAPI example format spec `_.
+
+ This is referenced at least from parameters, media types and components.
+
+ The technical change here is to define ``Schema.examples`` as ``list[Any]`` instead
+ of ``list[Example]``. Examples can and must still be defined as ``list[Example]``
+ for OpenAPI objects (e.g. ``Parameter``, ``Body``) but for JSON schema ``examples``
+ the code now internally generates/converts ``list[Any]`` format instead.
+
+ Extra confusion here comes from the OpenAPI 3.0 vs OpenAPI 3.1 difference.
+ OpenAPI 3.0 only allowed ``example`` (singular) field in schema objects.
+ OpenAPI 3.1 supports the full JSON schema 2020-12 spec and so ``examples`` array
+ in schema objects.
+
+ Both ``example`` and ``examples`` seem to be supported, though the former is marked
+ as deprecated in the latest specs.
+
+ This can be tested over at https://editor-next.swagger.io by loading up the
+ OpenAPI 3.1 Pet store example. Then add ``examples`` in ``components.schemas.Pet``
+        using both formats and see that the Swagger UI only renders the example once it is
+        properly formatted (it ignores it otherwise).
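+
+        Declaring examples on OpenAPI objects is unchanged; a minimal sketch (the handler and parameter are illustrative):
+
+        .. code-block:: python
+
+            from typing import Annotated
+
+            from litestar import get
+            from litestar.openapi.spec import Example
+            from litestar.params import Parameter
+
+
+            @get("/")
+            async def handler(
+                # OpenAPI objects such as ``Parameter`` still take ``list[Example]`` ...
+                q: Annotated[str, Parameter(examples=[Example(value="the real beef")])],
+            ) -> str:
+                # ... while the JSON schema generated for ``q`` now emits ``"examples": ["the real beef"]``
+                return q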
+
+ .. change:: queue_listener handler for Python >= 3.12
+ :type: bugfix
+ :pr: 3185
+ :issue: 2954
+
+ - Fix the ``queue_listener`` handler for Python 3.12
+
+        Python 3.12 introduced a new way to configure ``QueueHandler`` and ``QueueListener`` via
+        ``logging.config.dictConfig()``, as described in the
+        `logging documentation `_.
+
+        The listener still needs to be started and stopped, as before.
+        To do so, we've introduced ``LoggingQueueListener``.
+
+        As stated in the documentation:
+ * Any custom queue handler and listener classes will need to be defined with the same initialization signatures
+ as `QueueHandler `_ and
+ `QueueListener `_.
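+
+        A sketch of the default ``queue_listener`` handler definition on Python >= 3.12, mirroring the values
+        added in this PR (the variable name is illustrative):
+
+        .. code-block:: python
+
+            queue_listener_handler = {
+                "class": "logging.handlers.QueueHandler",
+                "queue": {
+                    "()": "queue.Queue",
+                    "maxsize": -1,
+                },
+                "listener": "litestar.logging.standard.LoggingQueueListener",
+                "handlers": ["console"],
+                # no "formatter" here: the console handler formats the record
+            }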
+
+ .. change:: extend openapi meta collected from domain models
+ :type: bugfix
+ :pr: 3237
+ :issue: 3232
+
+        :class:`~litestar.typing.FieldDefinition` instances pack any OpenAPI metadata onto a ``KwargDefinition``
+        instance when types are parsed from domain models.
+
+        When we produce a DTO type, we transfer this meta from the ``KwargDefinition`` to a ``msgspec.Meta`` instance.
+        However, so far this has only included constraints, not attributes such as description, examples, and title.
+
+        This change ensures that we transfer the OpenAPI meta for the complete intersection of fields that exist on
+        both ``KwargDefinition`` and ``Meta``.
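+
+        A minimal sketch of metadata that is now carried through to a generated DTO (the model is illustrative
+        and uses ``msgspec.Meta`` annotations):
+
+        .. code-block:: python
+
+            from dataclasses import dataclass
+            from typing import Annotated
+
+            from msgspec import Meta
+
+
+            @dataclass
+            class User:
+                # description, title and examples are now transferred alongside constraints
+                # such as ``min_length`` when a DTO is generated from this model
+                name: Annotated[str, Meta(description="The user's name", title="Name", examples=["litestar"], min_length=1)]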
+
+ .. change:: kwarg ambiguity exc msg for path params
+ :type: bugfix
+ :pr: 3261
+
+ Fixes the way we construct the exception message when there is a kwarg ambiguity detected for path parameters.
+
.. changelog:: 2.7.0
:date: 2024-03-10
@@ -19,8 +134,10 @@
:pr: 3176
Fix an issue with SSE where JavaScript clients fail to receive an event without data.
- The `spec `_ is not clear in whether or not an event without data is ok.
- Considering the EventSource "client" is not ok with it, and that it's so easy DX-wise to make the mistake not explicitly sending it, this change fixes it by defaulting to the empty-string
+ The `spec `_ is
+ not clear in whether or not an event without data is ok.
+ Considering the EventSource "client" is not ok with it, and that it's so easy DX-wise to make the mistake not
+ explicitly sending it, this change fixes it by defaulting to the empty-string
.. change:: Support ``ResponseSpec(..., examples=[...])``
:type: feature
@@ -54,7 +171,7 @@
:pr: 3096
:issue: 3088
- Automatically encode responses with media type of the form "application/+json" as json.
+ Automatically encode responses with media type of the form ``application/+json`` as json.
.. change:: Allow reusable ``Router`` instances
:type: feature
@@ -64,7 +181,7 @@
It was not possible to re-attach a router instance once it was attached. This
makes that possible.
- The router instance now gets deecopied when it's registered to another router.
+ The router instance now gets deepcopied when it's registered to another router.
The application startup performance gets a hit here, but the same approach is
already used for controllers and handlers, so this only harmonizes the
diff --git a/docs/usage/applications.rst b/docs/usage/applications.rst
index 82de231bb4..69f5e92f0f 100644
--- a/docs/usage/applications.rst
+++ b/docs/usage/applications.rst
@@ -264,10 +264,13 @@ Parameters that support layering are:
* :ref:`include_in_schema `
* :doc:`middleware `
* :ref:`opt `
+* :ref:`request_class `
* :ref:`response_class `
* :ref:`response_cookies `
* :ref:`response_headers `
* :doc:`return_dto `
* ``security``
* ``tags``
+* ``type_decoders``
* ``type_encoders``
+* :ref:`websocket_class `
diff --git a/docs/usage/middleware/builtin-middleware.rst b/docs/usage/middleware/builtin-middleware.rst
index 537c31e1d9..e95102efcf 100644
--- a/docs/usage/middleware/builtin-middleware.rst
+++ b/docs/usage/middleware/builtin-middleware.rst
@@ -42,29 +42,79 @@ This middleware prevents CSRF attacks by doing the following:
1. On the first "safe" request (e.g GET) - set a cookie with a special token created by the server
2. On each subsequent "unsafe" request (e.g POST) - make sure the request contains either a
- form field or an additional header that has this token
-
+ form field or an additional header that has this token (more on this below)
To enable CSRF protection in a Litestar application simply pass an instance of
:class:`CSRFConfig <.config.csrf.CSRFConfig>` to the Litestar constructor:
.. code-block:: python
- from litestar import Litestar
+ from litestar import Litestar, get, post
from litestar.config.csrf import CSRFConfig
+
+ @get()
+ async def get_resource() -> str:
+ # GET is one of the safe methods
+ return "some_resource"
+
+ @post("{id:int}")
+ async def create_resource(id: int) -> bool:
+ # POST is one of the unsafe methods
+ return True
+
csrf_config = CSRFConfig(secret="my-secret")
- app = Litestar(route_handlers=[...], csrf_config=csrf_config)
+ app = Litestar([get_resource, create_resource], csrf_config=csrf_config)
-Routes can be marked as being exempt from the protection offered by this middleware via
-:ref:`handler opts `
+The following snippet demonstrates how to change the cookie name to "some-cookie-name" and the header name to "some-header-name":
.. code-block:: python
- from litestar import post
+ csrf_config = CSRFConfig(secret="my-secret", cookie_name='some-cookie-name', header_name='some-header-name')
+
+
+A CSRF-protected route can be accessed by any client that sends the token via either the header or the form-data key.
+
+
+.. note::
+
+    The form-data key cannot currently be configured. The token can only be passed via the key "_csrf_token".
+
+In Python, any HTTP client such as `requests `_ or `httpx `_ can be used.
+Using a client or session is recommended for the cookie persistence it offers across requests.
+The following is an example using ``httpx.Client``.
+
+.. code-block:: python
+ import httpx
+
+
+ with httpx.Client() as client:
+ get_response = client.get("http://localhost:8000/")
+
+ # "csrftoken" is the default cookie name
+ csrf = get_response.cookies["csrftoken"]
+
+ # "x-csrftoken" is the default header name
+    post_response_using_header = client.post("http://localhost:8000/1", headers={"x-csrftoken": csrf})
+ assert post_response_using_header.status_code == 201
+
+ # "_csrf_token" is the default *non* configurable form-data key
+ post_response_using_form_data = client.post("http://localhost:8000/1", data={"_csrf_token": csrf})
+ assert post_response_using_form_data.status_code == 201
+
+ # despite the header being passed, this request will fail as it does not have a cookie in its session
+ # note the usage of ``httpx.post`` instead of ``client.post``
+ post_response_with_no_persisted_cookie = httpx.post("http://localhost:8000/1", headers={"x-csrftoken": csrf})
+ assert post_response_with_no_persisted_cookie.status_code == 403
+ assert "CSRF token verification failed" in post_response_with_no_persisted_cookie.text
+
+Routes can be marked as being exempt from the protection offered by this middleware via
+:ref:`handler opts `
+
+.. code-block:: python
@post("/post", exclude_from_csrf=True)
def handler() -> None: ...
@@ -74,6 +124,12 @@ If you need to exempt many routes at once you might want to consider using the
:attr:`exclude <.config.csrf.CSRFConfig.exclude>` kwarg which accepts list of path
patterns to skip in the middleware.
+.. seealso::
+
+ * `Safe and Unsafe (HTTP Methods) `_
+ * `HTTPX Clients `_
+ * `Requests Session `_
+
Allowed Hosts
-------------
diff --git a/docs/usage/security/guards.rst b/docs/usage/security/guards.rst
index 673906cac5..f32ab35de1 100644
--- a/docs/usage/security/guards.rst
+++ b/docs/usage/security/guards.rst
@@ -121,6 +121,12 @@ As you can see in the above examples - ``guards`` is a list. This means you can
Unlike ``dependencies`` , guards do not override each other but are rather *cumulative*. This means that you can define
guards on different levels of your app, and they will combine.
+.. caution::
+
+    If guards are placed at the controller or the app level, they **will** be executed on all ``OPTIONS`` requests as well.
+    For more details, including a workaround, refer to https://github.com/litestar-org/litestar/issues/2314.
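+
+    One possible mitigation is to return early from the guard for ``OPTIONS`` requests. This is an illustrative
+    sketch, not necessarily the workaround discussed in the linked issue:
+
+    .. code-block:: python
+
+        from litestar.connection import ASGIConnection
+        from litestar.handlers.base import BaseRouteHandler
+
+
+        def authorization_guard(connection: ASGIConnection, route_handler: BaseRouteHandler) -> None:
+            # skip authorization for OPTIONS (e.g. CORS pre-flight) requests; illustrative only
+            if connection.scope.get("method") == "OPTIONS":
+                return
+            ...  # perform the actual authorization logic here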
+
+
The route handler "opt" key
---------------------------
diff --git a/docs/usage/testing.rst b/docs/usage/testing.rst
index 33f7094675..90102f640d 100644
--- a/docs/usage/testing.rst
+++ b/docs/usage/testing.rst
@@ -79,16 +79,22 @@ Since we would probably need to use the client in multiple places, it's better t
.. code-block:: python
:caption: tests/conftest.py
+ from typing import TYPE_CHECKING, Iterator
+
import pytest
from litestar.testing import TestClient
from my_app.main import app
+ if TYPE_CHECKING:
+ from litestar import Litestar
+
@pytest.fixture(scope="function")
- def test_client() -> TestClient:
- return TestClient(app=app)
+ def test_client() -> Iterator[TestClient[Litestar]]:
+ with TestClient(app=app) as client:
+ yield client
.. tab-item:: Async
@@ -97,16 +103,22 @@ Since we would probably need to use the client in multiple places, it's better t
.. code-block:: python
:caption: tests/conftest.py
+ from typing import TYPE_CHECKING, AsyncIterator
+
import pytest
from litestar.testing import AsyncTestClient
from my_app.main import app
+ if TYPE_CHECKING:
+ from litestar import Litestar
+
@pytest.fixture(scope="function")
- async def test_client() -> AsyncTestClient:
- return AsyncTestClient(app=app)
+ async def test_client() -> AsyncIterator[AsyncTestClient[Litestar]]:
+ async with AsyncTestClient(app=app) as client:
+ yield client
We would then be able to rewrite our test like so:
diff --git a/litestar/_kwargs/kwargs_model.py b/litestar/_kwargs/kwargs_model.py
index e69563622e..01ed2e5aef 100644
--- a/litestar/_kwargs/kwargs_model.py
+++ b/litestar/_kwargs/kwargs_model.py
@@ -457,16 +457,16 @@ def _validate_raw_kwargs(
*list(layered_parameters.keys()),
}
- for intersection in (
+ intersection = (
path_parameters.intersection(dependency_keys)
or path_parameters.intersection(parameter_names)
or dependency_keys.intersection(parameter_names)
- ):
- if intersection:
- raise ImproperlyConfiguredException(
- f"Kwarg resolution ambiguity detected for the following keys: {', '.join(intersection)}. "
- f"Make sure to use distinct keys for your dependencies, path parameters and aliased parameters."
- )
+ )
+ if intersection:
+ raise ImproperlyConfiguredException(
+ f"Kwarg resolution ambiguity detected for the following keys: {', '.join(intersection)}. "
+ f"Make sure to use distinct keys for your dependencies, path parameters, and aliased parameters."
+ )
if used_reserved_kwargs := {
*parameter_names,
diff --git a/litestar/_openapi/responses.py b/litestar/_openapi/responses.py
index 7701d02659..6b0f312d3c 100644
--- a/litestar/_openapi/responses.py
+++ b/litestar/_openapi/responses.py
@@ -298,9 +298,7 @@ def create_error_responses(exceptions: list[type[HTTPException]]) -> Iterator[tu
),
},
description=pascal_case_to_text(get_name(exc)),
- examples={
- exc.__name__: Example(value={"status_code": status_code, "detail": example_detail, "extra": {}})
- },
+ examples=[{"status_code": status_code, "detail": example_detail, "extra": {}}],
)
)
if len(exceptions_schemas) > 1: # noqa: SIM108
diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py
index 3b39ed44cd..43e5d328b4 100644
--- a/litestar/_openapi/schema_generation/schema.py
+++ b/litestar/_openapi/schema_generation/schema.py
@@ -44,7 +44,7 @@
_should_create_enum_schema,
_should_create_literal_schema,
_type_or_first_not_none_inner_type,
- get_formatted_examples,
+ get_json_schema_formatted_examples,
)
from litestar.datastructures import UploadFile
from litestar.exceptions import ImproperlyConfiguredException
@@ -558,7 +558,7 @@ def process_schema_result(self, field: FieldDefinition, schema: Schema) -> Schem
not isinstance(value, Hashable) or not self.is_undefined(value)
):
if schema_key == "examples":
- value = get_formatted_examples(field, cast("list[Example]", value))
+ value = get_json_schema_formatted_examples(cast("list[Example]", value))
# we only want to transfer values from the `KwargDefinition` to `Schema` if the schema object
# doesn't already have a value for that property. For example, if a field is a constrained date,
@@ -580,7 +580,7 @@ def process_schema_result(self, field: FieldDefinition, schema: Schema) -> Schem
if not schema.examples and self.generate_examples:
from litestar._openapi.schema_generation.examples import create_examples_for_field
- schema.examples = get_formatted_examples(field, create_examples_for_field(field))
+ schema.examples = get_json_schema_formatted_examples(create_examples_for_field(field))
if schema.title and schema.type == OpenAPIType.OBJECT:
key = _get_normalized_schema_key(field.annotation)
@@ -595,7 +595,7 @@ def create_component_schema(
property_fields: Mapping[str, FieldDefinition],
openapi_type: OpenAPIType = OpenAPIType.OBJECT,
title: str | None = None,
- examples: Mapping[str, Example] | None = None,
+ examples: list[Any] | None = None,
) -> Schema:
"""Create a schema for the components/schemas section of the OpenAPI spec.
diff --git a/litestar/_openapi/schema_generation/utils.py b/litestar/_openapi/schema_generation/utils.py
index 37f7dc321c..7ce27ca945 100644
--- a/litestar/_openapi/schema_generation/utils.py
+++ b/litestar/_openapi/schema_generation/utils.py
@@ -107,3 +107,8 @@ def get_formatted_examples(field_definition: FieldDefinition, examples: Sequence
name = name.lower()
return {f"{name}-example-{i}": example for i, example in enumerate(examples, 1)}
+
+
+def get_json_schema_formatted_examples(examples: Sequence[Example]) -> list[Any]:
+ """Format the examples into the JSON schema format."""
+ return [example.value for example in examples]
diff --git a/litestar/contrib/pydantic/pydantic_schema_plugin.py b/litestar/contrib/pydantic/pydantic_schema_plugin.py
index ffc50f13e1..2c189e4416 100644
--- a/litestar/contrib/pydantic/pydantic_schema_plugin.py
+++ b/litestar/contrib/pydantic/pydantic_schema_plugin.py
@@ -4,7 +4,6 @@
from typing_extensions import Annotated
-from litestar._openapi.schema_generation.utils import get_formatted_examples
from litestar.contrib.pydantic.utils import (
create_field_definitions_for_computed_fields,
is_pydantic_2_model,
@@ -15,7 +14,7 @@
pydantic_unwrap_and_get_origin,
)
from litestar.exceptions import MissingDependencyException
-from litestar.openapi.spec import Example, OpenAPIFormat, OpenAPIType, Schema
+from litestar.openapi.spec import OpenAPIFormat, OpenAPIType, Schema
from litestar.plugins import OpenAPISchemaPlugin
from litestar.types import Empty
from litestar.typing import FieldDefinition
@@ -314,11 +313,5 @@ def for_pydantic_model(cls, field_definition: FieldDefinition, schema_creator: S
required=sorted(f.name for f in property_fields.values() if f.is_required),
property_fields=property_fields,
title=title,
- examples=(
- None
- if example is None
- else get_formatted_examples(
- field_definition, [Example(description=f"Example {field_definition.name} value", value=example)]
- )
- ),
+ examples=None if example is None else [example],
)
diff --git a/litestar/dto/_backend.py b/litestar/dto/_backend.py
index cf6cd0676c..1c48dc0555 100644
--- a/litestar/dto/_backend.py
+++ b/litestar/dto/_backend.py
@@ -750,14 +750,17 @@ def _create_struct_field_meta_for_field_definition(field_definition: TransferDTO
return None
return msgspec.Meta(
- gt=kwarg_definition.gt,
+ description=kwarg_definition.description,
+ examples=[e.value for e in kwarg_definition.examples or []],
ge=kwarg_definition.ge,
- lt=kwarg_definition.lt,
+ gt=kwarg_definition.gt,
le=kwarg_definition.le,
- multiple_of=kwarg_definition.multiple_of,
- min_length=kwarg_definition.min_length if not field_definition.is_partial else None,
+ lt=kwarg_definition.lt,
max_length=kwarg_definition.max_length if not field_definition.is_partial else None,
+ min_length=kwarg_definition.min_length if not field_definition.is_partial else None,
+ multiple_of=kwarg_definition.multiple_of,
pattern=kwarg_definition.pattern,
+ title=kwarg_definition.title,
)
diff --git a/litestar/logging/config.py b/litestar/logging/config.py
index d9d376e3f2..a4a3713939 100644
--- a/litestar/logging/config.py
+++ b/litestar/logging/config.py
@@ -2,13 +2,12 @@
import sys
from abc import ABC, abstractmethod
-from dataclasses import asdict, dataclass, field
+from dataclasses import asdict, dataclass, field, fields
from importlib.util import find_spec
from logging import INFO
from typing import TYPE_CHECKING, Any, Callable, Literal, cast
from litestar.exceptions import ImproperlyConfiguredException, MissingDependencyException
-from litestar.serialization import encode_json
from litestar.serialization.msgspec_hooks import _msgspec_json_encoder
from litestar.utils.deprecation import deprecated
@@ -49,7 +48,20 @@
}
if sys.version_info >= (3, 12, 0):
- default_handlers["queue_listener"]["handlers"] = ["console"]
+ default_handlers["queue_listener"].update(
+ {
+ "class": "logging.handlers.QueueHandler",
+ "queue": {
+ "()": "queue.Queue",
+ "maxsize": -1,
+ },
+ "listener": "litestar.logging.standard.LoggingQueueListener",
+ "handlers": ["console"],
+ }
+ )
+
+ # do not format twice, the console handler will do the job
+ del default_handlers["queue_listener"]["formatter"]
default_picologging_handlers: dict[str, dict[str, Any]] = {
@@ -228,21 +240,25 @@ def configure(self) -> GetLogger:
A 'logging.getLogger' like function.
"""
- if "picologging" in str(encode_json(self.handlers)):
+ excluded_fields: tuple[str, ...]
+ if "picologging" in " ".join([handler["class"] for handler in self.handlers.values()]):
try:
from picologging import config, getLogger
except ImportError as e:
raise MissingDependencyException("picologging") from e
- values = {
- k: v
- for k, v in asdict(self).items()
- if v is not None and k not in ("incremental", "configure_root_logger")
- }
+ excluded_fields = ("incremental", "configure_root_logger")
else:
from logging import config, getLogger # type: ignore[no-redef, assignment]
- values = {k: v for k, v in asdict(self).items() if v is not None and k not in ("configure_root_logger",)}
+ excluded_fields = ("configure_root_logger",)
+
+ values = {
+ _field.name: getattr(self, _field.name)
+ for _field in fields(self)
+ if getattr(self, _field.name) is not None and _field.name not in excluded_fields
+ }
+
if not self.configure_root_logger:
values.pop("root")
config.dictConfig(values)
diff --git a/litestar/logging/standard.py b/litestar/logging/standard.py
index 467b2f1215..131c0edf62 100644
--- a/litestar/logging/standard.py
+++ b/litestar/logging/standard.py
@@ -1,34 +1,47 @@
from __future__ import annotations
import atexit
-import sys
-from logging import StreamHandler
+from logging import Handler, LogRecord, StreamHandler
from logging.handlers import QueueHandler, QueueListener
from queue import Queue
from typing import Any
from litestar.logging._utils import resolve_handlers
-__all__ = ("QueueListenerHandler",)
+__all__ = ("LoggingQueueListener", "QueueListenerHandler")
-if sys.version_info < (3, 12):
+class LoggingQueueListener(QueueListener):
+ """Custom ``QueueListener`` which starts and stops the listening process."""
- class QueueListenerHandler(QueueHandler):
- """Configure queue listener and handler to support non-blocking logging configuration."""
+ def __init__(self, queue: Queue[LogRecord], *handlers: Handler, respect_handler_level: bool = False) -> None:
+ """Initialize ``LoggingQueueListener``.
- def __init__(self, handlers: list[Any] | None = None) -> None:
- """Initialize `?QueueListenerHandler`.
+ Args:
+ queue: The queue to send messages to
+ *handlers: A list of handlers which will handle entries placed on the queue
+ respect_handler_level: If ``respect_handler_level`` is ``True``, a handler's level is respected (compared with the level for the message) when deciding whether to pass messages to that handler
+ """
+ super().__init__(queue, *handlers, respect_handler_level=respect_handler_level)
+ self.start()
+ atexit.register(self.stop)
- Args:
- handlers: Optional 'ConvertingList'
- """
- super().__init__(Queue(-1))
- handlers = resolve_handlers(handlers) if handlers else [StreamHandler()]
- self.listener = QueueListener(self.queue, *handlers)
- self.listener.start()
- atexit.register(self.listener.stop)
+class QueueListenerHandler(QueueHandler):
+ """Configure queue listener and handler to support non-blocking logging configuration.
-else:
- QueueListenerHandler = QueueHandler
+ .. caution::
+
+ This handler doesn't work with Python >= 3.12 and ``logging.config.dictConfig``. It might
+    be deprecated in the future. Please use ``logging.handlers.QueueHandler`` instead.
+ """
+
+ def __init__(self, handlers: list[Any] | None = None) -> None:
+ """Initialize ``QueueListenerHandler``.
+
+ Args:
+ handlers: Optional 'ConvertingList'
+ """
+ super().__init__(Queue(-1))
+ handlers = resolve_handlers(handlers) if handlers else [StreamHandler()]
+ self.listener = LoggingQueueListener(self.queue, *handlers) # type: ignore[arg-type]
diff --git a/litestar/openapi/spec/schema.py b/litestar/openapi/spec/schema.py
index 41c122aa1e..4be2b7cfa0 100644
--- a/litestar/openapi/spec/schema.py
+++ b/litestar/openapi/spec/schema.py
@@ -9,7 +9,6 @@
if TYPE_CHECKING:
from litestar.openapi.spec.discriminator import Discriminator
from litestar.openapi.spec.enums import OpenAPIFormat, OpenAPIType
- from litestar.openapi.spec.example import Example
from litestar.openapi.spec.external_documentation import ExternalDocumentation
from litestar.openapi.spec.reference import Reference
from litestar.openapi.spec.xml import XML
@@ -610,12 +609,8 @@ class Schema(BaseSchemaObject):
Omitting these keywords has the same behavior as values of false.
"""
- examples: Mapping[str, Example] | None = None
- """The value of this must be an array containing the example values directly or a mapping of string
- to an ``Example`` instance.
-
- This is based on the ``examples`` keyword of JSON Schema.
- """
+ examples: list[Any] | None = None
+ """The value of this must be an array containing the example values."""
discriminator: Discriminator | None = None
"""Adds support for polymorphism.
diff --git a/litestar/typing.py b/litestar/typing.py
index ec92689f5a..3a275573f3 100644
--- a/litestar/typing.py
+++ b/litestar/typing.py
@@ -4,7 +4,19 @@
from copy import deepcopy
from dataclasses import dataclass, is_dataclass, replace
from inspect import Parameter, Signature
-from typing import Any, AnyStr, Callable, Collection, ForwardRef, Literal, Mapping, Protocol, Sequence, TypeVar, cast
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Collection,
+ ForwardRef,
+ Literal,
+ Mapping,
+ Protocol,
+ Sequence,
+ TypeVar,
+ cast,
+)
from msgspec import UnsetType
from typing_extensions import NotRequired, Required, Self, get_args, get_origin, get_type_hints, is_typeddict
@@ -81,6 +93,7 @@ def _parse_metadata(value: Any, is_sequence_container: bool, extra: dict[str, An
**cast("dict[str, Any]", extra or getattr(value, "extra", None) or {}),
**(getattr(value, "json_schema_extra", None) or {}),
}
+ example_list: list[Any] | None
if example := extra.pop("example", None):
example_list = [Example(value=example)]
elif examples := getattr(value, "examples", None):
diff --git a/pdm.lock b/pdm.lock
index c75f377f35..dc29baa979 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -21,7 +21,7 @@ files = [
[[package]]
name = "advanced-alchemy"
-version = "0.7.4"
+version = "0.8.1"
requires_python = ">=3.8"
summary = "Ready-to-go SQLAlchemy concoctions."
dependencies = [
@@ -31,8 +31,8 @@ dependencies = [
"typing-extensions>=4.0.0",
]
files = [
- {file = "advanced_alchemy-0.7.4-py3-none-any.whl", hash = "sha256:52502d1131963dfcdca52c0edd6935e1c21ca3b5612677769ccf48f214383053"},
- {file = "advanced_alchemy-0.7.4.tar.gz", hash = "sha256:d4097fc337d6c971fc673695fdc3ff5f0d69282c7fea3f4754ef7c345051c4c3"},
+ {file = "advanced_alchemy-0.8.1-py3-none-any.whl", hash = "sha256:85078584914c7e7562fb7f0c750e4c8be9a14f43d52ab422b3a621170f04635f"},
+ {file = "advanced_alchemy-0.8.1.tar.gz", hash = "sha256:13921d8c47d608f63ad7f16347bd49b9c23e444dd8da62b8841e08b2367d4227"},
]
[[package]]
@@ -1110,12 +1110,12 @@ files = [
[[package]]
name = "fsspec"
-version = "2024.2.0"
+version = "2024.3.1"
requires_python = ">=3.8"
summary = "File-system specification"
files = [
- {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"},
- {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"},
+ {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"},
+ {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"},
]
[[package]]
@@ -1455,7 +1455,7 @@ files = [
[[package]]
name = "hypothesis"
-version = "6.99.5"
+version = "6.99.13"
requires_python = ">=3.8"
summary = "A library for property-based testing"
dependencies = [
@@ -1464,8 +1464,8 @@ dependencies = [
"sortedcontainers<3.0.0,>=2.1.0",
]
files = [
- {file = "hypothesis-6.99.5-py3-none-any.whl", hash = "sha256:0ab4968fa4c38ba6d3cd9f54f3d637e3c72fe136bff11373355f2e06416c6a7d"},
- {file = "hypothesis-6.99.5.tar.gz", hash = "sha256:1f795b71abe46f3919591acf7fc05cbcd9b601b97806d97433e0eb9bdb200861"},
+ {file = "hypothesis-6.99.13-py3-none-any.whl", hash = "sha256:b538df1d22365df84f94c38fb2d9c41a222373594c2a910cc8f4ddc68240a62f"},
+ {file = "hypothesis-6.99.13.tar.gz", hash = "sha256:e425e8a3f1912e44f62ff3e2768dca19c79f46d43ec70fa56e96e2d7194ccd2d"},
]
[[package]]
@@ -1591,15 +1591,15 @@ files = [
[[package]]
name = "importlib-resources"
-version = "6.3.0"
+version = "6.4.0"
requires_python = ">=3.8"
summary = "Read resources from Python packages"
dependencies = [
"zipp>=3.1.0; python_version < \"3.10\"",
]
files = [
- {file = "importlib_resources-6.3.0-py3-none-any.whl", hash = "sha256:783407aa1cd05550e3aa123e8f7cfaebee35ffa9cb0242919e2d1e4172222705"},
- {file = "importlib_resources-6.3.0.tar.gz", hash = "sha256:166072a97e86917a9025876f34286f549b9caf1d10b35a1b372bffa1600c6569"},
+ {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"},
+ {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"},
]
[[package]]
@@ -1672,7 +1672,7 @@ files = [
[[package]]
name = "litestar-sphinx-theme"
version = "0.2.0"
-requires_python = "<4.0,>=3.8"
+requires_python = ">=3.8,<4.0"
git = "https://github.com/litestar-org/litestar-sphinx-theme.git"
revision = "c5ce66aadc8f910c24f54bf0d172798c237a67eb"
summary = "A Sphinx theme for the Litestar organization"
@@ -1791,18 +1791,18 @@ files = [
[[package]]
name = "minijinja"
-version = "1.0.8"
+version = "1.0.14"
requires_python = ">=3.8"
summary = "An experimental Python binding of the Rust MiniJinja template engine."
files = [
- {file = "minijinja-1.0.8-cp38-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e24ef6508af8b6e4efaf63ddfa66ef0502cbf201a1d30c4d7d520e4c22eaf8cc"},
- {file = "minijinja-1.0.8-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27184753e54f967fcc0e62876badcf8d09d12e5df3f688a7ab641b7bbe05444f"},
- {file = "minijinja-1.0.8-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9b1d6ff5d73881c7283065560604333673f3c6956afe723f36dd48421eddec81"},
- {file = "minijinja-1.0.8-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808f72613fd057498200f947941c6b17c5e6b3e61bbd5fb10f1422270d27b126"},
- {file = "minijinja-1.0.8-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2d9e0d7a7fe78523d72cd7d362e377eaadaf767fee26a75577205991daea2d1"},
- {file = "minijinja-1.0.8-cp38-abi3-win32.whl", hash = "sha256:00c1fb73f5300a0736677fb21bfee6890cd70c5599fa28f31dd790988e610cc5"},
- {file = "minijinja-1.0.8-cp38-abi3-win_amd64.whl", hash = "sha256:97a3204ca03afe57f88bb3e71e32c71658236fd52e471768fc8a120847045d95"},
- {file = "minijinja-1.0.8.tar.gz", hash = "sha256:c204b15dc663326214a5e05670c30c66afead2420add1c6246b30eaff1aeb225"},
+ {file = "minijinja-1.0.14-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:90a18cf6d0c046c33e5c331c2de36386a2b41f2a128cbc9c0a38924acf42edda"},
+ {file = "minijinja-1.0.14-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1928e2c3716b6a5f3a93d5213a7454a356f11c84d71b8a7f082acaace366168a"},
+ {file = "minijinja-1.0.14-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3483deab95c5a1b5a46b2244bef06a97a5293608885f7143631c31e7cb39d84b"},
+ {file = "minijinja-1.0.14-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933e8ac097dc384d1bca60feff5e71a0c25744103f4a73e396f7b718a144dca8"},
+ {file = "minijinja-1.0.14-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99a451ccde5ac3c1a829d150e2a864dbfe40e0f446a9db17fbfefa7fa79e7664"},
+ {file = "minijinja-1.0.14-cp38-abi3-win32.whl", hash = "sha256:429e4c4ef03b8ae7b46a61c1fdd446375050bffcac8ec198787a07d828d1ba1a"},
+ {file = "minijinja-1.0.14-cp38-abi3-win_amd64.whl", hash = "sha256:e2f2fa3311f0c3d1f0ce78d73998423e5723a309122ca149cc9f634e0c407728"},
+ {file = "minijinja-1.0.14.tar.gz", hash = "sha256:a7b8ba9bb3ce32b6d83d50730882c14ddf8643b245378367dac41d0a24104706"},
]
[[package]]
@@ -2200,7 +2200,7 @@ files = [
[[package]]
name = "piccolo"
-version = "1.4.2"
+version = "1.5.0"
requires_python = ">=3.8.0"
summary = "A fast, user friendly ORM and query builder which supports asyncio."
dependencies = [
@@ -2213,8 +2213,8 @@ dependencies = [
"typing-extensions>=4.3.0",
]
files = [
- {file = "piccolo-1.4.2-py3-none-any.whl", hash = "sha256:3c64f7116bea6c3fd72ba2eac3f90c3c2f394341dbe57676139f983815a06882"},
- {file = "piccolo-1.4.2.tar.gz", hash = "sha256:a9219e4f0f1f1f19f571e3a4d392403f80ee185557e37cb01ae91fe64716b37e"},
+ {file = "piccolo-1.5.0-py3-none-any.whl", hash = "sha256:2cd5651e16cadaa8e599a5dc0b07426e6700fd44ce8fdd4f0afa78defde6a48b"},
+ {file = "piccolo-1.5.0.tar.gz", hash = "sha256:dde6b96a2bb3fd90a416a0964ec940f22d0d08842ebfefb48585fca14093ede3"},
]
[[package]]
@@ -2841,29 +2841,29 @@ files = [
[[package]]
name = "pytest-asyncio"
-version = "0.23.5.post1"
+version = "0.23.6"
requires_python = ">=3.8"
summary = "Pytest support for asyncio"
dependencies = [
"pytest<9,>=7.0.0",
]
files = [
- {file = "pytest-asyncio-0.23.5.post1.tar.gz", hash = "sha256:b9a8806bea78c21276bc34321bbf234ba1b2ea5b30d9f0ce0f2dea45e4685813"},
- {file = "pytest_asyncio-0.23.5.post1-py3-none-any.whl", hash = "sha256:30f54d27774e79ac409778889880242b0403d09cabd65b727ce90fe92dd5d80e"},
+ {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"},
+ {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"},
]
[[package]]
name = "pytest-cov"
-version = "4.1.0"
-requires_python = ">=3.7"
+version = "5.0.0"
+requires_python = ">=3.8"
summary = "Pytest plugin for measuring coverage."
dependencies = [
"coverage[toml]>=5.2.1",
"pytest>=4.6",
]
files = [
- {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
- {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
]
[[package]]
@@ -2880,15 +2880,15 @@ files = [
[[package]]
name = "pytest-mock"
-version = "3.12.0"
+version = "3.14.0"
requires_python = ">=3.8"
summary = "Thin-wrapper around the mock package for easier use with pytest"
dependencies = [
- "pytest>=5.0",
+ "pytest>=6.2.5",
]
files = [
- {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
- {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]
[[package]]
@@ -3213,27 +3213,27 @@ files = [
[[package]]
name = "ruff"
-version = "0.3.2"
+version = "0.3.4"
requires_python = ">=3.7"
summary = "An extremely fast Python linter and code formatter, written in Rust."
files = [
- {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"},
- {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"},
- {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"},
- {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"},
- {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"},
- {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"},
- {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"},
- {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"},
- {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"},
- {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"},
- {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"},
+ {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"},
+ {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"},
+ {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"},
+ {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"},
+ {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"},
+ {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"},
]
[[package]]
@@ -3679,69 +3679,69 @@ files = [
[[package]]
name = "time-machine"
-version = "2.14.0"
+version = "2.14.1"
requires_python = ">=3.8"
summary = "Travel through time in your tests."
dependencies = [
"python-dateutil",
]
files = [
- {file = "time-machine-2.14.0.tar.gz", hash = "sha256:b1076afb7825122a89a7be157d3a02f69f07d6fa0bacfaec463c71ac0488bd58"},
- {file = "time_machine-2.14.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7aab93218e9ad394164d69de164a81a4dce5a8b4528a07b77de806e422032fe2"},
- {file = "time_machine-2.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0124430457b4a5d4c33f739ea858bfbcdacba7cd0c72cc6c607d016a0bcac13"},
- {file = "time_machine-2.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a04eee7c5832efc57203bbd0d1d7b11ce52dbd35ae592edfdd4c25808471d06"},
- {file = "time_machine-2.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f96ed5e7fe3cae13b23ff1c4e93c7f90165289b477b34f1da3fa1277bb0f5a6"},
- {file = "time_machine-2.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5666edb45201679786611b2f016ad5d655acc675e6f62f6d4e62891dbcdfe4"},
- {file = "time_machine-2.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3c784c6bcc82856ca69f8cf26ce56f2cf06a113d340d929c41921d03f6b17b38"},
- {file = "time_machine-2.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:380d0a0ebda70637629ec18e1ca0ee098c04268a71d18852a3c4317fca7d7393"},
- {file = "time_machine-2.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7552d38b1f985feaa3eb3142873881e96ca07be02137b60414daf709bab36a2c"},
- {file = "time_machine-2.14.0-cp310-cp310-win32.whl", hash = "sha256:6c02dac22ed1669045bd39d214a5c52e097fee82fdb8d665700ff9f6cb499cfe"},
- {file = "time_machine-2.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:451583aecfc6b41805a6685b72cefd65c068313bcb39a1a6e246cbcccfda71d2"},
- {file = "time_machine-2.14.0-cp310-cp310-win_arm64.whl", hash = "sha256:029cd697f9cd13b4701e256eb79d995f6728e80da0c825028c22035a2c222720"},
- {file = "time_machine-2.14.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:412ace2c9053a7f4c513d8723f78bec3a5c2b4721e6bbf60f33de94abc88503a"},
- {file = "time_machine-2.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ed812603f0233770faba6f7e60f5ed04bae1a5290c8159f19cb8c6888f99fc1"},
- {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e00a9cff6df58cfe584ab55cbb21acdaa3ecc6d75414d59cf65726b2e3d90a6c"},
- {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10e30c8e9b5ef1e4b10e588d3e789888ff2a94bcc9120d300954116a5d83556b"},
- {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1b006d483d11f0dfe64b2a7f17d5fa16c3fd2940042731f5b3bd1533c7d827"},
- {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb9f6c62a205f12f6f054a027df221927f8066b2bca2b82477793291460410fa"},
- {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f01da787c2ac4c05e3722e94bf70da9698548c13ccfe6ca44ca2633c4b1cc24d"},
- {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d56a67b4656ae527b8152dd682642e31735559de653619116e92ab345b86a"},
- {file = "time_machine-2.14.0-cp311-cp311-win32.whl", hash = "sha256:14a82de9b00ed8427e4b9136a6d8e10a8c330b5cea62b5813fbedde978701c4a"},
- {file = "time_machine-2.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:26bf274f6b591ddc0f41e54b4b3a74d83748177dd96c5cfb8496adae1ada00ab"},
- {file = "time_machine-2.14.0-cp311-cp311-win_arm64.whl", hash = "sha256:9d2fac0e454c3aa63c10b331f5349fa2c961d58c4d430113f14698aac9565b3c"},
- {file = "time_machine-2.14.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a55717b8e3a153e4b7a9b6f551cd89e9d037db7e7732fc909c436d94e79628"},
- {file = "time_machine-2.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f92693a7ceedde14d507e906a26600ef11b80ca17cccfa91906266510f07b024"},
- {file = "time_machine-2.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4a2670120780ad67c327f065eed03be917209cecd6fb0e9ada29720dbc1411e9"},
- {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e5a9ff08c585b8aac5d3db80a828dc549f5962c07297e1441e04cb0825464ac"},
- {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4e1a3c8bca77201dc6684d3c1d65d3ca4249872beb7ee9283c0b6e2df5cb677"},
- {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb603f46281c2d7f5c9607dd195107c9642af9bb36806386f66087b2741d0327"},
- {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9227c26a8d9e0cb0727917aa6470855320bde85f65deba58b988a8c0cc04bf9a"},
- {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4df6ee8f1ed9d9ca4aa7750e5cfc0d8bc0143c2cac068258af5bad5f50e3b3e8"},
- {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a59bee89bf015f3ff1db012436bc7905fd99a4aa827d2feb73f1301afb0cb5c6"},
- {file = "time_machine-2.14.0-cp312-cp312-win32.whl", hash = "sha256:892ee00cc176c9da6b465cf9d44da408fa3297d72fcb45aec1aac09d8e381f22"},
- {file = "time_machine-2.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:9ca7c08ded824e6ae138280524d9ebcceaf50623e5263f24e38a28259215fb37"},
- {file = "time_machine-2.14.0-cp312-cp312-win_arm64.whl", hash = "sha256:b604d904dbe5aa36be37df61b47c15d87c359764dadb70f3a8eae7191e382bd4"},
- {file = "time_machine-2.14.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:094e4149091f8f12691f71ecae8c8830e1cd23e5e22448a74c4e5a05310fd1cd"},
- {file = "time_machine-2.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74be790ced84b063d4c63ec7618d9b2404f3e79c1397750197a046b303829eef"},
- {file = "time_machine-2.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17f0c84329af5eb24544ac9f7097c20df3777cfce2cce8c1c4595055bef78102"},
- {file = "time_machine-2.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3e83e6976a3e0521fce8fd4a6d38d9385ea129cc433fb7a66c0918a499b18c"},
- {file = "time_machine-2.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:932cfde6024f9cd8874a0d3b4651db49fe72cbd144edc7b00153d5729ba75379"},
- {file = "time_machine-2.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6daf1ff062855ae4723fdb0e7d7f47bcd0b3d9b17496d63fbb1ef66907486e2"},
- {file = "time_machine-2.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6b6559d8fac58d99a90c518f0a559de62b6ceff2fe9c3410eb78acdc3e16cfe4"},
- {file = "time_machine-2.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6aadb4bd780c5f89e55ac27d92192daff9cf7f307686798755f660a1f4ed3665"},
- {file = "time_machine-2.14.0-cp38-cp38-win32.whl", hash = "sha256:b99c8da2623dcb6c5cc05bd07138886d21fdab9081295f5783dfd799f9b91065"},
- {file = "time_machine-2.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:dd12a0be7f8cf5ea5617e7a6fed3800c1cf26976e5932058bcab1ce962e9bb0d"},
- {file = "time_machine-2.14.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d3e37eb8243415a8b6429099f191a8a83483e64aba9e04b21184ce9a1b6b1e6"},
- {file = "time_machine-2.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ba9c2da2cef0b0350beaaa7031acba5296cdc2146e59083f9b1ecd9036ff1cb9"},
- {file = "time_machine-2.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c36e9ecdf9afc729ba5c137f906a13bf24d16255871f3bb623b9d129859f3fa"},
- {file = "time_machine-2.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:669ae68799cbce72b09fb896a4a2c4314255f64dd5d68845b0aea71f32c082f5"},
- {file = "time_machine-2.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f5a5b20bde09ec4ab3143c94848b8323190c4aefab129f92da9e50b4f55d173"},
- {file = "time_machine-2.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fb90ffdbc67fa5a35948f10c1b3e6658e8db474468f6a64f8e8a2ab611eea047"},
- {file = "time_machine-2.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f163cbc66bcc76adcfdc8b649d3de51c3281b2193c4e753786d1af81582660fb"},
- {file = "time_machine-2.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dace63a21873a11ee2800cd765d35e295b78645477fe824283172e0f5ed87e93"},
- {file = "time_machine-2.14.0-cp39-cp39-win32.whl", hash = "sha256:6ce0f17783620fab245a7695e854cd7ecfb3c2cc6ccd5542d43ac3ecdb0100a3"},
- {file = "time_machine-2.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:ae871acd4121c510e6822a649e0c511ad4301d7cb92431ffc99e662c64f9ba9d"},
- {file = "time_machine-2.14.0-cp39-cp39-win_arm64.whl", hash = "sha256:e66796ba8d7adfe23deb03560eeaeb4ca7c11af43ad6cadadc7d3211ee6b696f"},
+ {file = "time-machine-2.14.1.tar.gz", hash = "sha256:57dc7efc1dde4331902d1bdefd34e8ee890a5c28533157e3b14a429c86b39533"},
+ {file = "time_machine-2.14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:528d588d1e8ba83e45319a74acab4be0569eb141113fdf50368045d0a7d79cee"},
+ {file = "time_machine-2.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06e913d570d7ee3e199e3316f10f10c8046287049141b0a101197712b4eac106"},
+ {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbbba954e9a409e7d66d60df2b6b8daeb897f8338f909a92d9d20e431ec70d1"},
+ {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a153b085b4aee652d6b3bf9019ca897f1597ba9869b640b06f28736b267182"},
+ {file = "time_machine-2.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b94274abe24b6a90d8a5c042167a9a7af2d3438b42ac8eb5ede50fbc73c08db"},
+ {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:364353858708628655bf9fa4c2825febd679c729d9e1dd424ff86845828bac05"},
+ {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b951b6f4b8a752ab8c441df422e21954a721a0a5276aa3814ce8cf7205aeb6da"},
+ {file = "time_machine-2.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:be215eb63d74a3d580f7924bb4209c783fabcfb3253073f4dcb3424d57d0f518"},
+ {file = "time_machine-2.14.1-cp310-cp310-win32.whl", hash = "sha256:0e120f95c17bf8e0c097fd8863a8eb24054f9b17d9b17c465694be50f8348a3a"},
+ {file = "time_machine-2.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:fb467d6c9e9ab615c8cf22d751d34296dacf801be323a57adeb4ff345cf72473"},
+ {file = "time_machine-2.14.1-cp310-cp310-win_arm64.whl", hash = "sha256:19db257117739b2dda1d57e149bb715a593313899b3902a7e6d752c5f1d22542"},
+ {file = "time_machine-2.14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:442d42f1b0ef006f03a5a34905829a1d3ac569a5bcda64d29706e6dc60832f94"},
+ {file = "time_machine-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0312b47f220e46f1bbfaded7fc1469882d9c2a27c6daf44e119aea7006b595cc"},
+ {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a39dba3033d9c28347d2db16bcb16041bbf4e9032e2b70023686b6f95deac9d"},
+ {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e030d2051bb515251d7f6edd9bbcf79b2b47811e2c402aba9c126af713843d26"},
+ {file = "time_machine-2.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:993ab140eb5678d1ee7f1197f08e4499dc8ea883ad6b8858737de70d509ec5b5"},
+ {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:90725f936ad8b123149bc82a46394dd7057e63157ee11ba878164053fa5bd8ad"},
+ {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:59a02c3d3b3b29e2dc3a708e775c5d6b951b0024c4013fed883f0d2205305c9e"},
+ {file = "time_machine-2.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f00f67d532da82538c4dfbbddc587e70c82664f168c11e1c2915d0c85ec2fc8"},
+ {file = "time_machine-2.14.1-cp311-cp311-win32.whl", hash = "sha256:27f735cba4c6352ad7bc53ce2d86b715379261a634e690b79fac329081e26fb6"},
+ {file = "time_machine-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee68597bd3fa5ab94633c8a9d3ebd4032091559610e078381818a732910002bc"},
+ {file = "time_machine-2.14.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ced9de5eff1fb37efb12984ab7b63f31f0aeadeedec4be6d0404ec4fa91f2e7"},
+ {file = "time_machine-2.14.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:30a4a18357fa6cf089eeefcb37e9549b42523aebb5933894770a8919e6c398e1"},
+ {file = "time_machine-2.14.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d45bd60bea85869615b117667f10a821e3b0d3603c47bfd105b45d1f67156fc8"},
+ {file = "time_machine-2.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:39de6d37a14ff8882d4f1cbd50c53268b54e1cf4ef9be2bfe590d10a51ccd314"},
+ {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fd7d188b4f9d358c6bd477daf93b460d9b244a4c296ddd065945f2b6193c2bd"},
+ {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99e6f013e67c4f74a9d8f57e34173b2047f2ad48f764e44c38f3ee5344a38c01"},
+ {file = "time_machine-2.14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a927d87501da8b053a27e80f5d0e1e58fbde4b50d70df2d3853ed67e89a731cf"},
+ {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77a616561dd4c7c442e9eee8cbb915750496e9a5a7fca6bcb11a9860226d2d0"},
+ {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e7fa70a6bdca40cc4a8386fd85bc1bae0a23ab11e49604ef853ab3ce92be127f"},
+ {file = "time_machine-2.14.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d63ef00d389fa6d2c76c863af580b3e4a8f0ccc6a9aea8e64590588e37f13c00"},
+ {file = "time_machine-2.14.1-cp312-cp312-win32.whl", hash = "sha256:6706eb06487354a5e219cacea709fb3ec44dec3842c6218237d5069fa5f1ad64"},
+ {file = "time_machine-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:36aa4f17adcd73a6064bf4991a29126cac93521f0690805edb91db837c4e1453"},
+ {file = "time_machine-2.14.1-cp312-cp312-win_arm64.whl", hash = "sha256:edea570f3835a036e8860bb8d6eb8d08473c59313db86e36e3b207f796fd7b14"},
+ {file = "time_machine-2.14.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e80408e6b6670e9ce33f94b1cc6b72b1a9b646f5e19f586908129871f74b40"},
+ {file = "time_machine-2.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c69c0cb498c86ef843cd15964714e76465cc25d64464da57d5d1318f499de099"},
+ {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc48d3934109b0bdbbdc5e9ce577213f7148a92fed378420ee13453503fe4db9"},
+ {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7161cea2ff3244cc6075e365fab89000df70ead63a3da9d473983d580558d2de"},
+ {file = "time_machine-2.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39fceeb131e6c07b386de042ce1016be771576e9516124b78e75cbab94ae5041"},
+ {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fe508a6c43fb72fa4f66b50b14684cf58d3db95fed617177ec197a7a90427bae"},
+ {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5f3d5c21884aee10e13b00ef45fab893a43db9d59ec27271573528bd359b0ef5"},
+ {file = "time_machine-2.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a75e24e59f58059bbbc50e7f97aa6d126bbc2f603a8a5cd1e884beffcf130d8f"},
+ {file = "time_machine-2.14.1-cp38-cp38-win32.whl", hash = "sha256:b0f8ba70fbb71d7fbc6d6adb90bed72a83db15b3318c7af0060467539b2f1b63"},
+ {file = "time_machine-2.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:15cf3623a4ba2bb4fce4529295570acd5f6c6b44bcbfd1b8d0756ce56c38fe82"},
+ {file = "time_machine-2.14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bb3a2518c52aa944989b541e5297b833388eb3fe72d91eb875b21fe771597b04"},
+ {file = "time_machine-2.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:416d94eab7723c7d8a37fe6b3b1882046fdbf3c31b9abec3cac87cf35dbb8230"},
+ {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adfbfa796dd96383400b44681eacc5ab06d3cbfad39c30878e5ead0bfdca808a"},
+ {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31e6e9bff89b7c6e4cbc169ba1d00d6c107b3abc43173b2799352b6995cf7cb2"},
+ {file = "time_machine-2.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107caed387438d689180b692e8d84aa1ebe8918790df83dc5e2146e60e5e0859"},
+ {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cab4abf4d1490a7da35db5a321ff8a4d4a2195f4832a792c75b626ffc4a5584c"},
+ {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd8645b820f7895fdafbc4412d1ce376956e36ad4fd05a43269aa06c3132afc3"},
+ {file = "time_machine-2.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dd26039a9ffea2d5ee1309f2ec9b656d4925371c65563822d52e4037a4186eca"},
+ {file = "time_machine-2.14.1-cp39-cp39-win32.whl", hash = "sha256:5e19b19d20bfbff8c97949e06e150998cf9d0a676e1641fb90597e59a9d7d5e2"},
+ {file = "time_machine-2.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:f5d371a5218318121a6b44c21438258b6408b8bfe7ccccb754cf8eb880505576"},
+ {file = "time_machine-2.14.1-cp39-cp39-win_arm64.whl", hash = "sha256:2c774f4b603a36ca2611327c57aa8ce0d5042298da008238ee5234b31ce7b22c"},
]
[[package]]
@@ -3858,11 +3858,11 @@ files = [
[[package]]
name = "trio"
-version = "0.24.0"
+version = "0.25.0"
requires_python = ">=3.8"
summary = "A friendly Python library for async concurrency and I/O"
dependencies = [
- "attrs>=20.1.0",
+ "attrs>=23.2.0",
"cffi>=1.14; os_name == \"nt\" and implementation_name != \"pypy\"",
"exceptiongroup; python_version < \"3.11\"",
"idna",
@@ -3871,8 +3871,8 @@ dependencies = [
"sortedcontainers",
]
files = [
- {file = "trio-0.24.0-py3-none-any.whl", hash = "sha256:c3bd3a4e3e3025cd9a2241eae75637c43fe0b9e88b4c97b9161a55b9e54cd72c"},
- {file = "trio-0.24.0.tar.gz", hash = "sha256:ffa09a74a6bf81b84f8613909fb0beaee84757450183a7a2e0b47b455c0cac5d"},
+ {file = "trio-0.25.0-py3-none-any.whl", hash = "sha256:e6458efe29cc543e557a91e614e2b51710eba2961669329ce9c862d50c6e8e81"},
+ {file = "trio-0.25.0.tar.gz", hash = "sha256:9b41f5993ad2c0e5f62d0acca320ec657fdb6b2a2c22b8c7aed6caf154475c4e"},
]
[[package]]
@@ -3969,12 +3969,12 @@ files = [
[[package]]
name = "types-psutil"
-version = "5.9.5.20240311"
+version = "5.9.5.20240316"
requires_python = ">=3.8"
summary = "Typing stubs for psutil"
files = [
- {file = "types-psutil-5.9.5.20240311.tar.gz", hash = "sha256:7e5ab45170d9a53a83b461b1771f5eb8b00bb249e6ca7bb3e6391e8cab7742c7"},
- {file = "types_psutil-5.9.5.20240311-py3-none-any.whl", hash = "sha256:890965f336122917091ae68b6bb2e63987ae143f917a229169b9dc83580529a9"},
+ {file = "types-psutil-5.9.5.20240316.tar.gz", hash = "sha256:5636f5714bb930c64bb34c4d47a59dc92f9d610b778b5364a31daa5584944848"},
+ {file = "types_psutil-5.9.5.20240316-py3-none-any.whl", hash = "sha256:2fdd64ea6e97befa546938f486732624f9255fde198b55e6f00fda236f059f64"},
]
[[package]]
@@ -4079,7 +4079,7 @@ files = [
[[package]]
name = "uvicorn"
-version = "0.28.0"
+version = "0.29.0"
requires_python = ">=3.8"
summary = "The lightning-fast ASGI server."
dependencies = [
@@ -4088,13 +4088,13 @@ dependencies = [
"typing-extensions>=4.0; python_version < \"3.11\"",
]
files = [
- {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"},
- {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"},
+ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
+ {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
]
[[package]]
name = "uvicorn"
-version = "0.28.0"
+version = "0.29.0"
extras = ["standard"]
requires_python = ">=3.8"
summary = "The lightning-fast ASGI server."
@@ -4103,14 +4103,14 @@ dependencies = [
"httptools>=0.5.0",
"python-dotenv>=0.13",
"pyyaml>=5.1",
- "uvicorn==0.28.0",
+ "uvicorn==0.29.0",
"uvloop!=0.15.0,!=0.15.1,>=0.14.0; (sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"",
"watchfiles>=0.13",
"websockets>=10.4",
]
files = [
- {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"},
- {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"},
+ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
+ {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
]
[[package]]
diff --git a/pyproject.toml b/pyproject.toml
index 5e399bdd68..1dfc87b68b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -57,7 +57,7 @@ maintainers = [
name = "litestar"
readme = "README.md"
requires-python = ">=3.8,<4.0"
-version = "2.7.0"
+version = "2.7.1"
[project.urls]
Blog = "https://blog.litestar.dev"
diff --git a/tests/helpers.py b/tests/helpers.py
index 10456706b8..d550098352 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -1,10 +1,15 @@
from __future__ import annotations
+import atexit
import inspect
+import logging
import random
import sys
-from contextlib import AbstractContextManager
-from typing import Any, AsyncContextManager, Awaitable, ContextManager, TypeVar, cast, overload
+from contextlib import AbstractContextManager, contextmanager
+from typing import Any, AsyncContextManager, Awaitable, ContextManager, Generator, TypeVar, cast, overload
+
+import picologging
+from _pytest.logging import LogCaptureHandler, _LiveLoggingNullHandler
from litestar._openapi.schema_generation import SchemaCreator
from litestar._openapi.schema_generation.plugins import openapi_schema_plugins
@@ -28,6 +33,15 @@ def randbytes(n: int) -> bytes:
randbytes = RANDOM.randbytes
+if sys.version_info >= (3, 12):
+ getHandlerByName = logging.getHandlerByName
+else:
+ from logging import _handlers # type: ignore[attr-defined]
+
+ def getHandlerByName(name: str) -> Any:
+ return _handlers.get(name)
+
+
@overload
async def maybe_async(obj: Awaitable[T]) -> T: ...
@@ -66,3 +80,29 @@ def get_schema_for_field_definition(
if isinstance(result, Schema):
return result
return creator.schema_registry.from_reference(result).schema
+
+
+@contextmanager
+def cleanup_logging_impl() -> Generator:
+ # Reset root logger (`logging` module)
+ std_root_logger: logging.Logger = logging.getLogger()
+ for std_handler in std_root_logger.handlers:
+ # Don't interfere with pytest's handler config
+ if not isinstance(std_handler, (_LiveLoggingNullHandler, LogCaptureHandler)):
+ std_root_logger.removeHandler(std_handler)
+
+ # Reset root logger (`picologging` module)
+ pico_root_logger: picologging.Logger = picologging.getLogger()
+ for pico_handler in pico_root_logger.handlers:
+ pico_root_logger.removeHandler(pico_handler)
+
+ yield
+
+ # Stop the "queue_listener" handler's listener (mandatory for the 'logging' module on Python 3.12,
+ # otherwise the test suite would hang at the end of the tests and some tests would fail)
+ queue_listener_handler = getHandlerByName("queue_listener")
+ if queue_listener_handler and hasattr(queue_listener_handler, "listener"):
+ atexit.unregister(queue_listener_handler.listener.stop)
+ queue_listener_handler.listener.stop()
+ queue_listener_handler.close()
+ del queue_listener_handler
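A minimal usage sketch of the `cleanup_logging_impl` helper added above, mirroring the autouse fixtures that the logging-related test modules later in this patch register (names taken from those hunks):

```python
from typing import Generator

import pytest

from tests.helpers import cleanup_logging_impl


@pytest.fixture(autouse=True)
def cleanup_logging() -> Generator:
    # Run each test with clean `logging`/`picologging` root handlers and stop the
    # "queue_listener" handler's listener afterwards (see cleanup_logging_impl above).
    with cleanup_logging_impl():
        yield
```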
diff --git a/tests/unit/test_contrib/test_pydantic/test_dto.py b/tests/unit/test_contrib/test_pydantic/test_dto.py
index aee52fe405..b6baddfa0e 100644
--- a/tests/unit/test_contrib/test_pydantic/test_dto.py
+++ b/tests/unit/test_contrib/test_pydantic/test_dto.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, cast
import pytest
from pydantic import v1 as pydantic_v1
@@ -13,8 +13,13 @@
from litestar.typing import FieldDefinition
if TYPE_CHECKING:
+ from collections.abc import Callable
+ from types import ModuleType
+
from pydantic import BaseModel
+ from litestar import Litestar
+
def test_schema_required_fields_with_pydantic_dto(
use_experimental_dto_backend: bool, base_model: type[BaseModel]
@@ -62,3 +67,36 @@ class Model(pydantic_v1.BaseModel):
dto_type = PydanticDTO[Model]
assert dto_type.detect_nested_field(FieldDefinition.from_annotation(Model)) is True
assert dto_type.detect_nested_field(FieldDefinition.from_annotation(int)) is False
+
+
+def test_pydantic_field_descriptions(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from litestar import Litestar, get
+from litestar.contrib.pydantic import PydanticDTO
+from litestar.dto import DTOConfig
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated
+
+class User(BaseModel):
+ id: Annotated[
+ int,
+ Field(description="This is a test (id description)."),
+ ]
+
+class DataCollectionDTO(PydanticDTO[User]):
+ config = DTOConfig(rename_strategy="camel")
+
+@get("/user", return_dto=DataCollectionDTO, sync_to_thread=False)
+def get_user() -> User:
+ return User(id=1)
+
+app = Litestar(route_handlers=[get_user])
+ """
+ )
+ app = cast("Litestar", module.app)
+ schema = app.openapi_schema
+ assert schema.components.schemas is not None
+ component_schema = schema.components.schemas["GetUserUserResponseBody"]
+ assert component_schema.properties is not None
+ assert component_schema.properties["id"].description == "This is a test (id description)."
diff --git a/tests/unit/test_contrib/test_pydantic/test_openapi.py b/tests/unit/test_contrib/test_pydantic/test_openapi.py
index 124362062e..aa84def09b 100644
--- a/tests/unit/test_contrib/test_pydantic/test_openapi.py
+++ b/tests/unit/test_contrib/test_pydantic/test_openapi.py
@@ -17,7 +17,7 @@
from litestar._openapi.schema_generation.schema import SchemaCreator
from litestar.contrib.pydantic import PydanticPlugin, PydanticSchemaPlugin
from litestar.openapi import OpenAPIConfig
-from litestar.openapi.spec import Example, Reference, Schema
+from litestar.openapi.spec import Reference, Schema
from litestar.openapi.spec.enums import OpenAPIFormat, OpenAPIType
from litestar.params import KwargDefinition
from litestar.status_codes import HTTP_200_OK
@@ -393,7 +393,7 @@ async def example_route() -> Lookup:
assert response.status_code == HTTP_200_OK
assert response.json()["components"]["schemas"]["test_schema_generation_v1.Lookup"]["properties"]["id"] == {
"description": "A unique identifier",
- "examples": {"id-example-1": {"value": "e4eaaaf2-d142-11e1-b3e4-080027620cdd"}},
+ "examples": ["e4eaaaf2-d142-11e1-b3e4-080027620cdd"],
"maxLength": 16,
"minLength": 12,
"type": "string",
@@ -430,7 +430,7 @@ async def example_route() -> Lookup:
assert response.status_code == HTTP_200_OK
assert response.json()["components"]["schemas"]["test_schema_generation_v2.Lookup"]["properties"]["id"] == {
"description": "A unique identifier",
- "examples": {"id-example-1": {"value": "e4eaaaf2-d142-11e1-b3e4-080027620cdd"}},
+ "examples": ["e4eaaaf2-d142-11e1-b3e4-080027620cdd"],
"maxLength": 16,
"minLength": 12,
"type": "string",
@@ -530,7 +530,7 @@ class Model(pydantic_v1.BaseModel):
assert isinstance(value, Schema)
assert value.description == "description"
assert value.title == "title"
- assert value.examples == {"value-example-1": Example(value="example")}
+ assert value.examples == ["example"]
def test_create_schema_for_field_v2() -> None:
@@ -550,7 +550,7 @@ class Model(pydantic_v2.BaseModel):
assert isinstance(value, Schema)
assert value.description == "description"
assert value.title == "title"
- assert value.examples == {"value-example-1": Example(value="example")}
+ assert value.examples == ["example"]
@pytest.mark.parametrize("with_future_annotations", [True, False])
diff --git a/tests/unit/test_dto/test_factory/test_backends/test_backends.py b/tests/unit/test_dto/test_factory/test_backends/test_backends.py
index a7561e5ab1..0f5d1aedad 100644
--- a/tests/unit/test_dto/test_factory/test_backends/test_backends.py
+++ b/tests/unit/test_dto/test_factory/test_backends/test_backends.py
@@ -7,18 +7,20 @@
from unittest.mock import MagicMock
import pytest
-from msgspec import Struct, to_builtins
+from msgspec import Meta, Struct, to_builtins
from litestar import Litestar, Request, get, post
from litestar._openapi.schema_generation import SchemaCreator
from litestar.dto import DataclassDTO, DTOConfig, DTOField
-from litestar.dto._backend import DTOBackend
+from litestar.dto._backend import DTOBackend, _create_struct_field_meta_for_field_definition
from litestar.dto._types import CollectionType, SimpleType, TransferDTOFieldDefinition
from litestar.dto.data_structures import DTOFieldDefinition
from litestar.enums import MediaType
from litestar.exceptions import SerializationException
+from litestar.openapi.spec.example import Example
from litestar.openapi.spec.reference import Reference
from litestar.openapi.spec.schema import Schema
+from litestar.params import KwargDefinition
from litestar.serialization import encode_json
from litestar.testing import RequestFactory
from litestar.typing import FieldDefinition
@@ -448,3 +450,29 @@ class Factory(DataclassDTO):
assert b_d_nested_info is not None
assert not next(f for f in b_d_nested_info.field_definitions if f.name == "e").is_excluded
assert b_d_nested_info.field_definitions[1].name == "f"
+
+
+@pytest.mark.parametrize(
+ ("constraint_kwargs",),
+ (
+ ({},),
+ ({"gt": 0, "lt": 2},),
+ ({"ge": 0, "le": 2},),
+ ({"min_length": 1, "max_length": 2},),
+ ({"pattern": "test"},),
+ ),
+)
+def test_create_struct_field_meta_for_field_definition(constraint_kwargs: Any) -> None:
+ mock_field = MagicMock(spec=TransferDTOFieldDefinition, is_partial=False)
+ mock_field.kwarg_definition = KwargDefinition(
+ description="test",
+ examples=[Example(value=1)],
+ title="test",
+ **constraint_kwargs,
+ )
+ assert _create_struct_field_meta_for_field_definition(mock_field) == Meta(
+ description="test",
+ examples=[1],
+ title="test",
+ **constraint_kwargs,
+ )
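The `Meta` object asserted above is ordinary msgspec field metadata. As a hedged, standalone sketch (not Litestar's actual DTO backend; `SmallInt` and `Payload` are illustrative names), the same constraint shape validates like this when used with msgspec directly:

```python
import msgspec
from typing_extensions import Annotated

# Same metadata/constraint shape as in the test above, attached to a standalone struct.
SmallInt = Annotated[int, msgspec.Meta(description="test", examples=[1], title="test", gt=0, lt=2)]


class Payload(msgspec.Struct):
    value: SmallInt


msgspec.convert({"value": 1}, Payload)  # accepted: 0 < 1 < 2
# msgspec.convert({"value": 5}, Payload)  # would raise msgspec.ValidationError
```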
diff --git a/tests/unit/test_kwargs/test_path_params.py b/tests/unit/test_kwargs/test_path_params.py
index a14c6f3cdf..fb72fa9569 100644
--- a/tests/unit/test_kwargs/test_path_params.py
+++ b/tests/unit/test_kwargs/test_path_params.py
@@ -121,6 +121,17 @@ def test_method() -> None:
Litestar(route_handlers=[test_method])
+def test_path_param_defined_in_layered_params_error() -> None:
+ @get(path="/{param:int}")
+ def test_method(param: int) -> None:
+ raise AssertionError("should not be called")
+
+ with pytest.raises(ImproperlyConfiguredException) as exc_info:
+ Litestar(route_handlers=[test_method], parameters={"param": Parameter(gt=3)})
+
+ assert "Kwarg resolution ambiguity detected for the following keys: param." in str(exc_info.value)
+
+
@pytest.mark.parametrize(
"param_type_name, param_type_class, value, expected_value",
[
diff --git a/tests/unit/test_logging/test_logging_config.py b/tests/unit/test_logging/test_logging_config.py
index 2e363d45cb..e318ed730f 100644
--- a/tests/unit/test_logging/test_logging_config.py
+++ b/tests/unit/test_logging/test_logging_config.py
@@ -1,9 +1,12 @@
import logging
import sys
-from typing import TYPE_CHECKING, Any, Dict
+import time
+from logging.handlers import QueueHandler
+from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from unittest.mock import Mock, patch
import pytest
+from _pytest.logging import LogCaptureHandler, _LiveLoggingNullHandler
from litestar import Request, get
from litestar.exceptions import ImproperlyConfiguredException
@@ -12,9 +15,16 @@
from litestar.logging.standard import QueueListenerHandler as StandardQueueListenerHandler
from litestar.status_codes import HTTP_200_OK
from litestar.testing import create_test_client
+from tests.helpers import cleanup_logging_impl
if TYPE_CHECKING:
- from _pytest.logging import LogCaptureFixture
+ from _pytest.capture import CaptureFixture
+
+
+@pytest.fixture(autouse=True)
+def cleanup_logging() -> Generator:
+ with cleanup_logging_impl():
+ yield
@pytest.mark.parametrize(
@@ -66,14 +76,45 @@ def test_dictconfig_startup(dict_config_class: str, handlers: Any) -> None:
assert dict_config_mock.called
-def test_standard_queue_listener_logger(caplog: "LogCaptureFixture") -> None:
- with caplog.at_level("INFO", logger="test_logger"):
- logger = logging.getLogger("test_logger")
- logger.info("Testing now!")
- assert "Testing now!" in caplog.text
- var = "test_var"
- logger.info("%s", var)
- assert var in caplog.text
+def test_standard_queue_listener_logger(capsys: "CaptureFixture[str]") -> None:
+ def wait_log_queue(queue: Any, sleep_time: float = 0.1, max_retries: int = 5) -> None:
+ retry = 0
+ while queue.qsize() > 0 and retry < max_retries:
+ retry += 1
+ time.sleep(sleep_time)
+
+ def assert_log(queue: Any, expected: str, count: Optional[int] = None) -> None:
+ wait_log_queue(queue)
+ log_output = capsys.readouterr().err.strip()
+ if count is not None:
+ assert len(log_output.split("\n")) == count
+ assert log_output == expected
+
+ with patch("litestar.logging.config.find_spec") as find_spec_mock:
+ find_spec_mock.return_value = False
+ get_logger = LoggingConfig(
+ formatters={"standard": {"format": "%(levelname)s :: %(name)s :: %(message)s"}},
+ loggers={
+ "test_logger": {
+ "level": "INFO",
+ "handlers": ["queue_listener"],
+ "propagate": False,
+ },
+ },
+ ).configure()
+
+ logger = get_logger("test_logger")
+ assert isinstance(logger, logging.Logger) # type: ignore[unreachable]
+
+ handler = logger.handlers[0] # type: ignore[unreachable]
+ assert isinstance(handler, QueueHandler if sys.version_info >= (3, 12, 0) else StandardQueueListenerHandler)
+
+ logger.info("Testing now!")
+ assert_log(handler.queue, expected="INFO :: test_logger :: Testing now!", count=1)
+
+ var = "test_var"
+ logger.info("%s", var)
+ assert_log(handler.queue, expected="INFO :: test_logger :: test_var", count=1)
@patch("picologging.config.dictConfig")
@@ -94,9 +135,10 @@ def test_get_logger_without_logging_config() -> None:
def test_get_default_logger() -> None:
with create_test_client(logging_config=LoggingConfig(handlers=default_handlers)) as client:
- assert isinstance(client.app.logger.handlers[0], StandardQueueListenerHandler)
+ expected_handler_class = QueueHandler if sys.version_info >= (3, 12, 0) else StandardQueueListenerHandler
+ assert isinstance(client.app.logger.handlers[0], expected_handler_class)
new_logger = client.app.get_logger()
- assert isinstance(new_logger.handlers[0], StandardQueueListenerHandler)
+ assert isinstance(new_logger.handlers[0], expected_handler_class)
def test_get_picologging_logger() -> None:
@@ -109,7 +151,7 @@ def test_get_picologging_logger() -> None:
@pytest.mark.parametrize(
"handlers, listener",
[
- [default_handlers, StandardQueueListenerHandler],
+ [default_handlers, QueueHandler if sys.version_info >= (3, 12, 0) else StandardQueueListenerHandler],
[default_picologging_handlers, PicologgingQueueListenerHandler],
],
)
@@ -133,7 +175,7 @@ def test_validation() -> None:
@pytest.mark.parametrize(
"handlers, listener",
[
- [default_handlers, StandardQueueListenerHandler],
+ [default_handlers, QueueHandler if sys.version_info >= (3, 12, 0) else StandardQueueListenerHandler],
[default_picologging_handlers, PicologgingQueueListenerHandler],
],
)
@@ -144,43 +186,36 @@ def test_root_logger(handlers: Any, listener: Any) -> None:
assert isinstance(root_logger.handlers[0], listener) # type: ignore[attr-defined]
-@pytest.mark.parametrize(
- "handlers, listener",
- [
- [default_handlers, StandardQueueListenerHandler],
- [default_picologging_handlers, PicologgingQueueListenerHandler],
- ],
-)
-def test_root_logger_no_config(handlers: Any, listener: Any) -> None:
+@pytest.mark.parametrize("handlers", [default_handlers, default_picologging_handlers])
+def test_root_logger_no_config(handlers: Any) -> None:
logging_config = LoggingConfig(handlers=handlers, configure_root_logger=False)
get_logger = logging_config.configure()
root_logger = get_logger()
- for handler in root_logger.handlers: # type: ignore[attr-defined]
- root_logger.removeHandler(handler) # type: ignore[attr-defined]
- get_logger = logging_config.configure()
- root_logger = get_logger()
- if handlers["console"]["class"] == "logging.StreamHandler":
- assert not isinstance(root_logger.handlers[0], listener) # type: ignore[attr-defined]
+ if isinstance(root_logger, logging.Logger): # type: ignore[unreachable]
+ # pytest automatically configures some handlers
+ for handler in root_logger.handlers: # type: ignore[unreachable]
+ assert isinstance(handler, (_LiveLoggingNullHandler, LogCaptureHandler))
else:
- assert len(root_logger.handlers) < 1 # type: ignore[attr-defined]
+ assert len(root_logger.handlers) == 0 # type: ignore[attr-defined]
@pytest.mark.parametrize(
- "handlers, listener",
+ "handlers, expected_handler_class",
[
- pytest.param(
- default_handlers,
- StandardQueueListenerHandler,
- marks=pytest.mark.xfail(
- condition=sys.version_info >= (3, 12), reason="change to QueueHandler/QueueListener config in 3.12"
- ),
- ),
+ [default_handlers, QueueHandler if sys.version_info >= (3, 12, 0) else StandardQueueListenerHandler],
[default_picologging_handlers, PicologgingQueueListenerHandler],
],
)
-def test_customizing_handler(handlers: Any, listener: Any, monkeypatch: pytest.MonkeyPatch) -> None:
- monkeypatch.setitem(handlers["queue_listener"], "handlers", ["cfg://handlers.console"])
- logging_config = LoggingConfig(handlers=handlers)
+def test_customizing_handler(handlers: Any, expected_handler_class: Any, monkeypatch: pytest.MonkeyPatch) -> None:
+ log_format = "%(levelname)s :: %(name)s :: %(message)s"
+ formatters = {"standard": {"format": log_format}}
+ logging_config = LoggingConfig(formatters=formatters, handlers=handlers)
get_logger = logging_config.configure()
root_logger = get_logger()
- assert isinstance(root_logger.handlers[0], listener) # type: ignore[attr-defined]
+ root_logger_handler = root_logger.handlers[0] # type: ignore[attr-defined]
+ assert isinstance(root_logger_handler, expected_handler_class)
+ if type(root_logger_handler) is QueueHandler:
+ formatter = root_logger_handler.listener.handlers[0].formatter # type: ignore[attr-defined]
+ else:
+ formatter = root_logger_handler.formatter
+ assert formatter._fmt == log_format
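The version-dependent assertions above reflect Python 3.12's queue-based stdlib logging configuration, where the default handlers resolve to a plain `logging.handlers.QueueHandler` with an attached listener instead of Litestar's `QueueListenerHandler`. A minimal, hedged sketch of looking up and stopping such a handler, mirroring `cleanup_logging_impl` in `tests/helpers.py` and assuming a handler configured under the name "queue_listener":

```python
import logging

# Python 3.12+: handlers configured via dictConfig can be looked up by name.
handler = logging.getHandlerByName("queue_listener")
if handler is not None and hasattr(handler, "listener"):
    handler.listener.stop()  # stop the background QueueListener thread
    handler.close()
```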
diff --git a/tests/unit/test_middleware/test_exception_handler_middleware.py b/tests/unit/test_middleware/test_exception_handler_middleware.py
index 4ecf7cf839..5371ca4847 100644
--- a/tests/unit/test_middleware/test_exception_handler_middleware.py
+++ b/tests/unit/test_middleware/test_exception_handler_middleware.py
@@ -1,5 +1,5 @@
from inspect import getinnerframes
-from typing import TYPE_CHECKING, Any, Callable, Optional
+from typing import TYPE_CHECKING, Any, Callable, Generator, Optional
import pytest
from _pytest.capture import CaptureFixture
@@ -17,6 +17,7 @@
from litestar.testing import TestClient, create_test_client
from litestar.types import ExceptionHandlersMap
from litestar.types.asgi_types import HTTPScope
+from tests.helpers import cleanup_logging_impl
if TYPE_CHECKING:
from _pytest.logging import LogCaptureFixture
@@ -29,6 +30,12 @@ async def dummy_app(scope: Any, receive: Any, send: Any) -> None:
return None
+@pytest.fixture(autouse=True)
+def cleanup_logging() -> Generator:
+ with cleanup_logging_impl():
+ yield
+
+
@pytest.fixture()
def app() -> Litestar:
return Litestar()
diff --git a/tests/unit/test_middleware/test_logging_middleware.py b/tests/unit/test_middleware/test_logging_middleware.py
index 6ce6e5dea1..35f9f6ca47 100644
--- a/tests/unit/test_middleware/test_logging_middleware.py
+++ b/tests/unit/test_middleware/test_logging_middleware.py
@@ -1,5 +1,5 @@
from logging import INFO
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any, Dict, Generator
import pytest
from structlog.testing import capture_logs
@@ -18,6 +18,7 @@
from litestar.params import Body
from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED
from litestar.testing import create_test_client
+from tests.helpers import cleanup_logging_impl
if TYPE_CHECKING:
from _pytest.logging import LogCaptureFixture
@@ -30,6 +31,12 @@
pytestmark = pytest.mark.usefixtures("reset_httpx_logging")
+@pytest.fixture(autouse=True)
+def cleanup_logging() -> Generator:
+ with cleanup_logging_impl():
+ yield
+
+
@pytest.fixture
def handler() -> HTTPRouteHandler:
@get("/")
diff --git a/tests/unit/test_openapi/test_integration.py b/tests/unit/test_openapi/test_integration.py
index fd9369c893..df67fdf6ac 100644
--- a/tests/unit/test_openapi/test_integration.py
+++ b/tests/unit/test_openapi/test_integration.py
@@ -204,7 +204,7 @@ async def example_route() -> Lookup:
"id"
] == {
"description": "A unique identifier",
- "examples": {"id-example-1": {"value": "e4eaaaf2-d142-11e1-b3e4-080027620cdd"}},
+ "examples": ["e4eaaaf2-d142-11e1-b3e4-080027620cdd"],
"maxLength": 16,
"minLength": 12,
"type": "string",
diff --git a/tests/unit/test_openapi/test_parameters.py b/tests/unit/test_openapi/test_parameters.py
index b683081c7d..48cea53df0 100644
--- a/tests/unit/test_openapi/test_parameters.py
+++ b/tests/unit/test_openapi/test_parameters.py
@@ -71,8 +71,8 @@ def test_create_parameters(person_controller: Type[Controller]) -> None:
assert page_size.schema.type == OpenAPIType.INTEGER
assert page_size.required
assert page_size.description == "Page Size Description"
- assert page_size.schema.examples
- assert next(iter(page_size.schema.examples.values())).value == 1
+ assert page_size.examples
+ assert page_size.schema.examples == [1]
assert name.param_in == ParamType.QUERY
assert name.name == "name"
@@ -107,19 +107,19 @@ def test_create_parameters(person_controller: Type[Controller]) -> None:
Schema(
type=OpenAPIType.STRING,
enum=["M", "F", "O", "A"],
- examples={"gender-example-1": Example(description="Example value", value="M")},
+ examples=["M"],
),
Schema(
type=OpenAPIType.ARRAY,
items=Schema(
type=OpenAPIType.STRING,
enum=["M", "F", "O", "A"],
- examples={"gender-example-1": Example(description="Example value", value="F")},
+ examples=["F"],
),
- examples={"list-example-1": Example(description="Example value", value=["A"])},
+ examples=[["A"]],
),
],
- examples={"gender-example-1": Example(value="M"), "gender-example-2": Example(value=["M", "O"])},
+ examples=["M", ["M", "O"]],
)
assert not gender.required
diff --git a/tests/unit/test_openapi/test_schema.py b/tests/unit/test_openapi/test_schema.py
index 1b05ade837..c6d56c00df 100644
--- a/tests/unit/test_openapi/test_schema.py
+++ b/tests/unit/test_openapi/test_schema.py
@@ -82,7 +82,7 @@ def test_process_schema_result() -> None:
assert kwarg_definition.examples
for signature_key, schema_key in KWARG_DEFINITION_ATTRIBUTE_TO_OPENAPI_PROPERTY_MAP.items():
if schema_key == "examples":
- assert schema.examples == {"str-example-1": kwarg_definition.examples[0]}
+ assert schema.examples == [kwarg_definition.examples[0].value]
else:
assert getattr(schema, schema_key) == getattr(kwarg_definition, signature_key)
@@ -225,7 +225,7 @@ def test_schema_hashing() -> None:
Schema(type=OpenAPIType.NUMBER),
Schema(type=OpenAPIType.OBJECT, properties={"key": Schema(type=OpenAPIType.STRING)}),
],
- examples={"example-1": Example(value=None), "example-2": Example(value=[1, 2, 3])},
+ examples=[None, [1, 2, 3]],
)
assert hash(schema)
@@ -289,7 +289,7 @@ class Lookup(msgspec.Struct):
schema = get_schema_for_field_definition(FieldDefinition.from_kwarg(name="Lookup", annotation=Lookup))
assert schema.properties["id"].type == OpenAPIType.STRING # type: ignore[index, union-attr]
- assert schema.properties["id"].examples == {"id-example-1": Example(value="example")} # type: ignore[index, union-attr]
+ assert schema.properties["id"].examples == ["example"] # type: ignore[index, union-attr]
assert schema.properties["id"].description == "description" # type: ignore[index]
assert schema.properties["id"].title == "title" # type: ignore[index, union-attr]
assert schema.properties["id"].max_length == 16 # type: ignore[index, union-attr]
diff --git a/tests/unit/test_plugins/test_sqlalchemy.py b/tests/unit/test_plugins/test_sqlalchemy.py
index 437ba3acd6..8ac4e31070 100644
--- a/tests/unit/test_plugins/test_sqlalchemy.py
+++ b/tests/unit/test_plugins/test_sqlalchemy.py
@@ -1,5 +1,5 @@
-import advanced_alchemy
from advanced_alchemy import base as sa_base
+from advanced_alchemy import filters as sa_filters
from advanced_alchemy import types as sa_types
from advanced_alchemy.extensions import litestar as sa_litestar
@@ -7,7 +7,7 @@
def test_re_exports() -> None:
- assert sqlalchemy.filters is advanced_alchemy.filters
+ assert sqlalchemy.filters is sa_filters
assert sqlalchemy.types is sa_types
assert sqlalchemy.AuditColumns is sa_base.AuditColumns
diff --git a/tools/prepare_release.py b/tools/prepare_release.py
index cbc7180e1c..e081825f38 100644
--- a/tools/prepare_release.py
+++ b/tools/prepare_release.py
@@ -16,6 +16,10 @@
import httpx
import msgspec
+_polar = "[Polar.sh](https://polar.sh/litestar-org)"
+_open_collective = "[OpenCollective](https://opencollective.com/litestar)"
+_github_sponsors = "[GitHub Sponsors](https://github.com/sponsors/litestar-org/)"
+
class PullRequest(msgspec.Struct):
title: str
@@ -250,7 +254,7 @@ def add_change(self, pr: PRInfo) -> None:
@contextlib.contextmanager
def directive(self, name: str, arg: str | None = None, **options: str) -> Generator[None, None, None]:
- self.add_line(f".. {name}:: {arg if arg else ''}")
+ self.add_line(f".. {name}:: {arg or ''}")
self._level += 1
for key, value in options.items():
if value:
@@ -268,12 +272,23 @@ def build_gh_release_notes(release_info: ReleaseInfo) -> str:
# 3. It works with our release branch process. GitHub doesn't pick up (all) commits
# made there depending on how things were merged
doc = GHReleaseWriter()
+
+ doc.add_line("## Sponsors 🌟")
+ doc.add_line(
+ "⚠️ Maintainers: Please adjust business/individual sponsors section here as defined by our tier rewards"
+ )
+ doc.add_line(f"- A huge 'Thank you!' to all sponsors across {_polar}, {_open_collective} and {_github_sponsors}!")
+
doc.add_line("## What's changed")
+ if release_info.first_time_prs:
+ doc.add_line("\n## New contributors 🎉")
+ for pr in release_info.first_time_prs:
+ doc.add_line(f"* @{pr.user.login} made their first contribution in {pr.url}")
if fixes := release_info.pull_requests.get("fix"):
- doc.add_line("\n### Bugfixes")
+ doc.add_line("\n### Bugfixes 🐛")
doc.add_pr_descriptions(fixes)
if features := release_info.pull_requests.get("feat"):
- doc.add_line("\nNew features")
+ doc.add_line("\nNew features 🚀")
doc.add_pr_descriptions(features)
ignore_sections = {"fix", "feat", "ci", "chore"}
@@ -283,11 +298,6 @@ def build_gh_release_notes(release_info: ReleaseInfo) -> str:
doc.add_line("### Other changes")
doc.add_pr_descriptions(other)
- if release_info.first_time_prs:
- doc.add_line("\n## New contributors")
- for pr in release_info.first_time_prs:
- doc.add_line(f"* @{pr.user.login} made their first contribution in {pr.url}")
-
doc.add_line("\n**Full Changelog**")
doc.add_line(release_info.compare_url)