diff --git a/.github/workflows/on_pr_to_main.yaml b/.github/workflows/on_pr_to_main.yaml index 78eacac1..83d114d3 100644 --- a/.github/workflows/on_pr_to_main.yaml +++ b/.github/workflows/on_pr_to_main.yaml @@ -22,7 +22,7 @@ jobs: id: setup-python uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.12' #---------------------------------------------- # ----- install & configure poetry ----- #---------------------------------------------- @@ -52,7 +52,7 @@ jobs: # install your root project, if required #---------------------------------------------- - name: Install project - run: poetry install --no-interaction + run: poetry install --no-interaction --extras "server" #---------------------------------------------- # run test suite #---------------------------------------------- diff --git a/.github/workflows/on_push_to_feature.yaml b/.github/workflows/on_push_to_feature.yaml index 5cc2cf10..855dd59a 100644 --- a/.github/workflows/on_push_to_feature.yaml +++ b/.github/workflows/on_push_to_feature.yaml @@ -20,7 +20,7 @@ jobs: id: setup-python uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.12' #---------------------------------------------- # ----- install & configure poetry ----- #---------------------------------------------- @@ -50,7 +50,7 @@ jobs: # install your root project, if required #---------------------------------------------- - name: Install project - run: poetry install --no-interaction + run: poetry install --no-interaction --extras "server" #---------------------------------------------- # run test suite #---------------------------------------------- diff --git a/.github/workflows/on_release.yaml b/.github/workflows/on_release.yaml index 19f6aeb8..b24c55b0 100644 --- a/.github/workflows/on_release.yaml +++ b/.github/workflows/on_release.yaml @@ -28,7 +28,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.12" - name: Install 
Poetry uses: snok/install-poetry@v1 diff --git a/Dockerfile b/Dockerfile index 84e5c556..f723f8bf 100755 --- a/Dockerfile +++ b/Dockerfile @@ -15,13 +15,8 @@ ENV VIRTUAL_ENV=${VIRTUAL_ENV} \ RUN apk add --no-cache \ bash \ - gcc \ - libffi-dev \ - musl-dev \ pipx \ - python3-dev \ - geos \ - geos-dev + git RUN pipx install poetry==${POETRY_VERSION} @@ -32,6 +27,7 @@ COPY . . RUN poetry build RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} RUN ${VIRTUAL_ENV}/bin/pip3 install --no-cache-dir dist/*.whl +RUN ${VIRTUAL_ENV}/bin/pip3 install uvicorn # # Final @@ -48,10 +44,8 @@ COPY --from=base ${VIRTUAL_ENV} ${VIRTUAL_ENV} RUN apk update && \ apk upgrade --no-cache && \ - apk add --no-cache \ - bash \ - geos \ - geos-dev + apk add --no-cache + WORKDIR /app # prez module is already built as a package and installed in $VIRTUAL_ENV as a library diff --git a/README-Dev.md b/README-Dev.md index 242149ef..e4a93c2f 100644 --- a/README-Dev.md +++ b/README-Dev.md @@ -561,6 +561,42 @@ these endpoints, specifying any variables that need to be substituted (such as p to construct the system links. 5. Return the response +## Specification of remote SPARQL templates for Object endpoints + +The OGC Features endpoints can utilise custom SPARQL templates. +At present the queries must be of the form: +```sparql +PREFIX geo: +PREFIX rdf: +PREFIX sosa: +CONSTRUCT { + ... +} +WHERE { + VALUES ?focusNode { UNDEF } +... +} +``` +At present the queries are ONLY substituted with the focus node URI, and only for OGC Features endpoints. It is intended that this functionality will provide the basis for a more general templating system across all OBJECT endpoints in the future. 
+These templates should be declared in the remote repo in this format: +```sparql +PREFIX prez: +PREFIX rdf: +INSERT DATA { GRAPH { + [ a prez:TemplateQuery ; + rdf:value """ +""" ; + prez:forEndpoint "http://www.opengis.net/ogcapi-features-1/1.0/feature" ; + ] + }} +``` +Prez will detect these template queries and when a request comes in on the endpoint with the relevant URI, utilise the template query. + +Templates can also be specified in `prez/reference_data/xxx.rq`, and mapped to an endpoint using the `endpoint_to_template_query_filename` setting (can be set as an environment variable), where `xxx.rq` is the filename of the template query. For example: +```bash +export ENDPOINT_TO_TEMPLATE_QUERY_FILENAME='{"http://www.opengis.net/ogcapi-features-1/1.0/feature": "xxx.rq"}' +``` + ## High Level Sequence listing and individual object endpoints Prez follows the following logic to determine what information to return, based on a profile, and in what mediatype to return it. diff --git a/README.md b/README.md index b066a421..15ec977b 100644 --- a/README.md +++ b/README.md @@ -40,6 +40,50 @@ poetry install > **Note:** Poetry must be installed on the system. To check if you have Poetry installed run `poetry --version`. For tips on installing and managing specific dependency groups check the [documentation](https://python-poetry.org/docs/managing-dependencies/). 
+## Endpoints + +Prez delivers the following endpoints: + +### Core Endpoints + +**Endpoint** | **Default MT** +--- | --- +/ | text/anot+turtle +/docs | text/html +/catalogs/{catalogId} | text/anot+turtle +/catalogs/{catalogId}/collections | text/anot+turtle +/catalogs/{catalogId}/collections/{recordsCollectionId} | text/anot+turtle +/catalogs/{catalogId}/collections/{recordsCollectionId}/items | text/anot+turtle +/catalogs/{catalogId}/collections/{recordsCollectionId}/items/{itemId} | text/anot+turtle +/purge-tbox-cache | application/json +/tbox-cache | application/json +/health | application/json +/prefixes | text/anot+turtle +/concept-hierarchy/{parent_curie}/narrowers | text/anot+turtle +/concept-hierarchy/{parent_curie}/top-concepts | text/anot+turtle +/cql | text/anot+turtle +/profiles | text/anot+turtle +/search | text/anot+turtle +/profiles/{profile_curie} | text/anot+turtle +/object | text/anot+turtle +/identifier/redirect | N/A +/identifier/curie/{iri} | text/plain +/identifier/iri/{curie} | text/plain + +### OGC Features API Endpoints + +**The OGC Features API Endpoints are based at the ROOT `/catalogs/{catalogId}/collections/{recordsCollectionId}/`** + +**Endpoint** | **Default MT** +--- | --- +{ROOT}/features | application/json +{ROOT}/features/docs | text/html +{ROOT}/features/conformance | application/json +{ROOT}/features/collections | application/json +{ROOT}/features/collections/{collectionId} | application/json +{ROOT}/features/collections/{collectionId}/items | application/geo+json +{ROOT}/features/collections/{collectionId}/items/{featureId} | application/geo+json + ## Configuration The following Environment Variables can be used to configure Prez: @@ -144,6 +188,23 @@ Used in conjunction with the Pyoxigraph repo. Specifies a directory (from the re - `ep:system/profile-listing` - `ep:system/profile-object` +#### Listing and Search Configuration + +- **`listing_count_limit`**: The maximum number of items to count for a listing endpoint. 
Counts greater than this limit will be returned as ">N" where N is the limit. Default is `100`. +- **`search_count_limit`**: The maximum number of items to return in a search result. Default is `10`. + +#### SPARQL Endpoint + +- **`enable_sparql_endpoint`**: Whether to enable the SPARQL endpoint. Default is `False`. + +#### Temporal Configuration + +- **`temporal_predicate`**: The predicate used for temporal properties. Default is `sdo:temporal`. + +#### Query Template Configuration + +- **`endpoint_to_template_query_filename`**: A dictionary mapping endpoints to query template filenames. Default is an empty dictionary. + ## Running This section is for developing Prez locally. See the [Running](#running) options below for running Prez in production. diff --git a/azure/function_app.py b/azure/function_app.py index 98adcb98..8d329f29 100644 --- a/azure/function_app.py +++ b/azure/function_app.py @@ -59,4 +59,3 @@ task = fn(req, context) resp = loop.run_until_complete(task) print(resp) - diff --git a/azure/patched_asgi_function_wrapper.py b/azure/patched_asgi_function_wrapper.py index 3bfd2383..2d5711a5 100644 --- a/azure/patched_asgi_function_wrapper.py +++ b/azure/patched_asgi_function_wrapper.py @@ -7,6 +7,7 @@ from azure.functions._abc import Context from azure.functions import HttpRequest + # ------------------- # Create a patched AsgiFunctionApp to fix the ASGI scope state issue # ------------------- @@ -16,12 +17,13 @@ async def _handle_async(self, req, context): asgi_request = AsgiRequest(req, context) scope = asgi_request.to_asgi_http_scope() # shallow copy the state as-per the ASGI spec - scope["state"] = copy(self.state) # <-- this is the patch, add the state to the scope - asgi_response = await AsgiResponse.from_app(self._app, - scope, - req.get_body()) + scope["state"] = copy( + self.state + ) # <-- this is the patch, add the state to the scope + asgi_response = await AsgiResponse.from_app(self._app, scope, req.get_body()) return 
asgi_response.to_func_response() + # ------------------- # Create a patched AsgiFunctionApp to fix the double-slash route issue # ------------------- @@ -35,7 +37,7 @@ def __init__(self, app, http_auth_level): self.startup_task_done = False def _add_http_app( - self, http_middleware: Union[AsgiMiddleware, WsgiMiddleware] + self, http_middleware: Union[AsgiMiddleware, WsgiMiddleware] ) -> None: """Add an Asgi app integrated http function. diff --git a/poetry.lock b/poetry.lock index 36bd9241..0944b871 100644 --- a/poetry.lock +++ b/poetry.lock @@ -112,24 +112,24 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -373,99 +373,41 @@ files = [ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "email-validator" -version = "2.2.0" -description = "A robust email address syntax and deliverability validation library." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - [[package]] name = "fastapi" -version = "0.111.1" +version = "0.114.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf"}, - {file = "fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413"}, + {file = "fastapi-0.114.1-py3-none-any.whl", hash = "sha256:5d4746f6e4b7dff0b4f6b6c6d5445645285f662fe75886e99af7ee2d6b58bb3e"}, + {file = "fastapi-0.114.1.tar.gz", hash = "sha256:1d7bbbeabbaae0acb0c22f0ab0b040f642d3093ca3645f8c876b6f91391861d8"}, ] [package.dependencies] -email_validator = ">=2.0.0" -fastapi-cli = ">=0.0.2" -httpx = ">=0.23.0" -jinja2 = ">=2.11.2" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -python-multipart = ">=0.0.7" -starlette = ">=0.37.2,<0.38.0" +starlette = ">=0.37.2,<0.39.0" typing-extensions = ">=4.8.0" -uvicorn = {version = ">=0.12.0", extras = ["standard"]} [package.extras] -all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastapi-cli" -version = "0.0.5" -description = "Run and manage FastAPI apps from the command line 
with FastAPI CLI. 🚀" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46"}, - {file = "fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f"}, -] - -[package.dependencies] -typer = ">=0.12.3" -uvicorn = {version = ">=0.15.0", extras = ["standard"]} - -[package.extras] -standard = ["uvicorn[standard] (>=0.15.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "frozendict" @@ -509,6 +451,17 @@ files = [ {file = "frozendict-2.4.4.tar.gz", hash = "sha256:3f7c031b26e4ee6a3f786ceb5e3abf1181c4ade92dce1f847da26ea2c96008c7"}, ] +[[package]] +name = "geojson" +version = "3.1.0" +description = "Python bindings and utilities for GeoJSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "geojson-3.1.0-py3-none-any.whl", hash = "sha256:68a9771827237adb8c0c71f8527509c8f5bef61733aa434cefc9c9d4f0ebe8f3"}, + {file = "geojson-3.1.0.tar.gz", hash = "sha256:58a7fa40727ea058efc28b0e9ff0099eadf6d0965e04690830208d3ef571adac"}, +] + [[package]] name = "geopandas" version = "1.0.1" @@ -543,6 +496,27 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = 
"sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + [[package]] name = "httpcore" version = "1.0.5" @@ -564,63 +538,15 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<0.26.0)"] -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = 
"httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = 
"The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -635,6 +561,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" @@ -652,15 +579,38 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + 
+[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1020,56 +970,64 @@ files = [ [[package]] name = "numpy" -version = "2.0.1" +version = "2.1.1" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, - {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, - {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, - {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, - {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, - {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, - {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, - {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, - {file = 
"numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, - {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, - {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, - {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, - {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, - {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, - {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, - {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, 
- {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, - {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, - {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, - {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, - {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, - {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, - {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"}, - {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"}, - {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"}, - {file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"}, - {file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"}, - {file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"}, - {file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"}, - {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, +python-versions = ">=3.10" +files = [ + {file = "numpy-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8a0e34993b510fc19b9a2ce7f31cb8e94ecf6e924a40c0c9dd4f62d0aac47d9"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7dd86dfaf7c900c0bbdcb8b16e2f6ddf1eb1fe39c6c8cca6e94844ed3152a8fd"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:5889dd24f03ca5a5b1e8a90a33b5a0846d8977565e4ae003a63d22ecddf6782f"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:59ca673ad11d4b84ceb385290ed0ebe60266e356641428c845b39cd9df6713ab"}, + {file = 
"numpy-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ce49a34c44b6de5241f0b38b07e44c1b2dcacd9e36c30f9c2fcb1bb5135db7"}, + {file = "numpy-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913cc1d311060b1d409e609947fa1b9753701dac96e6581b58afc36b7ee35af6"}, + {file = "numpy-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:caf5d284ddea7462c32b8d4a6b8af030b6c9fd5332afb70e7414d7fdded4bfd0"}, + {file = "numpy-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:57eb525e7c2a8fdee02d731f647146ff54ea8c973364f3b850069ffb42799647"}, + {file = "numpy-2.1.1-cp310-cp310-win32.whl", hash = "sha256:9a8e06c7a980869ea67bbf551283bbed2856915f0a792dc32dd0f9dd2fb56728"}, + {file = "numpy-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:d10c39947a2d351d6d466b4ae83dad4c37cd6c3cdd6d5d0fa797da56f710a6ae"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d07841fd284718feffe7dd17a63a2e6c78679b2d386d3e82f44f0108c905550"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5613cfeb1adfe791e8e681128f5f49f22f3fcaa942255a6124d58ca59d9528f"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0b8cc2715a84b7c3b161f9ebbd942740aaed913584cae9cdc7f8ad5ad41943d0"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b49742cdb85f1f81e4dc1b39dcf328244f4d8d1ded95dea725b316bd2cf18c95"}, + {file = "numpy-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d5f8a8e3bc87334f025194c6193e408903d21ebaeb10952264943a985066ca"}, + {file = "numpy-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d51fc141ddbe3f919e91a096ec739f49d686df8af254b2053ba21a910ae518bf"}, + {file = "numpy-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98ce7fb5b8063cfdd86596b9c762bf2b5e35a2cdd7e967494ab78a1fa7f8b86e"}, + {file = "numpy-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash 
= "sha256:24c2ad697bd8593887b019817ddd9974a7f429c14a5469d7fad413f28340a6d2"}, + {file = "numpy-2.1.1-cp311-cp311-win32.whl", hash = "sha256:397bc5ce62d3fb73f304bec332171535c187e0643e176a6e9421a6e3eacef06d"}, + {file = "numpy-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:ae8ce252404cdd4de56dcfce8b11eac3c594a9c16c231d081fb705cf23bd4d9e"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c803b7934a7f59563db459292e6aa078bb38b7ab1446ca38dd138646a38203e"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6435c48250c12f001920f0751fe50c0348f5f240852cfddc5e2f97e007544cbe"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3269c9eb8745e8d975980b3a7411a98976824e1fdef11f0aacf76147f662b15f"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:fac6e277a41163d27dfab5f4ec1f7a83fac94e170665a4a50191b545721c6521"}, + {file = "numpy-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd8f556cdc8cfe35e70efb92463082b7f43dd7e547eb071ffc36abc0ca4699b"}, + {file = "numpy-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b9cd92c8f8e7b313b80e93cedc12c0112088541dcedd9197b5dee3738c1201"}, + {file = "numpy-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:afd9c680df4de71cd58582b51e88a61feed4abcc7530bcd3d48483f20fc76f2a"}, + {file = "numpy-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8661c94e3aad18e1ea17a11f60f843a4933ccaf1a25a7c6a9182af70610b2313"}, + {file = "numpy-2.1.1-cp312-cp312-win32.whl", hash = "sha256:950802d17a33c07cba7fd7c3dcfa7d64705509206be1606f196d179e539111ed"}, + {file = "numpy-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:3fc5eabfc720db95d68e6646e88f8b399bfedd235994016351b1d9e062c4b270"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:046356b19d7ad1890c751b99acad5e82dc4a02232013bd9a9a712fddf8eb60f5"}, + {file = 
"numpy-2.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6e5a9cb2be39350ae6c8f79410744e80154df658d5bea06e06e0ac5bb75480d5"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:d4c57b68c8ef5e1ebf47238e99bf27657511ec3f071c465f6b1bccbef12d4136"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:8ae0fd135e0b157365ac7cc31fff27f07a5572bdfc38f9c2d43b2aff416cc8b0"}, + {file = "numpy-2.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981707f6b31b59c0c24bcda52e5605f9701cb46da4b86c2e8023656ad3e833cb"}, + {file = "numpy-2.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ca4b53e1e0b279142113b8c5eb7d7a877e967c306edc34f3b58e9be12fda8df"}, + {file = "numpy-2.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e097507396c0be4e547ff15b13dc3866f45f3680f789c1a1301b07dadd3fbc78"}, + {file = "numpy-2.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7506387e191fe8cdb267f912469a3cccc538ab108471291636a96a54e599556"}, + {file = "numpy-2.1.1-cp313-cp313-win32.whl", hash = "sha256:251105b7c42abe40e3a689881e1793370cc9724ad50d64b30b358bbb3a97553b"}, + {file = "numpy-2.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:f212d4f46b67ff604d11fff7cc62d36b3e8714edf68e44e9760e19be38c03eb0"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:920b0911bb2e4414c50e55bd658baeb78281a47feeb064ab40c2b66ecba85553"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bab7c09454460a487e631ffc0c42057e3d8f2a9ddccd1e60c7bb8ed774992480"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:cea427d1350f3fd0d2818ce7350095c1a2ee33e30961d2f0fef48576ddbbe90f"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:e30356d530528a42eeba51420ae8bf6c6c09559051887196599d96ee5f536468"}, + {file = "numpy-2.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e8dfa9e94fc127c40979c3eacbae1e61fda4fe71d84869cc129e2721973231ef"}, + {file = "numpy-2.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910b47a6d0635ec1bd53b88f86120a52bf56dcc27b51f18c7b4a2e2224c29f0f"}, + {file = "numpy-2.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:13cc11c00000848702322af4de0147ced365c81d66053a67c2e962a485b3717c"}, + {file = "numpy-2.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53e27293b3a2b661c03f79aa51c3987492bd4641ef933e366e0f9f6c9bf257ec"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7be6a07520b88214ea85d8ac8b7d6d8a1839b0b5cb87412ac9f49fa934eb15d5"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:52ac2e48f5ad847cd43c4755520a2317f3380213493b9d8a4c5e37f3b87df504"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a95ca3560a6058d6ea91d4629a83a897ee27c00630aed9d933dff191f170cd"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:99f4a9ee60eed1385a86e82288971a51e71df052ed0b2900ed30bc840c0f2e39"}, + {file = "numpy-2.1.1.tar.gz", hash = "sha256:d0cf7d55b1051387807405b3898efafa862997b4cba8aa5dbe657be794afeafd"}, ] [[package]] @@ -1108,6 +1066,20 @@ jsonschema = ">=4.19.1,<5.0.0" jsonschema-specifications = ">=2023.5.2,<2024.0.0" rfc3339-validator = "*" +[[package]] +name = "owlrl" +version = "6.0.2" +description = "OWL-RL and RDFS based RDF Closure inferencing for Python" +optional = false +python-versions = "*" +files = [ + {file = "owlrl-6.0.2-py3-none-any.whl", hash = "sha256:57eca06b221edbbc682376c8d42e2ddffc99f61e82c0da02e26735592f08bacc"}, + {file = "owlrl-6.0.2.tar.gz", hash = "sha256:904e3310ff4df15101475776693d2427d1f8244ee9a6a9f9e13c3c57fae90b74"}, +] + +[package.dependencies] +rdflib = ">=6.0.2" + [[package]] name = "oxrdflib" version = "0.3.7" @@ -1219,19 +1191,19 @@ files = [ [[package]] name = "platformdirs" 
-version = "4.2.2" +version = "4.3.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1266,6 +1238,23 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prettytable" +version = "3.11.0" +description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "prettytable-3.11.0-py3-none-any.whl", hash = "sha256:aa17083feb6c71da11a68b2c213b04675c4af4ce9c541762632ca3f2cb3546dd"}, + {file = "prettytable-3.11.0.tar.gz", hash = "sha256:7e23ca1e68bbfd06ba8de98bf553bf3493264c96d5e8a615c0471025deeba722"}, +] + +[package.dependencies] +wcwidth = "*" + +[package.extras] +tests = ["pytest", "pytest-cov", "pytest-lazy-fixtures"] + [[package]] name = "pyarrow" 
version = "17.0.0" @@ -1319,18 +1308,18 @@ test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" typing-extensions = [ {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, @@ -1338,103 +1327,104 @@ typing-extensions = [ [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - 
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", 
hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = 
"sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = 
"sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = 
"sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = 
"sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + 
{file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = 
"pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -1442,13 +1432,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.4.0" +version = "2.5.2" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, + {file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, ] [package.dependencies] @@ -1601,13 +1591,13 @@ files = [ [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = 
"pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -1652,15 +1642,44 @@ files = [ [package.dependencies] certifi = "*" +[[package]] +name = "pyshacl" +version = "0.26.0" +description = "Python SHACL Validator" +optional = false +python-versions = "<4.0.0,>=3.8.1" +files = [ + {file = "pyshacl-0.26.0-py3-none-any.whl", hash = "sha256:a4bef4296d56305a30e0a97509e541ebe4f2cc2d5da73536d0541233e28f2d22"}, + {file = "pyshacl-0.26.0.tar.gz", hash = "sha256:48d44f317cd9aad8e3fdb5df8aa5706fa92dc6b2746419698035e84a320fb89d"}, +] + +[package.dependencies] +html5lib = ">=1.1,<2" +importlib-metadata = {version = ">6", markers = "python_version < \"3.12\""} +owlrl = ">=6.0.2,<7" +packaging = ">=21.3" +prettytable = [ + {version = ">=3.7.0", markers = "python_version >= \"3.12\""}, + {version = ">=3.5.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, +] +rdflib = {version = ">=6.3.2,<8.0", markers = "python_full_version >= \"3.8.1\""} + +[package.extras] +dev-coverage = ["coverage (>6.1,!=6.1.1,<7)", "platformdirs", "pytest-cov (>=2.8.1,<3.0.0)"] +dev-lint = ["black (==24.3.0)", "platformdirs", "ruff (>=0.1.5,<0.2.0)"] +dev-type-checking = ["mypy (>=0.812,<0.900)", "mypy (>=0.900,<0.1000)", "platformdirs", "types-setuptools"] +http = ["sanic (>=22.12,<23)", "sanic-cors (==2.2.0)", "sanic-ext (>=23.3,<23.6)"] +js = ["pyduktape2 (>=0.4.6,<0.5.0)"] + [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = 
"sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1734,13 +1753,13 @@ dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatc [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -1805,6 +1824,26 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rdf2geojson" +version = "0.2.0" +description = "A Python package that converts any RDF to GeoJSON" +optional = false +python-versions = "^3.11" +files = [] +develop = false + +[package.dependencies] +geojson = "^3.1.0" +pyshacl = "^0.26.0" +rdflib = "^7.0.0" + +[package.source] +type = "git" +url = "https://github.com/ashleysommer/rdf2geojson.git" +reference = "v0.2.1" +resolved_reference = "ea11a0a15606065d991cb1939f410c0490d7e3e8" + [[package]] name = "rdflib" version = "7.0.0" @@ -1892,13 +1931,13 @@ six = "*" [[package]] name = "rich" -version = "13.7.1" +version = 
"13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -2053,47 +2092,53 @@ wheel = ">=0.36.1" [[package]] name = "shapely" -version = "2.0.5" +version = "2.0.6" description = "Manipulation and analysis of geometric objects" optional = false python-versions = ">=3.7" files = [ - {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, - {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, - {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, - {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, - {file = 
"shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, - {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, - {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"}, - {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"}, - {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"}, - {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"}, - {file = 
"shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"}, - {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"}, - {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"}, - {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"}, - {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, - {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = 
"sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, - {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, - {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, + {file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"}, + {file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e"}, + {file = "shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e"}, + {file = "shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0"}, + {file = "shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d"}, + {file = 
"shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726"}, + {file = "shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f"}, + {file = "shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805"}, + {file = "shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a"}, + {file = "shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2"}, + {file = "shapely-2.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe"}, + {file = "shapely-2.0.6-cp37-cp37m-win32.whl", hash = "sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc"}, + {file = "shapely-2.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1"}, + {file = "shapely-2.0.6-cp38-cp38-win32.whl", hash = "sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5"}, + {file = "shapely-2.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5"}, + {file = "shapely-2.0.6-cp39-cp39-win32.whl", hash = "sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac"}, + {file = "shapely-2.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8"}, + {file = "shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6"}, ] [package.dependencies] @@ -2103,17 +2148,6 @@ numpy = ">=1.14,<3" docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] test = ["pytest", "pytest-cov"] -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - [[package]] name = "six" version = "1.16.0" @@ -2153,13 +2187,13 @@ rdflib = ">=7.0.0,<8.0.0" [[package]] name = "starlette" -version = "0.37.2" +version = "0.38.5" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, + {file = "starlette-0.38.5-py3-none-any.whl", hash = "sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206"}, + {file = "starlette-0.38.5.tar.gz", hash = "sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077"}, ] [package.dependencies] @@ -2193,23 +2227,6 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -[[package]] -name = "typer" -version = "0.12.3" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -optional = false -python-versions = ">=3.7" -files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2234,13 +2251,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -2253,7 +2270,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "uvicorn" version = "0.30.6" description = "The lightning-fast ASGI server." -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, @@ -2262,71 +2279,20 @@ files = [ [package.dependencies] click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] -[[package]] -name = "uvloop" 
-version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.4" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, + {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, ] 
[package.dependencies] @@ -2339,183 +2305,25 @@ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "s test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] -name = "watchfiles" -version = "0.23.0" -description = "Simple, modern and high performance file watching and code reload in python." +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" optional = false -python-versions = ">=3.8" +python-versions = "*" files = [ - {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"}, - {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"}, - {file = 
"watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"}, - {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"}, - {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"}, - {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"}, - {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"}, - {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"}, - {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"}, - {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"}, - {file = 
"watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"}, - {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"}, - {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"}, - {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = "sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"}, - {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"}, - {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"}, - {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"}, - {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2dddc2487d33e92f8b6222b5fb74ae2cfde5e8e6c44e0248d24ec23befdc5366"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e75695cc952e825fa3e0684a7f4a302f9128721f13eedd8dbd3af2ba450932b8"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2537ef60596511df79b91613a5bb499b63f46f01a11a81b0a2b0dedf645d0a9c"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20b423b58f5fdde704a226b598a2d78165fe29eb5621358fe57ea63f16f165c4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98732ec893975455708d6fc9a6daab527fc8bbe65be354a3861f8c450a632a4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee1f5fcbf5bc33acc0be9dd31130bcba35d6d2302e4eceafafd7d9018c7755ab"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f195338a5a7b50a058522b39517c50238358d9ad8284fd92943643144c0c03"}, - {file = 
"watchfiles-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524fcb8d59b0dbee2c9b32207084b67b2420f6431ed02c18bd191e6c575f5c48"}, - {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0eff099a4df36afaa0eea7a913aa64dcf2cbd4e7a4f319a73012210af4d23810"}, - {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a8323daae27ea290ba3350c70c836c0d2b0fb47897fa3b0ca6a5375b952b90d3"}, - {file = "watchfiles-0.23.0-cp38-none-win32.whl", hash = "sha256:aafea64a3ae698695975251f4254df2225e2624185a69534e7fe70581066bc1b"}, - {file = "watchfiles-0.23.0-cp38-none-win_amd64.whl", hash = "sha256:c846884b2e690ba62a51048a097acb6b5cd263d8bd91062cd6137e2880578472"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a753993635eccf1ecb185dedcc69d220dab41804272f45e4aef0a67e790c3eb3"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6bb91fa4d0b392f0f7e27c40981e46dda9eb0fbc84162c7fb478fe115944f491"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1f67312efa3902a8e8496bfa9824d3bec096ff83c4669ea555c6bdd213aa516"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ca6b71dcc50d320c88fb2d88ecd63924934a8abc1673683a242a7ca7d39e781"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aec5c29915caf08771d2507da3ac08e8de24a50f746eb1ed295584ba1820330"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1733b9bc2c8098c6bdb0ff7a3d7cb211753fecb7bd99bdd6df995621ee1a574b"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02ff5d7bd066c6a7673b17c8879cd8ee903078d184802a7ee851449c43521bdd"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:18e2de19801b0eaa4c5292a223effb7cfb43904cb742c5317a0ac686ed604765"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8ada449e22198c31fb013ae7e9add887e8d2bd2335401abd3cbc55f8c5083647"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3af1b05361e1cc497bf1be654a664750ae61f5739e4bb094a2be86ec8c6db9b6"}, - {file = "watchfiles-0.23.0-cp39-none-win32.whl", hash = "sha256:486bda18be5d25ab5d932699ceed918f68eb91f45d018b0343e3502e52866e5e"}, - {file = "watchfiles-0.23.0-cp39-none-win_amd64.whl", hash = "sha256:d2d42254b189a346249424fb9bb39182a19289a2409051ee432fb2926bad966a"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aba037c1310dd108411d27b3d5815998ef0e83573e47d4219f45753c710f969f"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a96ac14e184aa86dc43b8a22bb53854760a58b2966c2b41580de938e9bf26ed0"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11698bb2ea5e991d10f1f4f83a39a02f91e44e4bd05f01b5c1ec04c9342bf63c"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efadd40fca3a04063d40c4448c9303ce24dd6151dc162cfae4a2a060232ebdcb"}, - {file = 
"watchfiles-0.23.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:556347b0abb4224c5ec688fc58214162e92a500323f50182f994f3ad33385dcb"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1cf7f486169986c4b9d34087f08ce56a35126600b6fef3028f19ca16d5889071"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18de0f82c62c4197bea5ecf4389288ac755896aac734bd2cc44004c56e4ac47"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532e1f2c491274d1333a814e4c5c2e8b92345d41b12dc806cf07aaff786beb66"}, - {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] -[package.dependencies] -anyio = ">=3.0.0" - [[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" optional = false -python-versions = ">=3.8" +python-versions = "*" files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = 
"websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] [[package]] @@ -2532,7 +2340,29 @@ files = [ [package.extras] test = ["pytest (>=6.0.0)", "setuptools (>=65)"] +[[package]] +name = "zipp" +version = "3.20.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.1-py3-none-any.whl", hash = 
"sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[extras] +server = ["uvicorn"] + [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "a00ee7f9965e881d4d1953785ca7197e44838b2ec957d8137767e2ceac11e149" +content-hash = "ff0e8eddd301b0a3b278d383b26ec73d3e45f100df346ff9a5e2bff3fcb36f60" diff --git a/prez/app.py b/prez/app.py index e46a8725..986ff6b2 100755 --- a/prez/app.py +++ b/prez/app.py @@ -1,9 +1,9 @@ import logging +from contextlib import asynccontextmanager from functools import partial from textwrap import dedent from typing import Optional, Dict, Union, Any -import uvicorn from fastapi import FastAPI from fastapi.openapi.utils import get_openapi from rdflib import Graph @@ -25,11 +25,13 @@ URINotFoundException, NoProfilesException, InvalidSPARQLQueryException, + PrefixNotFoundException, ) from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo from prez.routers.identifier import router as identifier_router from prez.routers.management import router as management_router from prez.routers.ogc_router import router as ogc_records_router +from prez.routers.ogc_features_router import features_subapi from prez.routers.sparql import router as sparql_router from prez.services.app_service import ( healthcheck_sparql_endpoints, @@ -37,6 +39,7 @@ create_endpoints_graph, populate_api_info, prefix_initialisation, + 
retrieve_remote_template_queries, ) from prez.services.exception_catchers import ( catch_400, @@ -46,6 +49,7 @@ catch_uri_not_found_exception, catch_no_profiles_exception, catch_invalid_sparql_query, + catch_prefix_not_found_exception, ) from prez.services.generate_profiles import create_profiles_graph from prez.services.prez_logging import setup_logger @@ -81,55 +85,49 @@ async def add_cors_headers(request, call_next): return response -async def app_startup(_settings: Settings, _app: FastAPI): - """ - This function runs at startup and will continually poll the separate backends until their SPARQL endpoints - are available. Initial caching can be triggered within the try block. NB this function does not check that data is - appropriately configured at the SPARQL endpoint(s), only that the SPARQL endpoint(s) are reachable. - """ - setup_logger(_settings) +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup + setup_logger(app.state.settings) log = logging.getLogger("prez") log.info("Starting up") - if _settings.sparql_repo_type == "pyoxigraph": - _app.state.pyoxi_store = get_pyoxi_store() - _app.state.repo = _repo = PyoxigraphRepo(_app.state.pyoxi_store) - await load_local_data_to_oxigraph(_app.state.pyoxi_store) - elif _settings.sparql_repo_type == "oxrdflib": - _app.state.oxrdflib_store = get_oxrdflib_store() - _app.state.repo = _repo = OxrdflibRepo(_app.state.oxrdflib_store) - elif _settings.sparql_repo_type == "remote": - _app.state.http_async_client = await get_async_http_client() - _app.state.repo = _repo = RemoteSparqlRepo(_app.state.http_async_client) + if app.state.settings.sparql_repo_type == "pyoxigraph": + app.state.pyoxi_store = get_pyoxi_store() + app.state.repo = PyoxigraphRepo(app.state.pyoxi_store) + await load_local_data_to_oxigraph(app.state.pyoxi_store) + elif app.state.settings.sparql_repo_type == "oxrdflib": + app.state.oxrdflib_store = get_oxrdflib_store() + app.state.repo = OxrdflibRepo(app.state.oxrdflib_store) + elif 
app.state.settings.sparql_repo_type == "remote": + app.state.http_async_client = await get_async_http_client() + app.state.repo = RemoteSparqlRepo(app.state.http_async_client) await healthcheck_sparql_endpoints() else: raise ValueError( "SPARQL_REPO_TYPE must be one of 'pyoxigraph', 'oxrdflib' or 'remote'" ) - await prefix_initialisation(_repo) - await create_profiles_graph(_repo) - await create_endpoints_graph(_repo) - await count_objects(_repo) + await prefix_initialisation(app.state.repo) + await retrieve_remote_template_queries(app.state.repo) + await create_profiles_graph(app.state.repo) + await create_endpoints_graph(app.state.repo) + await count_objects(app.state.repo) await populate_api_info() - _app.state.pyoxi_system_store = get_system_store() - _app.state.annotations_store = get_annotations_store() - await load_system_data_to_oxigraph(_app.state.pyoxi_system_store) - await load_annotations_data_to_oxigraph(_app.state.annotations_store) + app.state.pyoxi_system_store = get_system_store() + app.state.annotations_store = get_annotations_store() + await load_system_data_to_oxigraph(app.state.pyoxi_system_store) + await load_annotations_data_to_oxigraph(app.state.annotations_store) + yield -async def app_shutdown(_settings: Settings, _app: FastAPI): - """ - persists caches - close async SPARQL clients - """ - log = logging.getLogger("prez") + # Shutdown log.info("Shutting down...") # close all SPARQL async clients - if not _settings.sparql_repo_type: - await _app.state.http_async_client.aclose() + if app.state.settings.sparql_repo_type == "remote": + await app.state.http_async_client.aclose() def assemble_app( @@ -140,7 +138,6 @@ def assemble_app( local_settings: Optional[Settings] = None, **kwargs ): - _settings = local_settings if local_settings is not None else settings if title is None: @@ -157,22 +154,31 @@ def assemble_app( version=version, description=description, contact=contact, + lifespan=lifespan, exception_handlers={ 400: catch_400, 404: 
catch_404, 500: catch_500, ClassNotFoundException: catch_class_not_found_exception, URINotFoundException: catch_uri_not_found_exception, + PrefixNotFoundException: catch_prefix_not_found_exception, NoProfilesException: catch_no_profiles_exception, InvalidSPARQLQueryException: catch_invalid_sparql_query, }, **kwargs ) + app.state.settings = _settings + app.include_router(management_router) app.include_router(ogc_records_router) if _settings.enable_sparql_endpoint: app.include_router(sparql_router) + if _settings.enable_ogc_features: + app.mount( + "/catalogs/{catalogId}/collections/{recordsCollectionId}/features", + features_subapi, + ) app.include_router(identifier_router) app.openapi = partial( prez_open_api_metadata, @@ -193,8 +199,7 @@ def assemble_app( allow_headers=["*"], expose_headers=["*"], ) - app.on_event("startup")(partial(app_startup, _settings=_settings, _app=app)) - app.on_event("shutdown")(partial(app_shutdown, _settings=_settings, _app=app)) + return app @@ -233,4 +238,13 @@ def _get_sparql_service_description(request, format): if __name__ == "__main__": + try: + import uvicorn + except ImportError: + print( + 'Error: Uvicorn is not installed. Install it with \'poetry install --extras "server".' 
+ ) + import sys + + sys.exit(1) uvicorn.run(assemble_app, factory=True, port=settings.port, host=settings.host) diff --git a/prez/cache.py b/prez/cache.py index 4fd02375..acd1d035 100755 --- a/prez/cache.py +++ b/prez/cache.py @@ -2,8 +2,6 @@ from pyoxigraph.pyoxigraph import Store from rdflib import Graph, ConjunctiveGraph, Dataset -from prez.repositories import PyoxigraphRepo - profiles_graph_cache = Dataset() profiles_graph_cache.bind("prez", "https://prez.dev/") @@ -26,7 +24,6 @@ system_store = Store() annotations_store = Store() -annotations_repo = PyoxigraphRepo(annotations_store) oxrdflib_store = Graph(store="Oxigraph") diff --git a/prez/config.py b/prez/config.py index bde3a00f..ce5d6494 100755 --- a/prez/config.py +++ b/prez/config.py @@ -1,6 +1,5 @@ from os import environ from pathlib import Path -from typing import Optional, Union, Any, Dict from typing import Optional, List, Tuple from typing import Union, Any, Dict @@ -80,6 +79,9 @@ class Settings(BaseSettings): EP["system/profile-object"], ] enable_sparql_endpoint: bool = False + enable_ogc_features: bool = True + temporal_predicate: Optional[URIRef] = SDO.temporal + endpoint_to_template_query_filename: Optional[Dict[str, str]] = {} @field_validator("prez_version") @classmethod diff --git a/prez/dependencies.py b/prez/dependencies.py index 7581b39a..a4558d8b 100755 --- a/prez/dependencies.py +++ b/prez/dependencies.py @@ -4,7 +4,7 @@ import httpx from fastapi import Depends, Request, HTTPException from pyoxigraph import Store -from rdflib import Dataset, URIRef, Graph, SKOS +from rdflib import Dataset, URIRef, Graph, SKOS, RDF from sparql_grammar_pydantic import IRI, Var from prez.cache import ( @@ -14,10 +14,17 @@ profiles_graph_cache, endpoints_graph_cache, annotations_store, - annotations_repo, + prez_system_graph, ) from prez.config import settings -from prez.reference_data.prez_ns import ALTREXT, ONT, EP, OGCE +from prez.enums import ( + NonAnnotatedRDFMediaType, + SPARQLQueryMediaType, + 
JSONMediaType, + GeoJSONMediaType, +) +from prez.models.query_params import QueryParams +from prez.reference_data.prez_ns import ALTREXT, ONT, EP, OGCE, OGCFEAT, PREZ from prez.repositories import PyoxigraphRepo, RemoteSparqlRepo, OxrdflibRepo, Repo from prez.services.classes import get_classes_single from prez.services.connegp_service import NegotiatedPMTs @@ -87,7 +94,7 @@ async def get_annotations_repo(): """ A pyoxigraph Store with labels, descriptions etc. from Context Ontologies """ - return annotations_repo + return PyoxigraphRepo(annotations_store) async def load_local_data_to_oxigraph(store: Store): @@ -144,16 +151,19 @@ async def cql_post_parser_dependency(request: Request) -> CQLParser: ) -async def cql_get_parser_dependency(request: Request) -> CQLParser: - if request.query_params.get("filter"): +async def cql_get_parser_dependency( + query_params: QueryParams = Depends(), +) -> CQLParser: + if query_params.filter: try: - query = json.loads(request.query_params["filter"]) + crs = query_params.filter_crs + query = json.loads(query_params.filter) context = json.load( ( Path(__file__).parent / "reference_data/cql/default_context.json" ).open() ) - cql_parser = CQLParser(cql=query, context=context) + cql_parser = CQLParser(cql=query, context=context, crs=crs) cql_parser.generate_jsonld() cql_parser.parse() return cql_parser @@ -171,8 +181,8 @@ async def generate_search_query(request: Request): escaped_term = escape_for_lucene_and_sparql(term) predicates = request.query_params.getlist("predicates") page = request.query_params.get("page", 1) - per_page = request.query_params.get("per_page") - limit = int(per_page) if per_page else settings.search_count_limit + limit = request.query_params.get("limit") + limit = int(limit) if limit else settings.search_count_limit offset = limit * (int(page) - 1) return SearchQueryRegex( @@ -411,6 +421,7 @@ async def get_negotiated_pmts( classes=klasses, listing=listing, system_repo=system_repo, + 
current_path=request.url.path, ) await pmts.setup() return pmts @@ -453,3 +464,108 @@ async def get_profile_nodeshape( kind="profile", focus_node=focus_node, ) + + +async def get_url( + request: Request, +): + return request.url + + +async def get_endpoint_uri( + request: Request, +): + return URIRef(request.scope.get("route").name) + + +async def get_ogc_features_path_params( + request: Request, +): + return request.path_params + + +async def get_ogc_features_mediatype( + request: Request, + endpoint_uri: URIRef = Depends(get_endpoint_uri), +): + if endpoint_uri in [ + OGCFEAT["feature-collections"], + OGCFEAT["feature-collection"], + OGCFEAT["queryables-global"], + OGCFEAT["queryables-local"], + ]: + allowed_mts = [ + mt.value + for mt in [*NonAnnotatedRDFMediaType, *SPARQLQueryMediaType, *JSONMediaType] + ] + default_mt = JSONMediaType.JSON.value + elif endpoint_uri in [OGCFEAT["feature"], OGCFEAT["features"]]: + allowed_mts = [ + mt.value + for mt in [ + *NonAnnotatedRDFMediaType, + *SPARQLQueryMediaType, + *GeoJSONMediaType, + ] + ] + default_mt = GeoJSONMediaType.GEOJSON.value + else: + raise ValueError("Endpoint not recognized") + + qsa_mt = request.query_params.get("_mediatype") + + if qsa_mt: + if qsa_mt in allowed_mts: + return qsa_mt + elif request.headers.get("Accept"): + split_accept = request.headers.get("Accept").split(",") + if any(mt in split_accept for mt in allowed_mts): + for mt in split_accept: + if mt in allowed_mts: + return mt + else: + return default_mt + return default_mt + + +async def get_template_query( + endpoint_uri_type: tuple[URIRef, URIRef] = Depends(get_endpoint_uri_type), +): + endpoint_uri = endpoint_uri_type[0] + filename = settings.endpoint_to_template_query_filename.get(str(endpoint_uri)) + + # check local files + if filename: + return ( + Path(__file__).parent / "reference_data/template_queries" / filename + ).read_text() + + # check prez_system_graph + for s in prez_system_graph.subjects(RDF.type, ONT.TemplateQuery): + 
endpoint_in_sys_graph = prez_system_graph.value(s, ONT.forEndpoint, None) + if str(endpoint_uri) == str(endpoint_in_sys_graph): + template_query = prez_system_graph.value(s, RDF.value, None) + return str(template_query) + return None + + +async def check_unknown_params(request: Request): + known_params = { + "_mediatype", + "page", + "limit", + "datetime", + "bbox", + "filter-lang", + "filter_crs", + "q", + "filter", + "order_by", + "order_by_direction", + } + unknown_params = set(request.query_params.keys()) - known_params + if unknown_params: + raise HTTPException( + status_code=400, + detail=f"Unknown query parameters: {', '.join(unknown_params)}", + ) diff --git a/prez/enums.py b/prez/enums.py new file mode 100644 index 00000000..01d6194c --- /dev/null +++ b/prez/enums.py @@ -0,0 +1,36 @@ +from enum import Enum + + +class NonAnnotatedRDFMediaType(Enum): + LD_JSON = "application/ld+json" + RDF_XML = "application/rdf+xml" + TURTLE = "text/turtle" + N_TRIPLES = "application/n-triples" + + +class AnnotatedRDFMediaType(Enum): + ANOT_LD_JSON = "application/anot+ld+json" + ANOT_RDF_XML = "application/anot+rdf+xml" + ANOT_TURTLE = "text/anot+turtle" + ANOT_N_TRIPLES = "application/anot+n-triples" + + +class SPARQLQueryMediaType(Enum): + SPARQL_QUERY = "application/sparql-query" + + +class JSONMediaType(Enum): + JSON = "application/json" + + +class GeoJSONMediaType(Enum): + GEOJSON = "application/geo+json" + + +class OrderByDirectionEnum(Enum): + ASC = "ASC" + DESC = "DESC" + + +class FilterLangEnum(Enum): + CQL_JSON = "cql2-json" diff --git a/prez/examples/cql/geo_contains.json b/prez/examples/cql/geo_contains.json index 7c2cb8b5..65e08aae 100644 --- a/prez/examples/cql/geo_contains.json +++ b/prez/examples/cql/geo_contains.json @@ -2,7 +2,7 @@ "op": "s_contains", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_contains_filter.json 
b/prez/examples/cql/geo_contains_filter.json index b1541596..387561e6 100644 --- a/prez/examples/cql/geo_contains_filter.json +++ b/prez/examples/cql/geo_contains_filter.json @@ -5,7 +5,7 @@ "op": "s_contains", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_contains_inverse.json b/prez/examples/cql/geo_contains_inverse.json index e6b1b7e0..5354484f 100644 --- a/prez/examples/cql/geo_contains_inverse.json +++ b/prez/examples/cql/geo_contains_inverse.json @@ -5,7 +5,7 @@ "op": "s_contains", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_contains_like.json b/prez/examples/cql/geo_contains_like.json index 9c65bf63..8cae7241 100644 --- a/prez/examples/cql/geo_contains_like.json +++ b/prez/examples/cql/geo_contains_like.json @@ -5,7 +5,7 @@ "op": "s_contains", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_crosses.json b/prez/examples/cql/geo_crosses.json index 120068d8..51ed1707 100644 --- a/prez/examples/cql/geo_crosses.json +++ b/prez/examples/cql/geo_crosses.json @@ -2,7 +2,7 @@ "op": "s_crosses", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "LineString", diff --git a/prez/examples/cql/geo_disjoint.json b/prez/examples/cql/geo_disjoint.json index 4b6913c8..7f67f60b 100644 --- a/prez/examples/cql/geo_disjoint.json +++ b/prez/examples/cql/geo_disjoint.json @@ -2,7 +2,7 @@ "op": "s_disjoint", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_equals.json b/prez/examples/cql/geo_equals.json index fa00371c..d0af2d48 100644 --- a/prez/examples/cql/geo_equals.json +++ b/prez/examples/cql/geo_equals.json @@ -2,7 +2,7 @@ 
"op": "s_equals", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_intersects.json b/prez/examples/cql/geo_intersects.json index ade27977..d935f497 100644 --- a/prez/examples/cql/geo_intersects.json +++ b/prez/examples/cql/geo_intersects.json @@ -2,7 +2,7 @@ "op": "s_intersects", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_overlaps.json b/prez/examples/cql/geo_overlaps.json index f230fc5a..d1311e04 100644 --- a/prez/examples/cql/geo_overlaps.json +++ b/prez/examples/cql/geo_overlaps.json @@ -2,7 +2,7 @@ "op": "s_overlaps", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_touches.json b/prez/examples/cql/geo_touches.json index 335ed762..f0ac846c 100644 --- a/prez/examples/cql/geo_touches.json +++ b/prez/examples/cql/geo_touches.json @@ -2,7 +2,7 @@ "op": "s_touches", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/examples/cql/geo_within.json b/prez/examples/cql/geo_within.json index a79976c0..e4435622 100644 --- a/prez/examples/cql/geo_within.json +++ b/prez/examples/cql/geo_within.json @@ -2,7 +2,7 @@ "op": "s_within", "args": [ { - "property": "http://www.w3.org/ns/shacl#focusNode" + "property": "geometry" }, { "type": "Polygon", diff --git a/prez/exceptions/model_exceptions.py b/prez/exceptions/model_exceptions.py index 8e729247..144a2ba3 100755 --- a/prez/exceptions/model_exceptions.py +++ b/prez/exceptions/model_exceptions.py @@ -24,6 +24,18 @@ def __init__(self, uri: URIRef): super().__init__(self.message) +class PrefixNotFoundException(Exception): + """ + Raised when a requested prefix is not found in the triplestore. 
+ """ + + def __init__(self, prefix: str): + self.message = ( + f'Prefix "{prefix}" not found at endpoint {settings.sparql_endpoint}.' + ) + super().__init__(self.message) + + class NoProfilesException(Exception): """ Raised when no profiles can be found for a resource. diff --git a/prez/models/ogc_features.py b/prez/models/ogc_features.py new file mode 100644 index 00000000..f2b7be95 --- /dev/null +++ b/prez/models/ogc_features.py @@ -0,0 +1,267 @@ +from typing import List +from typing import Optional + +from pydantic import BaseModel, Field + +from prez.config import settings + + +######################################################################################################################## +# Landing Page + + +class Link(BaseModel): + href: str + rel: str + type: str + title: Optional[str] = None + + +class Links(BaseModel): + links: List[Link] + + +class OGCFeaturesLandingPage(BaseModel): + title: str + description: str + links: List[Link] + + +def generate_landing_page_links(url): + url_path = url.path + link_dicts = [ + { + "href": f"{settings.system_uri}{url_path}", + "rel": "self", + "type": "application/json", + "title": "this document", + }, + { + "href": f"{settings.system_uri}{url_path}openapi.json", + "rel": "service-desc", + "type": "application/vnd.oai.openapi+json;version=3.1", + "title": "the API definition", + }, + { + "href": f"{settings.system_uri}{url_path}docs", + "rel": "service-doc", + "type": "text/html", + "title": "the API definition", + }, + { + "href": f"{settings.system_uri}{url_path}conformance", + "rel": "conformance", + "type": "application/json", + "title": "OGC API conformance classes implemented by this server", + }, + { + "href": f"{settings.system_uri}{url_path}collections", + "rel": "data", + "type": "application/json", + "title": "Information about the feature collections", + }, + { + "href": f"{settings.system_uri}{url_path}queryables", + "rel": "http://www.opengis.net/def/rel/ogc/1.0/queryables", + "type": 
"application/schema+json", + "title": "Global Queryables", + }, + ] + return [Link(**link) for link in link_dicts] + + +######################################################################################################################## +# Conformance + + +class ConformanceDeclaration(BaseModel): + conformsTo: List[str] + + +CONFORMANCE_CLASSES = [ + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core", + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30", + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/html", + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson", +] + + +######################################################################################################################## +# Collection and Collections + + +class Extent(BaseModel): + pass + + +class Collection(BaseModel): + id: str = Field( + ..., description="identifier of the collection used, for example, in URIs" + ) + title: Optional[str] = Field( + None, description="human readable title of the collection" + ) + description: Optional[str] = Field( + None, description="a description of the features in the collection" + ) + links: List[Link] + extent: Optional[Extent] = None + itemType: str = Field( + default="feature", + description="indicator about the type of the items in the collection (the default value is 'feature').", + ) + crs: List[str] = Field( + default=["http://www.opengis.net/def/crs/OGC/1.3/CRS84"], + description="the list of coordinate reference systems supported by the service", + ) + + class Config: + json_schema_extra = { + "example": { + "id": "address", + "title": "address", + "description": "An address.", + "links": [ + {"href": "http://data.example.com/buildings", "rel": "item"}, + { + "href": "http://example.com/concepts/buildings.html", + "rel": "describedby", + "type": "text/html", + }, + ], + "crs": [ + "http://www.opengis.net/def/crs/OGC/1.3/CRS84", + "http://www.opengis.net/def/crs/EPSG/0/4326", + ], + } + } + + +class 
Collections(BaseModel): + links: List[Link] + collections: List[Collection] + + class Config: + json_schema_extra = { + "example": { + "links": [ + {"href": "http://data.example.com/collections", "rel": "self"}, + {"href": "http://data.example.com/", "rel": "parent"}, + ], + "collections": [ + { + "id": "address", + "title": "address", + "description": "An address.", + "links": [ + { + "href": "http://data.example.com/collections/address", + "rel": "item", + }, + { + "href": "http://example.com/concepts/address.html", + "rel": "describedby", + "type": "text/html", + }, + ], + } + ], + } + } + + +######################################################################################################################## +# Queryables + +from pydantic import BaseModel, Field, AnyUrl +from typing import Optional, List, Union, Literal +from enum import Enum + + +class GeometryType(str, Enum): + POINT = "Point" + LINESTRING = "LineString" + POLYGON = "Polygon" + MULTIPOINT = "MultiPoint" + MULTILINESTRING = "MultiLineString" + MULTIPOLYGON = "MultiPolygon" + GEOMETRYCOLLECTION = "GeometryCollection" + + +class QueryableProperty(BaseModel): + title: Optional[str] = Field( + None, description="Human readable title of the queryable" + ) + description: Optional[str] = Field(None, description="Description of the queryable") + type: Optional[str] = Field( + default="string", description="Data type of the queryable" + ) + minLength: Optional[int] = Field( + None, description="Minimum length for string properties" + ) + maxLength: Optional[int] = Field( + None, description="Maximum length for string properties" + ) + enum: Optional[List[Union[str, int]]] = Field(None, description="Enumerated values") + pattern: Optional[str] = Field( + None, description="Regex pattern for string properties" + ) + multipleOf: Optional[float] = Field( + None, description="Multiple of for numeric properties" + ) + minimum: Optional[float] = Field( + None, description="Minimum value for numeric 
properties" + ) + exclusiveMinimum: Optional[float] = Field( + None, description="Exclusive minimum for numeric properties" + ) + maximum: Optional[float] = Field( + None, description="Maximum value for numeric properties" + ) + exclusiveMaximum: Optional[float] = Field( + None, description="Exclusive maximum for numeric properties" + ) + format: Optional[Literal["date-time", "date", "time", "duration"]] = Field( + None, description="Format for temporal properties" + ) + items: Optional[Union[List[str], List[int]]] = Field( + None, description="Items for array properties" + ) + + +class SpatialQueryableProperty(QueryableProperty): + type: Literal["object"] = "object" + geometryType: GeometryType = Field(..., description="Type of geometry") + schema: AnyUrl = Field( + ..., description="URL to the GeoJSON schema for the geometry type" + ) + + +class Queryables(BaseModel): + schema: Literal[ + "https://json-schema.org/draft/2019-09/schema", + "http://json-schema.org/draft-07/schema#", + ] = Field(default="https://json-schema.org/draft/2019-09/schema", alias="$schema") + id: str = Field( + ..., alias="$id", description="URI of the resource without query parameters" + ) + type: Literal["object"] = "object" + title: Optional[str] = Field(None, description="Title of the schema") + description: Optional[str] = Field(None, description="Description of the schema") + properties: dict[str, Union[QueryableProperty, SpatialQueryableProperty]] = Field( + ..., description="Queryable properties" + ) + + +######################################################################################################################## +# generate link headers + + +def generate_link_header(links: List[Link]) -> str: + header_links = [] + for link in links: + header_link = f'<{link.href}>; rel="{link.rel}"; type="{link.type}"' + if link.title is not None: + header_link += f'; title="{link.title}"' + header_links.append(header_link) + return ", ".join(header_links) diff --git 
a/prez/models/query_params.py b/prez/models/query_params.py index efd032d3..89ba0e41 100755 --- a/prez/models/query_params.py +++ b/prez/models/query_params.py @@ -1,77 +1,179 @@ import json -from typing import Optional +from datetime import datetime +from typing import Optional, List, Tuple, Union -from fastapi import HTTPException, Query +from fastapi import HTTPException, Query, Depends -# TODO auto generate allowed mediatypes based on mediatypes referenced in profiles -ALLOWED_MEDIA_TYPES = { - "application/sparql-query", - "application/ld+json", - "application/anot+ld+json", - "application/rdf+xml", - "text/turtle", - "text/anot+turtle", - "application/n-triples", - "application/anot+n-triples", -} +from prez.enums import FilterLangEnum, OrderByDirectionEnum + +DateTimeOrUnbounded = Union[datetime, str, None] + + +def reformat_bbox( + bbox: List[str] = Query( + default=[], # Australia + description="Bounding box coordinates", + alias="bbox", + openapi_extra={ + "name": "bbox", + "in": "query", + "required": False, + "schema": { + "type": "array", + "oneOf": [ + {"minItems": 4, "maxItems": 4}, + {"minItems": 6, "maxItems": 6}, + ], + "items": {"type": "number"}, + }, + "style": "form", + "explode": False, + }, + example=["113.338953078, -43.6345972634, 153.569469029, -10.6681857235"], + ) +) -> List[float]: + if bbox: + return [float(x) for x in bbox[0].split(",")] + return None + + +def parse_datetime( + datetime_str: str, +) -> Tuple[DateTimeOrUnbounded, DateTimeOrUnbounded]: + def normalize_and_parse(part: str) -> DateTimeOrUnbounded: + if not part: + return None + if part == "..": + return ".." + normalized = part.replace("t", "T").replace("z", "Z") + return datetime.fromisoformat(normalized) + + parts = datetime_str.split("/") + if len(parts) == 1: + return normalize_and_parse(parts[0]), None + elif len(parts) == 2: + start = normalize_and_parse(parts[0]) + end = normalize_and_parse(parts[1]) + if start == ".." 
and end == "..": + raise ValueError("Both parts of the interval cannot be open") + return start, end + else: + raise ValueError("Invalid datetime format") + + +def validate_datetime( + datetime: Optional[str] = Query( + None, + description=""" Either a date-time or an interval. Date and time expressions adhere to RFC 3339. + Intervals may be bounded or half-bounded (double-dots at start or end). + + Temporal geometries are either a date-time value or a time interval. The parameter value SHALL conform to the following syntax (using ABNF): + + interval-bounded = date-time "/" date-time + interval-half-bounded-start = [".."] "/" date-time + interval-half-bounded-end = date-time "/" [".."] + interval = interval-closed / interval-half-bounded-start / interval-half-bounded-end + datetime = date-time / interval + + Examples: + * A date-time: "2018-02-12T23:20:50Z" + * A bounded interval: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" + * Half-bounded intervals: "2018-02-12T00:00:00Z/.." or "../2018-03-18T12:31:12Z" + + Only features that have a temporal property that intersects the value of + `datetime` are selected. + + If a feature has multiple temporal properties, it is the decision of the + server whether only a single temporal property is used to determine + the extent or all relevant temporal properties.""", + alias="datetime", + openapi_extra={ + "name": "datetime", + "in": "query", + "required": False, + "schema": { + "type": "string", + "examples": [ + "2018-02-12T23:20:50Z", + "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z", + "2018-02-12T00:00:00Z/..", + "../2018-03-18T12:31:12Z", + ], + }, + "style": "form", + "explode": False, + }, + ) +) -> Optional[tuple]: + if datetime: + try: + return parse_datetime(datetime) + except ValueError as e: + raise ValueError(f"Invalid datetime format: {str(e)}") + return None class QueryParams: """ Not using Pydantic as cannot pass descriptions through to OpenAPI docs when using Pydantic. 
See: https://stackoverflow.com/a/64366434/15371702 + + For bbox, require workaround as Pydantic does not support lists of query parameters in the form ?bbox=1,2,3,4 + https://github.com/fastapi/fastapi/issues/2500 """ def __init__( self, - mediatype: Optional[str] = Query( - "text/anot+turtle", alias="_mediatype", description="Requested mediatype" + mediatype: str = Query( + default="text/turtle", alias="_mediatype", description="Requested mediatype" ), - profile: Optional[str] = Query( - None, alias="_profile", description="Requested profile" + page: int = Query( + default=1, ge=1, description="Page number, must be greater than 0" ), - page: Optional[int] = Query( - 1, ge=1, description="Page number, must be greater than 0" - ), - per_page: Optional[int] = Query( - 20, + limit: int = Query( + default=10, ge=1, - le=100, - description="Number of items per page, must be greater than 0", + le=10000, + description="Number of items per page, must be 1<=limit<=10000", + alias="limit", + style="form", + explode=False, ), - q: Optional[str] = Query( - None, description="Search query", example="building" + datetime: Optional[tuple] = Depends(validate_datetime), + bbox: List[float] = Depends(reformat_bbox), + filter_lang: FilterLangEnum = Query( + default="cql2-json", + description="Language of the filter expression", + alias="filter-lang", ), - filter: Optional[str] = Query( - None, - description="CQL JSON expression.", + filter_crs: str = Query( + "http://www.opengis.net/def/crs/OGC/1.3/CRS84", + description="CRS used for the filter expression", ), - order_by: Optional[str] = Query( - None, description="Optional: Field to order by" + q: str = Query(None, description="Search query", example="building"), + filter: str = Query( + default=None, + description="CQL JSON expression.", ), - order_by_direction: Optional[str] = Query( - None, - regex="^(ASC|DESC)$", + order_by: str = Query(default=None, description="Optional: Field to order by"), + order_by_direction: 
OrderByDirectionEnum = Query( + default=None, description="Optional: Order direction, must be 'ASC' or 'DESC'", ), ): self.q = q self.page = page - self.per_page = per_page + self.limit = limit + self.bbox = bbox + self.filter_lang = filter_lang + self.filter_crs = filter_crs + self.datetime = datetime self.order_by = order_by self.order_by_direction = order_by_direction self.filter = filter self.mediatype = mediatype - self.profile = profile - self.validate_mediatype() self.validate_filter() - def validate_mediatype(self): - if self.mediatype and self.mediatype not in ALLOWED_MEDIA_TYPES: - raise HTTPException( - status_code=400, detail=f"Invalid media type: {self.mediatype}" - ) - def validate_filter(self): if self.filter: try: diff --git a/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.json b/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.json new file mode 100644 index 00000000..3506dfd7 --- /dev/null +++ b/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.json @@ -0,0 +1,1284 @@ +{ + "t_after": { + "definition": [ + "https://www.w3.org/TR/owl-time/#time:after", + ",https://www.w3.org/TR/owl-time/#time:intervalAfter" + ], + "interval_interval": { + "BB_BB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_BU": false, + "BU_UB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + }, + "instant_interval": { + "I_BB": { + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ] + ] + }, + "I_BU": false, + "I_UB": { + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ] + ] + }, + "I_UU": false + }, + 
"interval_instant": { + "BB_I": { + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ] + ] + }, + "BU_I": { + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ] + ] + }, + "UB_I": false, + "UU_I": false + }, + "instant_instant": { + "I_I": { + "conditions": [ + [ + "t1_instant", + ">", + "t2_instant" + ] + ] + } + } + }, + "t_before": { + "definition": [ + "https://www.w3.org/TR/owl-time/#time:before", + "https://www.w3.org/TR/owl-time/#time:intervalBefore" + ], + "interval_interval": { + "BB_BB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "BB_BU": false, + "BB_UB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "BU_BU": false, + "BU_UB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + }, + "instant_interval": { + "I_BB": { + "conditions": [ + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_BU": false, + "I_UB": { + "conditions": [ + [ + "t1_instant", + "<", + "t2_end" + ] + ] + }, + "I_UU": false + }, + "interval_instant": { + "BB_I": false, + "BU_I": false, + "UB_I": { + "conditions": [ + [ + "t1_start", + "<", + "t2_instant" + ] + ] + }, + "UU_I": { + "conditions": [ + [ + "t1_start", + "<", + "t2_instant" + ] + ] + } + }, + "instant_instant": { + "I_I": { + "conditions": [ + [ + "t1_instant", + "<", + "t2_instant" + ] + ] + } + } + }, + "t_disjoint": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalDisjoint", + "interval_interval": { + "BB_BB": { + "logic": "OR", + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ], + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BB_BU": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "BB_UB": { + "conditions": [ + [ + "t1_start", + 
">", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_BU": false, + "BU_UB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_UU": false, + "UB_BB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "UB_BU": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + }, + "instant_interval": { + "I_BB": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_BU": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_UB": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_UU": false + }, + "interval_instant": { + "BB_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "BU_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "UB_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "UU_I": false + }, + "instant_instant": { + "I_I": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_instant" + ], + [ + "t1_instant", + "<", + "t2_instant" + ] + ] + } + } + }, + "t_equals": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalEquals", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "=", + "t2_start" + ], + [ + "t1_end", + "=", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": false, + "BB_UU": false, + "BU_BB": false, + "BU_BU": { + "logic": "AND", + 
"conditions": [ + [ + "t1_start", + "=", + "t2_start" + ] + ] + }, + "BU_UB": false, + "BU_UU": false, + "UB_BB": false, + "UB_BU": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "=", + "t2_start" + ] + ] + }, + "UB_UB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "=", + "t2_start" + ] + ] + }, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": true + }, + "instant_instant": { + "I_I": { + "logic": "AND", + "conditions": [ + [ + "t1_instant", + "=", + "t2_instant" + ] + ] + } + }, + "instant_interval": { + "I_BB": false, + "I_BU": false, + "I_UB": false, + "I_UU": false + }, + "interval_instant": { + "BB_I": false, + "BU_I": false, + "UB_I": false, + "UU_I": false + } + }, + "t_intersects": { + "negated": true, + "definition": "https://www.w3.org/TR/owl-time/#time:intervalDisjoint", + "interval_interval": { + "BB_BB": { + "logic": "OR", + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ], + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BB_BU": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "BB_UB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_BU": false, + "BU_UB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_end" + ] + ] + }, + "BU_UU": false, + "UB_BB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "UB_BU": { + "conditions": [ + [ + "t1_end", + "<", + "t2_start" + ] + ] + }, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + }, + "instant_interval": { + "I_BB": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_BU": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_UB": { + "logic": "OR", 
+ "conditions": [ + [ + "t1_instant", + ">", + "t2_end" + ], + [ + "t1_instant", + "<", + "t2_start" + ] + ] + }, + "I_UU": false + }, + "interval_instant": { + "BB_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "BU_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "UB_I": { + "logic": "OR", + "conditions": [ + [ + "t1_start", + ">", + "t2_instant" + ], + [ + "t1_end", + "<", + "t2_instant" + ] + ] + }, + "UU_I": false + }, + "instant_instant": { + "I_I": { + "logic": "OR", + "conditions": [ + [ + "t1_instant", + ">", + "t2_instant" + ], + [ + "t1_instant", + "<", + "t2_instant" + ] + ] + } + } + }, + "t_contains": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalContains", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": false, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ] + ] + }, + "BU_BU": { + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ] + ] + }, + "BU_UB": false, + "BU_UU": false, + "UB_BB": { + "conditions": [ + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "UB_BU": false, + "UB_UB": { + "logic": "AND", + "conditions": [ + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "UB_UU": false, + "UU_BB": true, + "UU_BU": true, + "UU_UB": true, + "UU_UU": false + } + }, + "t_during": { + "definition": "Logical inverse of https://www.w3.org/TR/owl-time/#time:intervalContains", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ], + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "BB_BU": true, + "BB_UB": true, + "BB_UU": true, + "BU_BB": { + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ] + ] + }, + "BU_BU": 
{ + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ] + ] + }, + "BU_UB": true, + "BU_UU": true, + "UB_BB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "UB_BU": true, + "UB_UB": { + "conditions": [ + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "UB_UU": true, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_finishes": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalFinishes", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ], + [ + "t1_end", + "=", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": { + "conditions": [ + [ + "t1_end", + "=", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": false, + "BU_BU": false, + "BU_UB": false, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_finishedBy": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalFinishedBy", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + "=", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": false, + "BB_UU": false, + "BU_BB": false, + "BU_BU": false, + "BU_UB": false, + "BU_UU": false, + "UB_BB": { + "conditions": [ + [ + "t1_end", + "=", + "t2_end" + ] + ] + }, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_meets": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalMeets", + "interval_interval": { + "BB_BB": { + "conditions": [ + [ + "t1_end", + "=", + "t2_start" + ] + ] + }, + "BB_BU": { + "conditions": [ + [ + "t1_end", + "=", + "t2_start" + ] + ] + }, + "BB_UB": false, + "BB_UU": false, + "BU_BB": false, + "BU_BU": false, + "BU_UB": false, + "BU_UU": false, + "UB_BB": { + "conditions": [ + [ + "t1_end", + "=", + 
"t2_start" + ] + ] + }, + "UB_BU": { + "conditions": [ + [ + "t1_end", + "=", + "t2_start" + ] + ] + }, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_metBy": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalMetBy", + "interval_interval": { + "BB_BB": { + "conditions": [ + [ + "t1_start", + "=", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": { + "conditions": [ + [ + "t1_start", + "=", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + "=", + "t2_end" + ] + ] + }, + "BU_BU": false, + "BU_UB": { + "conditions": [ + [ + "t1_start", + "=", + "t2_end" + ] + ] + }, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_overlappedBy": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalOverlappedBy", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ], + [ + "t1_start", + "<", + "t2_end" + ], + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_end" + ], + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "BB_UU": false, + "BU_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ], + [ + "t1_start", + "<", + "t2_end" + ] + ] + }, + "BU_BU": { + "conditions": [ + [ + "t1_start", + ">", + "t2_start" + ] + ] + }, + "BU_UB": false, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_overlaps": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalOverlaps", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + ">", + 
"t2_start" + ], + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "BB_BU": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + ">", + "t2_start" + ] + ] + }, + "BB_UB": false, + "BB_UU": false, + "BU_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + ">", + "t2_start" + ] + ] + }, + "BU_BU": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "<", + "t2_start" + ], + [ + "t1_end", + ">", + "t2_start" + ] + ] + }, + "BU_UB": false, + "BU_UU": false, + "UB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_end", + ">", + "t2_start" + ], + [ + "t1_end", + "<", + "t2_end" + ] + ] + }, + "UB_BU": { + "conditions": [ + [ + "t1_end", + ">", + "t2_start" + ] + ] + }, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_startedBy": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalStartedBy", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + [ + "t1_start", + "=", + "t2_start" + ], + [ + "t1_end", + ">", + "t2_end" + ] + ] + }, + "BB_BU": false, + "BB_UB": false, + "BB_UU": false, + "BU_BB": { + "conditions": [ + [ + "t1_start", + "=", + "t2_start" + ] + ] + }, + "BU_BU": false, + "BU_UB": false, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + "UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + }, + "t_starts": { + "definition": "https://www.w3.org/TR/owl-time/#time:intervalStarts", + "interval_interval": { + "BB_BB": { + "logic": "AND", + "conditions": [ + ["t1_start", "=", "t2_start"], + ["t1_end", "<", "t2_end"] + ] + }, + "BB_BU": { + "conditions": [ + ["t1_start", "=", "t2_start"] + ] + }, + "BB_UB": false, + "BB_UU": false, + "BU_BB": false, + "BU_BU": false, + "BU_UB": false, + "BU_UU": false, + "UB_BB": false, + "UB_BU": false, + "UB_UB": false, + "UB_UU": false, + "UU_BB": false, + 
"UU_BU": false, + "UU_UB": false, + "UU_UU": false + } + } +} \ No newline at end of file diff --git a/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.schema.json b/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.schema.json new file mode 100644 index 00000000..b386105e --- /dev/null +++ b/prez/reference_data/cql/bounded_temporal_interval_relation_matrix.schema.json @@ -0,0 +1,93 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "description": "A collection of temporal interval relations and their definitions", + "patternProperties": { + "^T_[A-Z_]+$": { + "type": "object", + "description": "A temporal interval relation", + "properties": { + "definition": { + "type": "string", + "format": "uri", + "description": "A URL linking to the formal definition of the temporal relation" + }, + "interval_interval": { + "$ref": "#/$defs/comparisonSet" + }, + "instant_interval": { + "$ref": "#/$defs/comparisonSet" + }, + "interval_instant": { + "$ref": "#/$defs/comparisonSet" + }, + "instant_instant": { + "$ref": "#/$defs/comparisonSet" + } + }, + "required": ["definition", "interval_interval"], + "additionalProperties": false + } + }, + "additionalProperties": false, + "$defs": { + "temporalOperand": { + "type": "string", + "enum": ["t1_start", "t1_end", "t2_start", "t2_end", "t1_instant", "t2_instant"], + "description": "A temporal operand representing a start, end, or instant of an interval" + }, + "comparisonOperator": { + "type": "string", + "enum": ["<", ">", "="], + "description": "An operator for comparing temporal operands" + }, + "condition": { + "type": "array", + "description": "A single condition in the form [left_operand, operator, right_operand]", + "items": [ + { "$ref": "#/$defs/temporalOperand" }, + { "$ref": "#/$defs/comparisonOperator" }, + { "$ref": "#/$defs/temporalOperand" } + ], + "minItems": 3, + "maxItems": 3 + }, + "conditionSet": { + "type": "object", + "properties": { + "logic": { 
+ "type": "string", + "enum": ["AND", "OR"], + "description": "The logical relationship between the conditions" + }, + "conditions": { + "type": "array", + "items": { + "$ref": "#/$defs/condition" + }, + "minItems": 1 + } + }, + "required": ["conditions"], + "additionalProperties": false + }, + "comparisonSet": { + "type": "object", + "patternProperties": { + "^[BUI]{1,2}_[BUI]{1,2}$": { + "oneOf": [ + { + "$ref": "#/$defs/conditionSet" + }, + { + "type": "boolean", + "enum": [false], + "description": "Indicates that the relation is impossible for this combination of bounded/unbounded intervals or instants" + } + ] + } + }, + "additionalProperties": false + } + } +} \ No newline at end of file diff --git a/prez/reference_data/cql/default_context.json b/prez/reference_data/cql/default_context.json index 4ca742ca..c720041f 100644 --- a/prez/reference_data/cql/default_context.json +++ b/prez/reference_data/cql/default_context.json @@ -20,5 +20,17 @@ }, "type": { "@id": "sf:type" + }, + "date": { + "@id": "cql:date" + }, + "datetime": { + "@id": "cql:datetime" + }, + "timestamp": { + "@id": "cql:timestamp" + }, + "interval": { + "@id": "cql:interval" } } diff --git a/prez/reference_data/cql/geo_function_mapping.py b/prez/reference_data/cql/geo_function_mapping.py index 23c4be3e..b30650ac 100755 --- a/prez/reference_data/cql/geo_function_mapping.py +++ b/prez/reference_data/cql/geo_function_mapping.py @@ -1,12 +1,4 @@ from rdflib import Namespace -from shapely import ( - Polygon, - MultiPolygon, - Point, - MultiPoint, - LineString, - MultiLineString, -) GEOF = Namespace("http://www.opengis.net/def/function/geosparql/") @@ -20,12 +12,3 @@ "s_touches": GEOF.sfTouches, "s_crosses": GEOF.sfCrosses, } - -cql_to_shapely_mapping = { - "Polygon": Polygon, - "MultiPolygon": MultiPolygon, - "Point": Point, - "MultiPoint": MultiPoint, - "LineString": LineString, - "MultiLineString": MultiLineString, -} diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl 
b/prez/reference_data/endpoints/endpoint_metadata.ttl index 71a3cc3b..6ae6d6dc 100644 --- a/prez/reference_data/endpoints/endpoint_metadata.ttl +++ b/prez/reference_data/endpoints/endpoint_metadata.ttl @@ -59,11 +59,6 @@ ogce:item-object ont:relevantShapes ex:Feature , ex:ConceptSchemeConcept , ex:CollectionConcept , ex:Resource ; . -ogce:cql-queryables - a ont:ListingEndpoint ; - ont:relevantShapes ex:queryables ; -. - ogce:search a ont:ListingEndpoint ; ont:relevantShapes ex:Search ; diff --git a/prez/reference_data/endpoints/endpoint_nodeshapes.ttl b/prez/reference_data/endpoints/endpoint_nodeshapes.ttl index c72b9885..33afc14d 100644 --- a/prez/reference_data/endpoints/endpoint_nodeshapes.ttl +++ b/prez/reference_data/endpoints/endpoint_nodeshapes.ttl @@ -19,33 +19,21 @@ ex:Catalogs sh:property [ sh:path dcterms:hasPart ; sh:or ( - [ sh:class dcat:Resource ] - [ sh:class geo:FeatureCollection ] [ sh:class skos:ConceptScheme ] [ sh:class skos:Collection ] + [ sh:class dcat:Dataset ] ) ; ] . ex:Collections a sh:NodeShape ; ont:hierarchyLevel 2 ; - sh:targetClass geo:FeatureCollection , skos:ConceptScheme , skos:Collection , dcat:Resource ; + sh:targetClass skos:ConceptScheme , skos:Collection , dcat:Dataset ; sh:property [ sh:path [ sh:inversePath dcterms:hasPart ] ; sh:class dcat:Catalog ; ] . -ex:Feature - a sh:NodeShape ; - ont:hierarchyLevel 3 ; - sh:targetClass geo:Feature ; - sh:property [ - sh:path [ sh:inversePath rdfs:member ] ; - sh:class geo:FeatureCollection ; - ] , [ - sh:path ( [sh:inversePath rdfs:member ] [ sh:inversePath dcterms:hasPart ] ); - sh:class dcat:Catalog ; - ] . 
ex:ConceptSchemeConcept a sh:NodeShape ; @@ -74,10 +62,10 @@ ex:CollectionConcept ex:Resource a sh:NodeShape ; ont:hierarchyLevel 3 ; - sh:targetClass rdf:Resource ; + sh:targetClass geo:FeatureCollection ; sh:property [ sh:path [ sh:inversePath dcterms:hasPart ] ; - sh:class dcat:Resource ; + sh:class dcat:Dataset ; ] , [ sh:path ( [ sh:inversePath dcterms:hasPart ] [ sh:inversePath dcterms:hasPart ] ); sh:class dcat:Catalog ; @@ -89,32 +77,6 @@ ex:Profiles sh:targetClass prof:Profile ; . -ex:queryables a sh:NodeShape ; - ont:hierarchyLevel 1 ; - sh:rule [ sh:subject "?focus_node" ; - sh:predicate ; - sh:object ] ; - ont:deliversClasses prez:QueryablesList ; - sh:target [ sh:select """SELECT DISTINCT ?focus_node - WHERE { - ?s a ?class ; - ?focus_node ?o . - VALUES ?class { - dcat:Catalog - dcat:Dataset - dcat:Resource - skos:ConceptScheme - skos:Collection - skos:Concept - geo:FeatureCollection - geo:Feature - rdf:Resource - } - }""" ] ; - shext:limit 100 ; - shext:offset 0 ; -. - ex:AltProfilesForListing a sh:NodeShape ; ont:hierarchyLevel 1 ; diff --git a/prez/reference_data/endpoints/features_metadata.ttl b/prez/reference_data/endpoints/features_metadata.ttl new file mode 100644 index 00000000..2765da65 --- /dev/null +++ b/prez/reference_data/endpoints/features_metadata.ttl @@ -0,0 +1,36 @@ +@prefix ex: . +@prefix ogce: . +@prefix ogcfeat: . +@prefix ont: . +@prefix prez: . +@prefix sys: . + +ogcfeat:feature-collections + a ont:ListingEndpoint ; + ont:relevantShapes ex:FeatureCollections ; +. + +ogcfeat:feature-collection + a ont:ObjectEndpoint ; + ont:relevantShapes ex:FeatureCollections ; +. + +ogcfeat:features + a ont:ListingEndpoint ; + ont:relevantShapes ex:Feature ; +. + +ogcfeat:feature + a ont:ObjectEndpoint ; + ont:relevantShapes ex:Feature ; +. + +ogcfeat:queryables-global + a ont:ListingEndpoint ; + ont:relevantShapes ex:FeatureCollections ; +. + +ogcfeat:queryables-local + a ont:ListingEndpoint ; + ont:relevantShapes ex:Feature ; +. 
\ No newline at end of file diff --git a/prez/reference_data/endpoints/features_nodeshapes.ttl b/prez/reference_data/endpoints/features_nodeshapes.ttl new file mode 100644 index 00000000..194de77f --- /dev/null +++ b/prez/reference_data/endpoints/features_nodeshapes.ttl @@ -0,0 +1,37 @@ +@prefix void: . +@prefix dcat: . +@prefix dcterms: . +@prefix ex: . +@prefix geo: . +@prefix ont: . +@prefix prez: . +@prefix rdfs: . +@prefix sh: . +@prefix xsd: . +@prefix skos: . + +ex:FeatureCollections + a sh:NodeShape ; + sh:property [ sh:path void:inDataset ; + sh:class dcat:Dataset ; ] ; + sh:targetClass geo:FeatureCollection ; + ont:hierarchyLevel 3 . + +ex:Feature + a sh:NodeShape ; + sh:property [ sh:class geo:FeatureCollection ; + sh:path [ sh:inversePath rdfs:member ] ] ; + sh:property [ sh:class dcat:Dataset ; + sh:path ( [ sh:inversePath rdfs:member ] void:inDataset ) ] ; + sh:targetClass geo:Feature ; + ont:hierarchyLevel 4 . + +ex:Object + a sh:NodeShape ; + ont:hierarchyLevel 3 . + +ex:Queryables + a sh:NodeShape ; + sh:targetClass prez:Queryable ; + ont:hierarchyLevel 3 , 5 +. diff --git a/prez/reference_data/prez_ns.py b/prez/reference_data/prez_ns.py index 6856aadf..cc49e919 100755 --- a/prez/reference_data/prez_ns.py +++ b/prez/reference_data/prez_ns.py @@ -7,3 +7,4 @@ EP = Namespace("https://prez.dev/endpoint/") SHEXT = Namespace("http://example.com/shacl-extension#") OGCE = Namespace(PREZ["endpoint/extended-ogc-records/"]) +OGCFEAT = Namespace("http://www.opengis.net/ogcapi-features-1/1.0/") diff --git a/prez/reference_data/profiles/ogc_features.ttl b/prez/reference_data/profiles/ogc_features.ttl new file mode 100644 index 00000000..4ae80fd7 --- /dev/null +++ b/prez/reference_data/profiles/ogc_features.ttl @@ -0,0 +1,95 @@ +@prefix altr-ext: . +@prefix dcterms: . +@prefix geo: . +@prefix prez: . +@prefix prof: . +@prefix rdf: . +@prefix rdfs: . +@prefix sh: . +@prefix shext: . +@prefix xsd: . 
+ +prez:OGCFeaturesProfile a prof:Profile ; + dcterms:description "A system profile for OGC Features conformant API" ; + dcterms:identifier "ogcfeat"^^xsd:token ; + dcterms:title "OGC Features Profile" ; + altr-ext:constrainsClass prez:CatPrez ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasNodeShape + [ + a sh:NodeShape ; + altr-ext:hasDefaultProfile prez:OGCFeaturesMinimalProps ; + sh:targetClass + geo:FeatureCollection, + prez:CQLObjectList, + prez:SearchResult + ], + [ + a sh:NodeShape ; + altr-ext:hasDefaultProfile prez:OGCFeaturesAllProps ; + sh:targetClass + geo:Feature , + rdf:Resource + ] ; + altr-ext:hasResourceFormat + "application/json", + "application/ld+json", + "application/rdf+xml", + "text/anot+turtle", + "text/turtle" . + +prez:OGCFeaturesMinimalProps a + prof:Profile, + sh:NodeShape, + prez:ListingProfile , + prez:ObjectProfile ; + dcterms:description "A profile showing only the classes of items" ; + dcterms:identifier "ogcfeat-minimal"^^xsd:token ; + dcterms:title "OGC Features Minimal Profile" ; + altr-ext:constrainsClass + geo:Feature, + geo:FeatureCollection, + rdf:Resource, + prof:Profile , + prez:Queryable ; + altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; + altr-ext:hasResourceFormat + "application/anot+ld+json", + "application/ld+json", + "application/n-triples", + "application/rdf+xml", + "text/anot+turtle", + "text/turtle" ; + sh:property + [ sh:path rdf:type ] ; +. + +prez:OGCFeaturesAllProps a + prof:Profile, + sh:NodeShape, + prez:ListingProfile , + prez:ObjectProfile ; + dcterms:description "A profile returning all properties and blank nodes to a depth of two." 
; + dcterms:identifier "ogcfeat-all"^^xsd:token ; + dcterms:title "OGC Features All Properties Profile" ; + altr-ext:constrainsClass + geo:Feature, + geo:FeatureCollection, + rdf:Resource, + prof:Profile, + prez:CQLObjectList, + prez:SearchResult ; + altr-ext:hasDefaultResourceFormat "application/geo+json" ; + altr-ext:hasResourceFormat + "application/geo+json", + "application/anot+ld+json", + "application/ld+json", + "application/rdf+xml", + "text/anot+turtle", + "text/turtle" ; + sh:property [ sh:path shext:allPredicateValues ] ; + shext:bnode-depth 2 ; +. + + + diff --git a/prez/reference_data/profiles/ogc_records_profile.ttl b/prez/reference_data/profiles/ogc_records_profile.ttl index 81f7eafd..072c29af 100644 --- a/prez/reference_data/profiles/ogc_records_profile.ttl +++ b/prez/reference_data/profiles/ogc_records_profile.ttl @@ -1,7 +1,6 @@ PREFIX altr-ext: PREFIX dcat: PREFIX dcterms: -PREFIX geo: PREFIX owl: PREFIX prez: PREFIX prof: @@ -24,135 +23,154 @@ prez:OGCRecordsProfile altr-ext:constrainsClass prez:CatPrez ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:hasResourceFormat - "application/json" , - "application/ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , + "application/json", + "application/ld+json", + "application/rdf+xml", + "text/anot+turtle", "text/turtle" ; - altr-ext:hasNodeShape [ - a sh:NodeShape ; - sh:targetClass dcat:Catalog , dcat:Resource , skos:Concept , geo:Feature , geo:FeatureCollection - , skos:Collection , rdf:Resource , prez:SearchResult , prez:CQLObjectList ; - altr-ext:hasDefaultProfile prez:OGCListingProfile - ] , [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:hasDefaultProfile prez:OGCSchemesListProfile - ] , [ - a sh:NodeShape ; - sh:targetClass skos:ConceptScheme ; - altr-ext:hasDefaultProfile prez:OGCSchemesObjectProfile - ] , [ - a sh:NodeShape ; - sh:targetClass dcat:Catalog , dcat:Resource , skos:Concept , geo:Feature , geo:FeatureCollection - , skos:Collection , 
rdf:Resource ; - altr-ext:hasDefaultProfile prez:OGCItemProfile - ] - . + altr-ext:hasNodeShape + [ + a sh:NodeShape ; + sh:targetClass + dcat:Catalog, + dcat:Resource, + skos:Concept + , + skos:Collection, + rdf:Resource, + prez:SearchResult, + prez:CQLObjectList ; + altr-ext:hasDefaultProfile prez:OGCListingProfile + ], + [ + a sh:NodeShape ; + sh:targetClass skos:ConceptScheme ; + altr-ext:hasDefaultProfile prez:OGCSchemesListProfile + ], + [ + a sh:NodeShape ; + sh:targetClass skos:ConceptScheme ; + altr-ext:hasDefaultProfile prez:OGCSchemesObjectProfile + ], + [ + a sh:NodeShape ; + sh:targetClass + dcat:Catalog, + dcat:Resource, + skos:Concept + , + skos:Collection, + rdf:Resource ; + altr-ext:hasDefaultProfile prez:OGCItemProfile + ] +. prez:OGCListingProfile - a prof:Profile , prez:ListingProfile , sh:NodeShape ; + a prof:Profile, prez:ListingProfile, sh:NodeShape ; dcterms:identifier "ogc-listing"^^xsd:token ; dcterms:title "OGC Listing Profile" ; dcterms:description "A profile for listing different kinds of items relevant to an OGC Records API" ; altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , + "application/ld+json", + "application/anot+ld+json", + "application/rdf+xml", + "text/anot+turtle", "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - altr-ext:constrainsClass dcat:Catalog , skos:Collection , geo:Feature , geo:FeatureCollection , skos:Concept , - dcat:Resource , prof:Profile , prez:SearchResult , prez:CQLObjectList , rdf:Resource ; + altr-ext:constrainsClass + dcat:Catalog, + skos:Collection, + skos:Concept, + dcat:Resource, + prof:Profile, + prez:SearchResult, + prez:CQLObjectList, + rdf:Resource ; sh:property [ sh:path rdf:type ] - . +. 
prez:OGCItemProfile - a prof:Profile , prez:ObjectProfile , sh:NodeShape ; + a prof:Profile, prez:ObjectProfile, sh:NodeShape ; dcterms:title "OGC Object Profile" ; dcterms:description "A profile for individual OGC Records API items" ; dcterms:identifier "ogc-item"^^xsd:token ; altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , - "text/turtle" , - "application/n-triples"; + "application/ld+json", + "application/anot+ld+json", + "application/rdf+xml", + "text/anot+turtle", + "text/turtle", + "application/n-triples" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; - sh:property [ - sh:path shext:allPredicateValues ; - ] , - [ - sh:maxCount 0 ; - sh:path ( - sh:union ( - dcterms:hasPart - rdfs:member - ) - ) - ] ; + sh:property + [ + sh:path shext:allPredicateValues ; + ], + [ + sh:maxCount 0 ; + sh:path dcterms:hasPart , rdfs:member ; + ] ; shext:bnode-depth 2 ; - altr-ext:constrainsClass dcat:Catalog , - dcat:Resource , - skos:ConceptScheme, - skos:Collection , - skos:Concept , - geo:FeatureCollection , - geo:Feature , - rdf:Resource , - prof:Profile ; - . + altr-ext:constrainsClass + dcat:Dataset, + dcat:Catalog, + dcat:Resource, + skos:ConceptScheme, + skos:Collection, + skos:Concept, + rdf:Resource, + prof:Profile ; +. 
prez:OGCSchemesListProfile - a prof:Profile , prez:ListingProfile , sh:NodeShape ; + a prof:Profile, prez:ListingProfile, sh:NodeShape ; dcterms:title "OGC Concept Scheme Listing Profile" ; dcterms:description "A profile for listing SKOS Concept Schemes" ; dcterms:identifier "ogc-schemes-listing"^^xsd:token ; altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , + "application/ld+json", + "application/anot+ld+json", + "application/rdf+xml", + "text/anot+turtle", "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:constrainsClass skos:ConceptScheme ; sh:property [ - sh:minCount 0 ; sh:path ( sh:union ( - dcterms:publisher - reg:status - ( prov:qualifiedDerivation prov:hadRole ) - ( prov:qualifiedDerivation prov:entity ) + rdf:type + dcterms:publisher + reg:status + ( prov:qualifiedDerivation prov:hadRole ) + ( prov:qualifiedDerivation prov:entity ) ) - ) - ], [ - sh:path rdf:type - ] - . + ) + ] ; +. prez:OGCSchemesObjectProfile - a prof:Profile , prez:ObjectProfile , sh:NodeShape ; + a prof:Profile, prez:ObjectProfile, sh:NodeShape ; dcterms:title "OGC Concept Scheme Object Profile" ; dcterms:description "A profile for SKOS Concept Schemes" ; dcterms:identifier "ogc-schemes-object"^^xsd:token ; altr-ext:hasResourceFormat - "application/ld+json" , - "application/anot+ld+json" , - "application/rdf+xml" , - "text/anot+turtle" , + "application/ld+json", + "application/anot+ld+json", + "application/rdf+xml", + "text/anot+turtle", "text/turtle" ; altr-ext:hasDefaultResourceFormat "text/anot+turtle" ; altr-ext:constrainsClass skos:ConceptScheme ; - sh:property [ - sh:path shext:allPredicateValues - ] , [ - sh:maxCount 0 ; - sh:path skos:hasTopConcept ; + sh:property + [ + sh:path shext:allPredicateValues + ], + [ + sh:maxCount 0 ; + sh:path skos:hasTopConcept ; ] ; shext:bnode-depth 2 ; - . +. 
diff --git a/prez/renderers/renderer.py b/prez/renderers/renderer.py index 0e765e8f..14ead230 100755 --- a/prez/renderers/renderer.py +++ b/prez/renderers/renderer.py @@ -6,6 +6,7 @@ from fastapi import status from fastapi.exceptions import HTTPException from fastapi.responses import StreamingResponse +from rdf2geojson import convert from rdflib import Graph, URIRef, RDF from prez.cache import prefix_graph @@ -36,7 +37,8 @@ async def return_from_graph( return await return_rdf(graph, mediatype, profile_headers) elif profile == URIRef("https://w3id.org/profile/dd"): - graph = await return_annotated_rdf(graph, profile, repo) + annotations_graph = await return_annotated_rdf(graph, profile, repo) + graph.__iadd__(annotations_graph) try: # TODO: Currently, data is generated in memory, instead of in a streaming manner. @@ -64,10 +66,16 @@ async def return_from_graph( except NotFoundError as err: raise HTTPException(status.HTTP_404_NOT_FOUND, str(err)) + elif str(mediatype) == "application/geo+json": + geojson = convert(g=graph, do_validate=False, iri2id=get_curie_id_for_uri) + content = io.BytesIO(json.dumps(geojson).encode("utf-8")) + return StreamingResponse(content=content, media_type=mediatype) + else: if "anot+" in mediatype: non_anot_mediatype = mediatype.replace("anot+", "") - graph = await return_annotated_rdf(graph, repo, system_repo) + annotations_graph = await return_annotated_rdf(graph, repo, system_repo) + graph.__iadd__(annotations_graph) graph.namespace_manager = prefix_graph.namespace_manager content = io.BytesIO( graph.serialize(format=non_anot_mediatype, encoding="utf-8") @@ -104,4 +112,5 @@ async def return_annotated_rdf( annotations_graph, repo, system_repo ) log.debug(f"Time to get annotations: {time.time() - t_start}") - return graph.__iadd__(annotations_graph) + # return graph.__iadd__(annotations_graph) + return annotations_graph diff --git a/prez/repositories/base.py b/prez/repositories/base.py index 14dfe073..3dee2fff 100755 --- 
a/prez/repositories/base.py +++ b/prez/repositories/base.py @@ -6,6 +6,8 @@ from rdflib import Namespace, Graph, URIRef +from prez.cache import prefix_graph + PREZ = Namespace("https://prez.dev/") log = logging.getLogger(__name__) @@ -34,9 +36,7 @@ async def send_queries( if query ], ) - # from prez.cache import prefix_graph - # g = Graph(namespace_manager=prefix_graph.namespace_manager) #TODO find where else this can go. significantly degrades performance - g = Graph() + g = Graph(namespace_manager=prefix_graph.namespace_manager) tabular_results = [] for result in results: if isinstance(result, Graph): diff --git a/prez/routers/api_extras_examples.py b/prez/routers/api_extras_examples.py index 8fe404c2..33becef4 100644 --- a/prez/routers/api_extras_examples.py +++ b/prez/routers/api_extras_examples.py @@ -23,7 +23,7 @@ def create_path_param(name: str, description: str, example: str): } -path_parameters = { +ogc_extended_path_parameters = { "collection-listing": [ create_path_param("catalogId", "Curie of the Catalog ID.", "bblck-ctlg:bblocks") ], @@ -32,7 +32,7 @@ def create_path_param(name: str, description: str, example: str): "catalogId", "Curie of the Catalog ID.", "bblck-ctlg:bblocks" ), create_path_param( - "collectionId", "Curie of the Collection ID.", "bblck-vcbs:api" + "recordsCollectionId", "Curie of the Collection ID.", "bblck-vcbs:api" ), ], "top-concepts": [ @@ -49,14 +49,35 @@ def create_path_param(name: str, description: str, example: str): ], "collection-object": [ create_path_param("catalogId", "Catalog ID.", "bblck-ctlg:bblocks"), - create_path_param("collectionId", "Collection ID.", "bblck-vcbs:api"), + create_path_param("recordsCollectionId", "Collection ID.", "bblck-vcbs:api"), ], "item-object": [ create_path_param("catalogId", "Catalog ID.", "bblck-ctlg:bblocks"), - create_path_param("collectionId", "Collection ID.", "bblck-vcbs:api"), + create_path_param("recordsCollectionId", "Collection ID.", "bblck-vcbs:api"), create_path_param("itemId", 
"Item ID.", "bblcks:ogc.unstable.sosa"), ], } -openapi_extras = { - name: {"parameters": params} for name, params in path_parameters.items() +ogc_extended_openapi_extras = { + name: {"parameters": params} + for name, params in ogc_extended_path_parameters.items() +} + +ogc_features_path_parameters = { + "feature-collection": [ + create_path_param( + "collectionId", "Collection ID.", "ns4:9fcf7a21-681f-43fd-8a8f-b56dbe2f4660" + ) + ], + "feature": [ + create_path_param( + "collectionId", "Collection ID.", "ns4:9fcf7a21-681f-43fd-8a8f-b56dbe2f4660" + ), + create_path_param( + "featureId", "Feature ID.", "ns3:2c8e292e-6eff-4a36-ad27-54fed2c1b422" + ), + ], +} +ogc_features_openapi_extras = { + name: {"parameters": params} + for name, params in ogc_features_path_parameters.items() } diff --git a/prez/routers/conformance.py b/prez/routers/conformance.py new file mode 100644 index 00000000..dd3753fa --- /dev/null +++ b/prez/routers/conformance.py @@ -0,0 +1,13 @@ +from fastapi import APIRouter, HTTPException + +from prez.models.ogc_features import ConformanceDeclaration, CONFORMANCE_CLASSES + +router = APIRouter(tags=["Conformance"]) + + +@router.get("/conformance", response_model=ConformanceDeclaration, status_code=200) +async def get_conformance(): + try: + return ConformanceDeclaration(conformsTo=CONFORMANCE_CLASSES) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/prez/routers/management.py b/prez/routers/management.py index 44d7d14e..5ae2e21a 100755 --- a/prez/routers/management.py +++ b/prez/routers/management.py @@ -1,32 +1,32 @@ +import io +import json import logging import pickle from typing import Optional from aiocache import caches -from fastapi import APIRouter, Query, Depends -from rdflib import BNode +from fastapi import APIRouter, Depends, Query +from rdflib import BNode, VANN from rdflib import Graph, URIRef, Literal from rdflib.collection import Collection from starlette.requests import Request -from 
starlette.responses import PlainTextResponse +from starlette.responses import PlainTextResponse, StreamingResponse -from prez.cache import endpoints_graph_cache +from prez.cache import endpoints_graph_cache, prefix_graph from prez.config import settings from prez.dependencies import get_system_repo +from prez.enums import JSONMediaType, NonAnnotatedRDFMediaType from prez.reference_data.prez_ns import PREZ from prez.renderers.renderer import return_rdf, return_from_graph from prez.repositories import Repo -from prez.services.connegp_service import RDF_MEDIATYPES, MediaType, NegotiatedPMTs +from prez.services.connegp_service import RDF_MEDIATYPES, NegotiatedPMTs router = APIRouter(tags=["Management"]) log = logging.getLogger(__name__) @router.get("/", summary="Home page", tags=["Prez"]) -async def index( - request: Request, - system_repo: Repo = Depends(get_system_repo) -): +async def index(request: Request, system_repo: Repo = Depends(get_system_repo)): """Returns the following information about the API""" pmts = NegotiatedPMTs( headers=request.headers, @@ -48,11 +48,10 @@ async def index( profile_headers=pmts.generate_response_headers(), selected_class=pmts.selected["class"], repo=system_repo, - system_repo=system_repo + system_repo=system_repo, ) - @router.get("/purge-tbox-cache", summary="Reset Tbox Cache") async def purge_tbox_cache(): """Purges the tbox cache, then re-adds annotations from common ontologies Prez has a copy of @@ -115,3 +114,24 @@ async def return_annotation_predicates(): Collection(g, provenance_list_bn, settings.provenance_predicates) Collection(g, other_list_bn, settings.other_predicates) return g + + +@router.get("/prefixes", summary="Show prefixes known to prez") +async def show_prefixes( + mediatype: Optional[NonAnnotatedRDFMediaType | JSONMediaType] = Query( + default=NonAnnotatedRDFMediaType.TURTLE, alias="_mediatype" + ) +): + """Returns the prefixes known to prez""" + mediatype_str = str(mediatype.value) + ns_map = {pfx: ns for pfx, ns 
in prefix_graph.namespaces()} + if mediatype_str == "application/json": + content = io.BytesIO(json.dumps(ns_map).encode("utf-8")) + else: + g = Graph() + for prefix, namespace in ns_map.items(): + bn = BNode() + g.add((bn, VANN.preferredNamespacePrefix, Literal(prefix))) + g.add((bn, VANN.preferredNamespaceUri, Literal(namespace))) + content = io.BytesIO(g.serialize(format=mediatype_str, encoding="utf-8")) + return StreamingResponse(content=content, media_type=mediatype_str) diff --git a/prez/routers/ogc_features_router.py b/prez/routers/ogc_features_router.py new file mode 100755 index 00000000..da434765 --- /dev/null +++ b/prez/routers/ogc_features_router.py @@ -0,0 +1,208 @@ +from typing import Optional, List + +from fastapi import Depends, FastAPI +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import RequestValidationError +from starlette import status +from starlette.requests import Request +from starlette.responses import StreamingResponse, JSONResponse + +from prez.dependencies import ( + get_data_repo, + cql_get_parser_dependency, + get_url, + get_ogc_features_mediatype, + get_system_repo, + get_endpoint_nodeshapes, + get_profile_nodeshape, + get_endpoint_uri_type, + get_ogc_features_path_params, + get_template_query, + check_unknown_params, +) +from prez.exceptions.model_exceptions import ( + ClassNotFoundException, + URINotFoundException, + InvalidSPARQLQueryException, + PrefixNotFoundException, + NoProfilesException, +) +from prez.models.ogc_features import generate_landing_page_links, OGCFeaturesLandingPage +from prez.models.query_params import QueryParams +from prez.reference_data.prez_ns import OGCFEAT +from prez.repositories import Repo +from prez.routers.api_extras_examples import ogc_features_openapi_extras +from prez.routers.conformance import router as conformance_router +from prez.services.exception_catchers import ( + catch_400, + catch_404, + catch_500, + catch_class_not_found_exception, + 
catch_uri_not_found_exception, + catch_invalid_sparql_query, + catch_prefix_not_found_exception, + catch_no_profiles_exception, +) +from prez.services.listings import ogc_features_listing_function, generate_link_headers +from prez.services.objects import ogc_features_object_function +from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.shacl import NodeShape + +ALLOWED_METHODS: List[str] = ["GET", "HEAD", "OPTIONS"] + +features_subapi = FastAPI( + title="OGC Features API", + exception_handlers={ + 400: catch_400, + 404: catch_404, + 500: catch_500, + NoProfilesException: catch_no_profiles_exception, + ClassNotFoundException: catch_class_not_found_exception, + URINotFoundException: catch_uri_not_found_exception, + PrefixNotFoundException: catch_prefix_not_found_exception, + InvalidSPARQLQueryException: catch_invalid_sparql_query, + }, +) +features_subapi.include_router(conformance_router) + + +@features_subapi.exception_handler(RequestValidationError) +async def validation_exception_handler(request: Request, exc: RequestValidationError): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=jsonable_encoder( + { + "detail": exc.errors(), + "body": exc.body, + "note": { + "This error was caught as a RequestValidationError which OGC Features " + "specification specifies should be raised with a status code of 400. " + "It would otherwise be a 422 Unprocessable Entity." 
+ }, + } + ), + ) + + +@features_subapi.api_route( + "/", + summary="OGC Features API", + methods=ALLOWED_METHODS, +) +async def ogc_features_api( + url: str = Depends(get_url), +): + links = generate_landing_page_links(url) + link_headers = generate_link_headers(links) + lp = OGCFeaturesLandingPage( + title="OGC API - Features", + description="This is a landing page for the OGC API - Features.", + links=links, + ) + return JSONResponse( + content=lp.model_dump(), + headers={"Content-Type": "application/json"} | link_headers, + ) + + +######################################################################################################################## +# Listing endpoints + +# 1: /features/collections +# 2: /features/collections/{collectionId}/items +######################################################################################################################## + + +@features_subapi.api_route( + "/queryables", + methods=ALLOWED_METHODS, + name=OGCFEAT["queryables-global"], +) +@features_subapi.api_route( + "/collections", + methods=ALLOWED_METHODS, + name=OGCFEAT["feature-collections"], +) +@features_subapi.api_route( + "/collections/{collectionId}/items", + methods=ALLOWED_METHODS, + name=OGCFEAT["features"], + openapi_extra=ogc_features_openapi_extras.get("feature-collection"), +) +@features_subapi.api_route( + "/collections/{collectionId}/queryables", + methods=ALLOWED_METHODS, + name=OGCFEAT["queryables-local"], + openapi_extra=ogc_features_openapi_extras.get("feature-collection"), +) +async def listings_with_feature_collection( + validate_unknown_params: bool = Depends(check_unknown_params), + endpoint_uri_type: tuple = Depends(get_endpoint_uri_type), + endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes), + profile_nodeshape: NodeShape = Depends(get_profile_nodeshape), + url: str = Depends(get_url), + mediatype: str = Depends(get_ogc_features_mediatype), + path_params: dict = Depends(get_ogc_features_path_params), + query_params: 
QueryParams = Depends(), + cql_parser: CQLParser = Depends(cql_get_parser_dependency), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), +): + try: + content, headers = await ogc_features_listing_function( + endpoint_uri_type, + endpoint_nodeshape, + profile_nodeshape, + mediatype, + url, + data_repo, + system_repo, + cql_parser, + query_params, + **path_params, + ) + except Exception as e: + raise e + return StreamingResponse(content=content, media_type=mediatype, headers=headers) + + +######################################################################################################################## +# Object endpoints + +# 1: /features/collections/{collectionId} +# 2: /features/collections/{collectionId}/items/{featureId} +######################################################################################################################## + + +@features_subapi.api_route( + path="/collections/{collectionId}", + methods=ALLOWED_METHODS, + name=OGCFEAT["feature-collection"], + openapi_extra=ogc_features_openapi_extras.get("feature-collection"), +) +@features_subapi.api_route( + path="/collections/{collectionId}/items/{featureId}", + methods=ALLOWED_METHODS, + name=OGCFEAT["feature"], + openapi_extra=ogc_features_openapi_extras.get("feature"), +) +async def objects( + template_query: Optional[str] = Depends(get_template_query), + mediatype: str = Depends(get_ogc_features_mediatype), + url: str = Depends(get_url), + path_params: dict = Depends(get_ogc_features_path_params), + data_repo: Repo = Depends(get_data_repo), + system_repo: Repo = Depends(get_system_repo), +): + try: + content, headers = await ogc_features_object_function( + template_query, + mediatype, + url, + data_repo, + system_repo, + **path_params, + ) + except Exception as e: + raise e + return StreamingResponse(content=content, media_type=mediatype, headers=headers) diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py index 
8a3efb6a..48dade3f 100755 --- a/prez/routers/ogc_router.py +++ b/prez/routers/ogc_router.py @@ -16,7 +16,11 @@ from prez.models.query_params import QueryParams from prez.reference_data.prez_ns import EP, ONT, OGCE from prez.repositories import Repo -from prez.routers.api_extras_examples import responses, cql_examples, openapi_extras +from prez.routers.api_extras_examples import ( + responses, + cql_examples, + ogc_extended_openapi_extras, +) from prez.services.connegp_service import NegotiatedPMTs from prez.services.listings import listing_function from prez.services.objects import object_function @@ -47,28 +51,28 @@ "/catalogs/{catalogId}/collections", summary="Collection Listing", name=OGCE["collection-listing"], - openapi_extra=openapi_extras.get("collection-listing"), + openapi_extra=ogc_extended_openapi_extras.get("collection-listing"), responses=responses, ) @router.get( - "/catalogs/{catalogId}/collections/{collectionId}/items", + "/catalogs/{catalogId}/collections/{recordsCollectionId}/items", summary="Item Listing", name=OGCE["item-listing"], - openapi_extra=openapi_extras.get("item-listing"), + openapi_extra=ogc_extended_openapi_extras.get("item-listing"), responses=responses, ) @router.get( "/concept-hierarchy/{parent_curie}/top-concepts", summary="Top Concepts", name=OGCE["top-concepts"], - openapi_extra=openapi_extras.get("top-concepts"), + openapi_extra=ogc_extended_openapi_extras.get("top-concepts"), responses=responses, ) @router.get( "/concept-hierarchy/{parent_curie}/narrowers", summary="Narrowers", name=OGCE["narrowers"], - openapi_extra=openapi_extras.get("narrowers"), + openapi_extra=ogc_extended_openapi_extras.get("narrowers"), responses=responses, ) async def listings( @@ -141,8 +145,8 @@ async def cql_post_listings( # 1: /object?uri= # 2: /profiles/{profile_curie} # 3: /catalogs/{catalogId} -# 4: /catalogs/{catalogId}/collections/{collectionId} -# 5: /catalogs/{catalogId}/collections/{collectionId}/items/{itemId} +# 4: 
/catalogs/{catalogId}/collections/{recordsCollectionId} +# 5: /catalogs/{catalogId}/collections/{recordsCollectionId}/items/{itemId} ######################################################################################################################## @@ -153,28 +157,28 @@ async def cql_post_listings( path="/profiles/{profile_curie}", summary="Profile", name=EP["system/profile-object"], - openapi_extra=openapi_extras.get("profile-object"), + openapi_extra=ogc_extended_openapi_extras.get("profile-object"), responses=responses, ) @router.get( path="/catalogs/{catalogId}", summary="Catalog Object", name=OGCE["catalog-object"], - openapi_extra=openapi_extras.get("catalog-object"), + openapi_extra=ogc_extended_openapi_extras.get("catalog-object"), responses=responses, ) @router.get( - path="/catalogs/{catalogId}/collections/{collectionId}", + path="/catalogs/{catalogId}/collections/{recordsCollectionId}", summary="Collection Object", name=OGCE["collection-object"], - openapi_extra=openapi_extras.get("collection-object"), + openapi_extra=ogc_extended_openapi_extras.get("collection-object"), responses=responses, ) @router.get( - path="/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}", + path="/catalogs/{catalogId}/collections/{recordsCollectionId}/items/{itemId}", summary="Item Object", name=OGCE["item-object"], - openapi_extra=openapi_extras.get("item-object"), + openapi_extra=ogc_extended_openapi_extras.get("item-object"), responses=responses, ) async def objects( diff --git a/prez/routers/sparql.py b/prez/routers/sparql.py index 346a22b7..1b9021e3 100755 --- a/prez/routers/sparql.py +++ b/prez/routers/sparql.py @@ -70,10 +70,9 @@ async def sparql_endpoint_handler( await response.aread() g = Graph() g.parse(data=response.text, format=non_anot_mediatype) - graph = await return_annotated_rdf(g, repo, system_repo) - content = io.BytesIO( - graph.serialize(format=non_anot_mediatype, encoding="utf-8") - ) + annotations_graph = await return_annotated_rdf(g, 
repo, system_repo) + g.__iadd__(annotations_graph) + content = io.BytesIO(g.serialize(format=non_anot_mediatype, encoding="utf-8")) return StreamingResponse( content=content, media_type=non_anot_mediatype, diff --git a/prez/services/app_service.py b/prez/services/app_service.py index ef3b7e6c..afb637a8 100755 --- a/prez/services/app_service.py +++ b/prez/services/app_service.py @@ -3,7 +3,7 @@ from pathlib import Path import httpx -from rdflib import URIRef, Literal, Graph +from rdflib import URIRef, Literal, Graph, RDF, BNode from prez.cache import ( prez_system_graph, @@ -12,7 +12,7 @@ endpoints_graph_cache, ) from prez.config import settings -from prez.reference_data.prez_ns import PREZ +from prez.reference_data.prez_ns import PREZ, ONT from prez.repositories import Repo from prez.services.curie_functions import get_curie_id_for_uri from prez.services.query_generation.count import startup_count_objects @@ -77,6 +77,32 @@ async def prefix_initialisation(repo: Repo): await generate_prefixes(repo) +async def retrieve_remote_template_queries(repo: Repo): + # TODO allow mediatype specification in repo queries + query = """ + PREFIX prez: + PREFIX rdf: + SELECT ?query ?endpoint + WHERE { + ?bn a prez:TemplateQuery ; + rdf:value ?query ; + prez:forEndpoint ?endpoint ; + } + """ + _, results = await repo.send_queries([], [(None, query)]) + if results: + for result in results[0][1]: + bn = BNode() + query = result["query"]["value"] + endpoint = result["endpoint"]["value"] + prez_system_graph.add((bn, RDF.type, ONT.TemplateQuery)) + prez_system_graph.add((bn, RDF.value, Literal(query))) + prez_system_graph.add((bn, ONT.forEndpoint, URIRef(endpoint))) + log.info(f"Remote template query(ies) found and added") + else: + log.info("No remote template queries found") + + async def add_remote_prefixes(repo: Repo): # TODO allow mediatype specification in repo queries query = PrefixQuery().to_string() @@ -93,18 +119,6 @@ async def add_local_prefixes(repo): """ Adds prefixes to the 
prefix graph """ - # look for remote prefixes - remote_prefix_query = f""" - CONSTRUCT WHERE {{ ?bn ?prefix; - ?namespace. }} - """ - remote_prefix_g, _ = await repo.send_queries([remote_prefix_query], []) - if remote_prefix_g: - remote_i = await _add_prefixes_from_graph(remote_prefix_g) - log.info(f"{remote_i+1:,} prefixes bound from remote repository.") - else: - log.info("No remote prefix declarations found.") - for f in (Path(__file__).parent.parent / "reference_data/prefixes").glob("*.ttl"): g = Graph().parse(f, format="turtle") local_i = await _add_prefixes_from_graph(g) diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py index 87870636..d4ff47f1 100755 --- a/prez/services/connegp_service.py +++ b/prez/services/connegp_service.py @@ -2,12 +2,15 @@ import re from enum import Enum from textwrap import dedent +from typing import List, Dict +from urllib.parse import urlencode from pydantic import BaseModel from rdflib import Graph, Namespace, URIRef from prez.config import settings from prez.exceptions.model_exceptions import NoProfilesException +from prez.models.ogc_features import Link from prez.repositories.base import Repo from prez.services.curie_functions import get_curie_id_for_uri, get_uri_for_curie_id @@ -37,6 +40,7 @@ "text/plain": "nt", # text/plain is the old/deprecated mimetype for n-triples } + class MediaType(str, Enum): turtle = "text/turtle" n3 = "text/n3" @@ -88,6 +92,7 @@ class NegotiatedPMTs(BaseModel): requested_mediatypes: list[tuple[str, float]] | None = None available: list[dict] | None = None selected: dict | None = None + current_path: str | None = None class Config: arbitrary_types_allowed = True @@ -219,7 +224,7 @@ def generate_response_headers(self) -> dict: ) mediatype_header_links = ", ".join( [ - f'<{self.selected["class"]}?_profile={get_curie_id_for_uri(pmt["profile"])}&_mediatype={pmt["mediatype"]}>; rel="{"self" if pmt == self.selected else "alternate"}"; type="{pmt["mediatype"]}"; 
profile="{pmt["profile"]}"' + f'<{settings.system_uri}{self.current_path}?_profile={get_curie_id_for_uri(pmt["profile"])}&_mediatype={pmt["mediatype"]}>; rel="{"self" if pmt == self.selected else "alternate"}"; type="{pmt["mediatype"]}"; format="{pmt["profile"]}"' for pmt in self.available ] ) @@ -259,7 +264,7 @@ def _compose_select_query(self) -> str: VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature skos:ConceptScheme skos:Concept skos:Collection dcat:Catalog rdf:Resource dcat:Resource prof:Profile prez:SPARQLQuery - prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object rdfs:Resource }} + prez:SearchResult prez:CQLObjectList prez:Queryable prez:Object rdfs:Resource }} ?profile altr-ext:constrainsClass ?class ; altr-ext:hasResourceFormat ?format ; dcterms:title ?title .\ @@ -306,23 +311,32 @@ async def _do_query(self, query: str) -> tuple[Graph, list]: if settings.log_level == "DEBUG": from tabulate import tabulate + table_data = [ [ - item['profile']['value'], - item['title']['value'], - item['class']['value'], - item['distance']['value'], - item['def_profile']['value'], - item['format']['value'], - item['req_format']['value'], - item['def_format']['value'], + item["profile"]["value"], + item["title"]["value"], + item["class"]["value"], + item["distance"]["value"], + item["def_profile"]["value"], + item["format"]["value"], + item["req_format"]["value"], + item["def_format"]["value"], ] for item in response[1][0][1] ] # Define headers - headers = ["Profile", "Title", "Class", "Distance", "Default Profile", "Format", "Requested Format", - "Default Format"] + headers = [ + "Profile", + "Title", + "Class", + "Distance", + "Default Profile", + "Format", + "Requested Format", + "Default Format", + ] # Render as a table log.debug(tabulate(table_data, headers=headers, tablefmt="grid")) diff --git a/prez/services/curie_functions.py b/prez/services/curie_functions.py index 44b21ca1..cb954696 100755 --- a/prez/services/curie_functions.py 
+++ b/prez/services/curie_functions.py @@ -1,12 +1,11 @@ import logging from urllib.parse import urlparse -from aiocache.serializers import PickleSerializer +from aiocache import caches from rdflib import URIRef from prez.cache import prefix_graph from prez.config import settings -from aiocache import cached, Cache, caches log = logging.getLogger(__name__) diff --git a/prez/services/exception_catchers.py b/prez/services/exception_catchers.py index 2a4e05d5..8ae8f13a 100755 --- a/prez/services/exception_catchers.py +++ b/prez/services/exception_catchers.py @@ -7,6 +7,7 @@ URINotFoundException, NoProfilesException, InvalidSPARQLQueryException, + PrefixNotFoundException, ) @@ -44,6 +45,18 @@ async def catch_uri_not_found_exception(request: Request, exc: URINotFoundExcept ) +async def catch_prefix_not_found_exception( + request: Request, exc: PrefixNotFoundException +): + return JSONResponse( + status_code=404, + content={ + "error": "NO_PREFIX", + "message": exc.message, + }, + ) + + async def catch_no_profiles_exception(request: Request, exc: NoProfilesException): return JSONResponse( status_code=404, diff --git a/prez/services/generate_queryables.py b/prez/services/generate_queryables.py new file mode 100644 index 00000000..f2fe3c78 --- /dev/null +++ b/prez/services/generate_queryables.py @@ -0,0 +1,29 @@ +from prez.config import settings +from prez.models.ogc_features import QueryableProperty, Queryables +from prez.reference_data.prez_ns import PREZ, OGCFEAT + + +def generate_queryables_json(item_graph, annotations_graph, url, endpoint_uri): + queryable_props = {} + for queryable in item_graph.subjects(): + queryable_props[str(queryable)] = QueryableProperty( + title=annotations_graph.value(queryable, PREZ.label), + description=annotations_graph.value(queryable, PREZ.description), + ) + if endpoint_uri == OGCFEAT["queryables-global"]: + title = "Global Queryables" + description = ( + "Global queryable properties for all collections in the OGC Features API." 
+ ) + else: + title = "Local Queryables" + description = ( + "Local queryable properties for the collection in the OGC Features API." + ) + queryable_params = { + "$id": f"{settings.system_uri}{url.path}", + "title": title, + "description": description, + "properties": queryable_props, + } + return Queryables(**queryable_params) diff --git a/prez/services/link_generation.py b/prez/services/link_generation.py index a23d6235..1cd0b346 100755 --- a/prez/services/link_generation.py +++ b/prez/services/link_generation.py @@ -2,7 +2,7 @@ import time from string import Template -from rdflib import Graph, Literal, URIRef, BNode +from rdflib import Graph, Literal, URIRef from rdflib.namespace import SH, RDF from sparql_grammar_pydantic import ( IRI, @@ -46,11 +46,11 @@ async def add_prez_links(graph: Graph, repo: Repo, endpoint_structure): async def _link_generation( - uri: URIRef, - repo: Repo, - klasses, - graph: Graph, - endpoint_structure: str = settings.endpoint_structure, + uri: URIRef, + repo: Repo, + klasses, + graph: Graph, + endpoint_structure: str = settings.endpoint_structure, ): """ Generates links for the given URI if it is not already cached. @@ -89,21 +89,25 @@ async def _link_generation( # part of the link. e.g. ?path_node_1 will have result(s) but is not part of the link. 
for solution in result[1]: # create link strings - ( - curie_for_uri, - members_link, - object_link, - identifiers - ) = await create_link_strings( - ns.hierarchy_level, solution, uri, endpoint_structure + (curie_for_uri, members_link, object_link, identifiers) = ( + await create_link_strings( + ns.hierarchy_level, solution, uri, endpoint_structure + ) ) # add links and identifiers to graph and cache await add_links_to_graph_and_cache( - curie_for_uri, graph, members_link, object_link, uri, identifiers + curie_for_uri, + graph, + members_link, + object_link, + uri, + identifiers, ) else: - curie_for_uri, members_link, object_link, identifiers = await create_link_strings( - ns.hierarchy_level, {}, uri, endpoint_structure + curie_for_uri, members_link, object_link, identifiers = ( + await create_link_strings( + ns.hierarchy_level, {}, uri, endpoint_structure + ) ) await add_links_to_graph_and_cache( curie_for_uri, graph, members_link, object_link, uri, identifiers @@ -136,7 +140,7 @@ async def get_nodeshapes_constraining_class(klasses, uri): async def add_links_to_graph_and_cache( - curie_for_uri, graph, members_link, object_link, uri, identifiers: dict + curie_for_uri, graph, members_link, object_link, uri, identifiers: dict ): """ Adds links and identifiers to the given graph and cache. @@ -148,7 +152,7 @@ async def add_links_to_graph_and_cache( (uri_in_link_string, PREZ.identifier, Literal(curie_in_link_string), uri) ) if ( - members_link + members_link ): # TODO need to confirm the link value doesn't match the existing link value, as multiple endpoints can deliver # the same class/have different links for the same URI existing_members_link = list( @@ -165,8 +169,10 @@ async def create_link_strings(hierarchy_level, solution, uri, endpoint_structure """ Creates link strings based on the hierarchy level and solution provided. 
""" - identifiers = {URIRef(v["value"]): get_curie_id_for_uri(v["value"]) for k, v in solution.items()} | { - uri: get_curie_id_for_uri(uri)} + identifiers = { + URIRef(v["value"]): get_curie_id_for_uri(v["value"]) + for k, v in solution.items() + } | {uri: get_curie_id_for_uri(uri)} components = list(endpoint_structure[: int(hierarchy_level)]) variables = reversed( ["focus_node"] + [f"path_node_{i}" for i in range(1, len(components))] diff --git a/prez/services/listings.py b/prez/services/listings.py index d14fb954..4ad61159 100755 --- a/prez/services/listings.py +++ b/prez/services/listings.py @@ -1,15 +1,41 @@ import copy +import io +import json import logging +from datetime import datetime +from typing import Dict +from urllib.parse import urlencode +from zoneinfo import ZoneInfo from fastapi.responses import PlainTextResponse +from rdf2geojson import convert from rdflib import URIRef, Literal -from rdflib.namespace import RDF -from sparql_grammar_pydantic import IRI, Var, TriplesSameSubject +from rdflib.namespace import RDF, Namespace, GEO +from sparql_grammar_pydantic import ( + IRI, + Var, + TriplesSameSubject, + TriplesSameSubjectPath, + PrimaryExpression, + GraphPatternNotTriples, + Bind, + Expression, + IRIOrFunction, + OptionalGraphPattern, + GroupGraphPattern, + GroupGraphPatternSub, + TriplesBlock, +) from prez.cache import endpoints_graph_cache from prez.config import settings -from prez.reference_data.prez_ns import PREZ, ALTREXT, ONT -from prez.renderers.renderer import return_from_graph +from prez.enums import NonAnnotatedRDFMediaType +from prez.models.ogc_features import Collection, Link, Collections, Links +from prez.reference_data.prez_ns import PREZ, ALTREXT, ONT, OGCFEAT +from prez.renderers.renderer import return_from_graph, return_annotated_rdf +from prez.services.connegp_service import RDF_MEDIATYPES +from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri +from prez.services.generate_queryables import 
generate_queryables_json from prez.services.link_generation import add_prez_links from prez.services.query_generation.count import CountQuery from prez.services.query_generation.shacl import NodeShape @@ -20,6 +46,8 @@ log = logging.getLogger(__name__) +DWC = Namespace("http://rs.tdwg.org/dwc/terms/") + async def listing_function( data_repo, @@ -100,7 +128,7 @@ async def listing_function( if search_query: count = len(list(item_graph.subjects(RDF.type, PREZ.SearchResult))) if count == search_query.limit: - count_literal = f">{count-1}" + count_literal = f">{count - 1}" else: count_literal = f"{count}" item_graph.add((PREZ.SearchResult, PREZ["count"], Literal(count_literal))) @@ -115,6 +143,274 @@ ) + +async def ogc_features_listing_function( + endpoint_uri_type, + endpoint_nodeshape, + profile_nodeshape, + selected_mediatype, + url, + data_repo, + system_repo, + cql_parser, + query_params, + **path_params, +): + count_query = None + count = 0 + collectionId = path_params.get("collectionId") + subselect_kwargs = merge_listing_query_grammar_inputs( + endpoint_nodeshape=endpoint_nodeshape, + cql_parser=cql_parser, + query_params=query_params, + ) + # merge subselect and profile triples same subject (for construct triples) + construct_tss_list = [] + subselect_tss_list = subselect_kwargs.pop("construct_tss_list") + if subselect_tss_list: + construct_tss_list.extend(subselect_tss_list) + if profile_nodeshape.tss_list: + construct_tss_list.extend(profile_nodeshape.tss_list) + + queries = [] + if endpoint_uri_type[0] in [ + OGCFEAT["queryables-local"], + OGCFEAT["queryables-global"], + ]: + queryable_var = Var(value="queryable") + inner_select_triple = ( + Var(value="focus_node"), + queryable_var, + Var(value="queryable_value"), + ) + subselect_kwargs["inner_select_tssp_list"].append( + TriplesSameSubjectPath.from_spo(*inner_select_triple) + ) + subselect_kwargs["inner_select_vars"] = [queryable_var] + construct_triple = ( + queryable_var,
IRI(value=RDF.type), + IRI(value="http://www.opengis.net/def/rel/ogc/1.0/Queryable"), + ) + construct_tss_list = [TriplesSameSubject.from_spo(*construct_triple)] + query = PrezQueryConstructor( + construct_tss_list=construct_tss_list, + profile_triples=profile_nodeshape.tssp_list, + **subselect_kwargs, + ).to_string() + queries.append(query) + elif not collectionId: # list Feature Collections + query = PrezQueryConstructor( + construct_tss_list=construct_tss_list, + profile_triples=profile_nodeshape.tssp_list, + **subselect_kwargs, + ) + queries.append(query.to_string()) + # add the count query + subselect = copy.deepcopy(query.inner_select) + count_query = CountQuery(original_subselect=subselect).to_string() + else: # list items in a Feature Collection + # add inbound links - not currently possible via profiles + opt_inbound_gpnt = _add_inbound_triple_pattern_match(construct_tss_list) + profile_nodeshape.gpnt_list.append(opt_inbound_gpnt) + + feature_list_query = PrezQueryConstructor( + construct_tss_list=construct_tss_list, + profile_triples=profile_nodeshape.tssp_list, + profile_gpnt=profile_nodeshape.gpnt_list, + **subselect_kwargs, + ) + queries.append(feature_list_query.to_string()) + + # add the count query + subselect = copy.deepcopy(feature_list_query.inner_select) + count_query = CountQuery(original_subselect=subselect).to_string() + + # Features listing requires CBD of the Feature Collection as well; reuse items profile to get all props/bns to + # depth two. 
+ collection_uri = await get_uri_for_curie_id(collectionId) + gpnt = GraphPatternNotTriples( + content=Bind( + expression=Expression.from_primary_expression( + PrimaryExpression( + content=IRIOrFunction(iri=IRI(value=collection_uri)) + ) + ), + var=Var(value="focus_node"), + ) + ) + feature_collection_query = PrezQueryConstructor( + construct_tss_list=construct_tss_list, + profile_triples=profile_nodeshape.tssp_list, + profile_gpnt=profile_nodeshape.gpnt_list, + inner_select_gpnt=[gpnt], # BIND( AS ?focus_node) + limit=1, + offset=0, + ).to_string() + queries.append(feature_collection_query) + + link_headers = None + if selected_mediatype == "application/sparql-query": + # queries_dict = {f"query_{i}": query for i, query in enumerate(queries)} + # just do the first query for now: + content = io.BytesIO(queries[0].encode("utf-8")) + # content = io.BytesIO(json.dumps(queries_dict).encode("utf-8")) + return content, link_headers + + item_graph, _ = await data_repo.send_queries(queries, []) + annotations_graph = await return_annotated_rdf(item_graph, data_repo, system_repo) + if count_query: + count_g, _ = await data_repo.send_queries([count_query], []) + if count_g: + count = int(next(iter(count_g.objects()))) + + if selected_mediatype == "application/json": + if endpoint_uri_type[0] in [ + OGCFEAT["queryables-local"], + OGCFEAT["queryables-global"], + ]: + queryables = generate_queryables_json( + item_graph, annotations_graph, url, endpoint_uri_type[0] + ) + content = io.BytesIO( + queryables.model_dump_json(exclude_none=True, by_alias=True).encode( + "utf-8" + ) + ) + else: + collections = create_collections_json( + item_graph, + annotations_graph, + url, + selected_mediatype, + query_params, + count, + ) + all_links = collections.links + for coll in collections.collections: + all_links.extend(coll.links) + link_headers = generate_link_headers(all_links) + content = io.BytesIO( + collections.model_dump_json(exclude_none=True).encode("utf-8") + ) + + elif 
selected_mediatype == "application/geo+json": + geojson = convert(g=item_graph, do_validate=False, iri2id=get_curie_id_for_uri) + all_links = create_self_alt_links(selected_mediatype, url, query_params, count) + all_links_dict = Links(links=all_links).model_dump(exclude_none=True) + link_headers = generate_link_headers(all_links) + geojson["links"] = all_links_dict["links"] + geojson["timeStamp"] = get_brisbane_timestamp() + geojson["numberMatched"] = count + geojson["numberReturned"] = len(geojson["features"]) + content = io.BytesIO(json.dumps(geojson).encode("utf-8")) + elif selected_mediatype in NonAnnotatedRDFMediaType: + content = io.BytesIO( + item_graph.serialize(format=selected_mediatype, encoding="utf-8") + ) + return content, link_headers + + +def _add_inbound_triple_pattern_match(construct_tss_list): + triple = (Var(value="inbound_s"), Var(value="inbound_p"), Var(value="focus_node")) + construct_tss_list.append(TriplesSameSubject.from_spo(*triple)) + inbound_tssp_list = [TriplesSameSubjectPath.from_spo(*triple)] + opt_inbound_gpnt = GraphPatternNotTriples( + content=OptionalGraphPattern( + group_graph_pattern=GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock.from_tssp_list(inbound_tssp_list) + ) + ) + ) + ) + return opt_inbound_gpnt + + +def create_collections_json( + item_graph, annotations_graph, url, selected_mediatype, query_params, count +): + collections_list = [] + for s, p, o in item_graph.triples((None, RDF.type, GEO.FeatureCollection)): + curie_id = get_curie_id_for_uri(s) + collections_list.append( + Collection( + id=curie_id, + title=annotations_graph.value( + subject=s, predicate=PREZ.label, default=None + ), + description=annotations_graph.value( + subject=s, predicate=PREZ.description, default=None + ), + links=[ + Link( + href=URIRef( + f"{settings.system_uri}{url.path}/{curie_id}/items?{urlencode({'_mediatype': mt})}" + ), + rel="items", + type=mt, + ) + for mt in ["application/geo+json", *RDF_MEDIATYPES] + 
], + ) + ) + self_alt_links = create_self_alt_links(selected_mediatype, url, query_params, count) + collections = Collections( + collections=collections_list, + links=self_alt_links, + ) + return collections + + +def create_self_alt_links(selected_mediatype, url, query_params = None, count = None): + self_alt_links = [] + for mt in [selected_mediatype, *RDF_MEDIATYPES]: + self_alt_links.append( + Link( + href=URIRef( + f"{settings.system_uri}{url.path}?{urlencode({'_mediatype': mt})}" + ), + rel="self" if mt == selected_mediatype else "alternate", + type=mt, + title="this document", + ) + ) + if count: # only for listings; add prev/next links + page = query_params.page + limit = query_params.limit + if page != 1: + prev_page = page - 1 + self_alt_links.append( + Link( + href=URIRef( + f"{settings.system_uri}{url.path}?{urlencode({'_mediatype': selected_mediatype, 'page': prev_page, 'limit': limit})}" + ), + rel="prev", + type=selected_mediatype, + title="previous page", + ) + ) + if count > page * limit: + next_page = page + 1 + self_alt_links.append( + Link( + href=URIRef( + f"{settings.system_uri}{url.path}?{urlencode({'_mediatype': selected_mediatype, 'page': next_page, 'limit': limit})}" + ), + rel="next", + type=selected_mediatype, + title="next page", + ) + ) + return self_alt_links + + +def generate_link_headers(links) -> Dict[str, str]: + link_header = ", ".join( + [f'<{link.href}>; rel="{link.rel}"; type="{link.type}"' for link in links] + ) + return {"Link": link_header} + + async def handle_alt_profile(original_endpoint_type, pmts): endpoint_nodeshape_map = { ONT["ObjectEndpoint"]: URIRef("http://example.org/ns#AltProfilesForObject"), @@ -132,3 +428,14 @@ async def handle_alt_profile(original_endpoint_type, pmts): # 'dynamicaly' expressed in SHACL. 
The class is only known at runtime ) return endpoint_nodeshape + + +def get_brisbane_timestamp(): + # Get current time in Brisbane + brisbane_time = datetime.now(ZoneInfo("Australia/Brisbane")) + + # Format the timestamp + timestamp = brisbane_time.strftime("%Y-%m-%dT%H:%M:%S%z") + + # Insert colon in timezone offset + return f"{timestamp[:-2]}:{timestamp[-2:]}" diff --git a/prez/services/objects.py b/prez/services/objects.py index 0374d0ca..a3ebebb1 100755 --- a/prez/services/objects.py +++ b/prez/services/objects.py @@ -1,14 +1,26 @@ +import io +import json import logging import time +from urllib.parse import urlencode from fastapi.responses import PlainTextResponse -from sparql_grammar_pydantic import TriplesSameSubject, IRI +from rdf2geojson import convert +from rdflib import RDF, URIRef, RDFS +from rdflib.namespace import GEO +from sparql_grammar_pydantic import TriplesSameSubject, IRI, Var, TriplesSameSubjectPath +from prez.config import settings +from prez.exceptions.model_exceptions import URINotFoundException +from prez.models.ogc_features import Link, Collection, Links from prez.models.query_params import QueryParams -from prez.reference_data.prez_ns import ALTREXT, ONT -from prez.renderers.renderer import return_from_graph +from prez.reference_data.prez_ns import ALTREXT, ONT, PREZ +from prez.renderers.renderer import return_from_graph, return_annotated_rdf +from prez.services.connegp_service import RDF_MEDIATYPES +from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri from prez.services.link_generation import add_prez_links -from prez.services.listings import listing_function +from prez.services.listings import listing_function, generate_link_headers, create_self_alt_links, \ + get_brisbane_timestamp from prez.services.query_generation.umbrella import ( PrezQueryConstructor, ) @@ -17,11 +29,11 @@ async def object_function( - data_repo, - system_repo, - endpoint_structure, - pmts, - profile_nodeshape, + data_repo, + 
system_repo, + endpoint_structure, + pmts, + profile_nodeshape, ): if pmts.selected["profile"] == ALTREXT["alt-profile"]: none_keys = [ @@ -36,7 +48,11 @@ async def object_function( filter=None, q=None, page=1, - per_page=100, + limit=100, + datetime=None, + bbox=[], + filter_crs=None, + filter_lang=None, order_by=None, order_by_direction=None, ) @@ -80,3 +96,117 @@ async def object_function( data_repo, system_repo, ) + + +def create_parent_link(url): + return Link( + href=f"{settings.system_uri}{url.path.split('/items')[0]}", + rel="collection", + type="application/geo+json" + ) + + + +async def ogc_features_object_function( + template_query, + selected_mediatype, + url, + data_repo, + system_repo, + **path_params, +): + collectionId = path_params.get("collectionId") + featureId = path_params.get("featureId") + if featureId: + feature_uri = await get_uri_for_curie_id(featureId) + else: + feature_uri = None + collection_uri = await get_uri_for_curie_id(collectionId) + if template_query: + if featureId: + focus_uri = await get_uri_for_curie_id(featureId) + else: + focus_uri = collection_uri + query = template_query.replace( + "VALUES ?focusNode { UNDEF }", f"VALUES ?focusNode {{ {focus_uri.n3()} }}" + ) + else: + if featureId is None: # feature collection + collection_iri = IRI(value=collection_uri) + construct_tss_list = None + tssp_list = [ + TriplesSameSubjectPath.from_spo( + collection_iri, IRI(value=RDF.type), Var(value="type") + ) + ] + else: # feature + feature_uri = await get_uri_for_curie_id(featureId) + feature_iri = IRI(value=feature_uri) + triples = [ + (feature_iri, Var(value="prop"), Var(value="val")), + (feature_iri, IRI(value=GEO.hasGeometry), Var(value="bn")), # Pyoxigraph DESCRIBE does not follow blank nodes, so specify the geometry path + (Var(value="bn"), IRI(value=GEO.asWKT), Var(value="wkt")) + ] + tssp_list = [TriplesSameSubjectPath.from_spo(*triple) for triple in triples] + construct_tss_list = [TriplesSameSubject.from_spo(*triple) for triple 
in triples] + query = PrezQueryConstructor( + construct_tss_list=construct_tss_list, + profile_triples=tssp_list, + ).to_string() + + query_start_time = time.time() + item_graph, _ = await data_repo.send_queries([query], []) + if len(item_graph) == 0: + uri = feature_uri if feature_uri else collection_uri + raise URINotFoundException(uri) + annotations_graph = await return_annotated_rdf(item_graph, data_repo, system_repo) + log.debug(f"Query time: {time.time() - query_start_time}") + + link_headers = None + if selected_mediatype == "application/sparql-query": + content = io.BytesIO(query.encode("utf-8")) + elif selected_mediatype == "application/json": + collection = create_collection_json( + collectionId, collection_uri, annotations_graph, url + ) + link_headers = generate_link_headers(collection.links) + content = io.BytesIO( + collection.model_dump_json(exclude_none=True).encode("utf-8") + ) + elif selected_mediatype == "application/geo+json": + geojson = convert(g=item_graph, do_validate=False, iri2id=get_curie_id_for_uri) + self_alt_links = create_self_alt_links(selected_mediatype, url) + parent_link = create_parent_link(url) + all_links = [*self_alt_links, parent_link] + all_links_dict = Links(links=all_links).model_dump(exclude_none=True) + link_headers = generate_link_headers(all_links) + geojson["links"] = all_links_dict["links"] + geojson["timeStamp"] = get_brisbane_timestamp() + content = io.BytesIO(json.dumps(geojson).encode("utf-8")) + else: + content = io.BytesIO( + item_graph.serialize(format=selected_mediatype, encoding="utf-8") + ) + return content, link_headers + + +def create_collection_json(collection_curie, collection_uri, annotations_graph, url): + return Collection( + id=collection_curie, + title=annotations_graph.value( + subject=collection_uri, predicate=PREZ.label, default=None + ), + description=annotations_graph.value( + subject=collection_uri, predicate=PREZ.description, default=None + ), + links=[ + Link( + href=URIRef( + 
f"{settings.system_uri}{url.path}/items?{urlencode({'_mediatype': mt})}" + ), + rel="items", + type=mt, + ) + for mt in ["application/geo+json", *RDF_MEDIATYPES] + ], + ) diff --git a/prez/services/query_generation/bbox_filter.py b/prez/services/query_generation/bbox_filter.py new file mode 100644 index 00000000..fa88f2be --- /dev/null +++ b/prez/services/query_generation/bbox_filter.py @@ -0,0 +1,60 @@ +from typing import List + +from rdflib.namespace import GEO +from sparql_grammar_pydantic import ( + ArgList, + FunctionCall, + IRI, + Var, + GraphPatternNotTriples, + Expression, + PrimaryExpression, + TriplesSameSubjectPath, + RDFLiteral, + Filter, + Constraint, +) + +from prez.reference_data.cql.geo_function_mapping import GEOF +from prez.services.query_generation.cql import ( + get_wkt_from_coords, + format_coordinates_as_wkt, +) + + +def generate_bbox_filter( + bbox: List[float], filter_crs: str +) -> (GraphPatternNotTriples, List[TriplesSameSubjectPath]): + coordinates = format_coordinates_as_wkt(bbox) + wkt = get_wkt_from_coords(coordinates, "Polygon") + + wkt_with_crs = f"<{filter_crs}> {wkt}" + subject = Var(value="focus_node") + geom_bn_var = Var(value="geom_bnode") + geom_lit_var = Var(value="geom_var") + tssp_list = [] + tssp_list.append( + TriplesSameSubjectPath.from_spo( + subject, IRI(value=GEO.hasGeometry), geom_bn_var + ) + ) + tssp_list.append( + TriplesSameSubjectPath.from_spo(geom_bn_var, IRI(value=GEO.asWKT), geom_lit_var) + ) + + geom_func_iri = IRI(value=GEOF.sfIntersects) + geom_1_exp = Expression.from_primary_expression( + primary_expression=PrimaryExpression(content=geom_lit_var) + ) + geom_2_datatype = IRI(value="http://www.opengis.net/ont/geosparql#wktLiteral") + geom_2_exp = Expression.from_primary_expression( + primary_expression=PrimaryExpression( + content=RDFLiteral(value=wkt_with_crs, datatype=geom_2_datatype) + ) + ) + arg_list = ArgList(expressions=[geom_1_exp, geom_2_exp]) + fc = FunctionCall(iri=geom_func_iri, 
arg_list=arg_list) + + spatial_filter = Filter(constraint=Constraint(content=fc)) + filter_gpnt = GraphPatternNotTriples(content=spatial_filter) + return filter_gpnt, tssp_list diff --git a/prez/services/query_generation/count.py b/prez/services/query_generation/count.py index f21b72a6..e1e92d39 100755 --- a/prez/services/query_generation/count.py +++ b/prez/services/query_generation/count.py @@ -40,7 +40,7 @@ class CountQuery(ConstructQuery): """ Counts focus nodes that can be retrieved for listing queries. - Default limit is 1000 and can be configured in the settings. + Default limit is 100 and can be configured in the settings. Query is of the form: CONSTRUCT { @@ -53,10 +53,10 @@ class CountQuery(ConstructQuery): SELECT ?focus_node WHERE { <<< original where clause >>> - } LIMIT 1001 + } LIMIT 101 } } - BIND(IF(?count = 1001, ">1000", STR(?count)) AS ?count_str) + BIND(IF(?count = 101, ">100", STR(?count)) AS ?count_str) } """ @@ -95,7 +95,7 @@ def __init__(self, original_subselect: SubSelect): ), ) outer_ss_ggp = GroupGraphPattern(content=outer_ss) - count_equals_1001_expr = Expression( + count_equals_limit_expr = Expression( conditional_or_expression=ConditionalOrExpression( conditional_and_expressions=[ ConditionalAndExpression( @@ -134,7 +134,7 @@ def __init__(self, original_subselect: SubSelect): ] ) ) - gt_1000_exp = Expression.from_primary_expression( + gt_limit_exp = Expression.from_primary_expression( PrimaryExpression(content=RDFLiteral(value=f">{limit}")) ) str_count_exp = Expression.from_primary_expression( @@ -150,7 +150,11 @@ def __init__(self, original_subselect: SubSelect): PrimaryExpression( content=BuiltInCall( function_name="IF", - arguments=[count_equals_1001_expr, gt_1000_exp, str_count_exp], + arguments=[ + count_equals_limit_expr, + gt_limit_exp, + str_count_exp, + ], ) ) ), diff --git a/prez/services/query_generation/cql.py b/prez/services/query_generation/cql.py index 9ec26348..0d3eb73a 100755 --- a/prez/services/query_generation/cql.py 
+++ b/prez/services/query_generation/cql.py @@ -1,8 +1,14 @@ +import json +from datetime import datetime +from decimal import Decimal +from pathlib import Path from typing import Generator from pyld import jsonld +from rdf2geojson.contrib.geomet.util import flatten_multi_dim +from rdf2geojson.contrib.geomet.wkt import dumps from rdflib import URIRef, Namespace -from rdflib.namespace import GEO, SH +from rdflib.namespace import GEO from sparql_grammar_pydantic import ( ArgList, FunctionCall, @@ -32,18 +38,63 @@ RDFLiteral, Filter, Constraint, + ConditionalAndExpression, + ValueLogical, + RelationalExpression, + NumericExpression, + AdditiveExpression, + MultiplicativeExpression, + UnaryExpression, + BrackettedExpression, + ConditionalOrExpression, + BooleanLiteral, ) +from prez.models.query_params import parse_datetime from prez.reference_data.cql.geo_function_mapping import ( cql_sparql_spatial_mapping, - cql_to_shapely_mapping, ) CQL = Namespace("http://www.opengis.net/doc/IS/cql2/1.0/") +# SUPPORTED_CQL_TIME_OPERATORS = { +# "t_after", +# "t_before", +# "t_equals", +# "t_disjoint", +# "t_intersects", +# } + + +# all CQL time operators +SUPPORTED_CQL_TIME_OPERATORS = { + "t_after", + "t_before", + "t_contains", + "t_disjoint", + "t_during", + "t_equals", + "t_finishedBy", + "t_finishes", + "t_intersects", + "t_meets", + "t_metBy", + "t_overlappedBy", + "t_overlaps", + "t_startedBy", + "t_starts", +} + +UNBOUNDED = "unbounded" + +relations_path = Path(__file__).parent.parent.parent / ( + "reference_data/cql/bounded_temporal_interval_relation_matrix" ".json" +) +relations = json.loads(relations_path.read_text()) + class CQLParser: - def __init__(self, cql=None, context: dict = None, cql_json: dict = None): + def __init__(self, cql=None, context: dict = None, cql_json: dict = None, crs=None): self.ggps_inner_select = None self.inner_select_gpnt_list = None self.cql: dict = cql @@ -55,6 +106,7 @@ def __init__(self, cql=None, context: dict = None, cql_json: dict = 
None): self.gpnt_list = [] self.tss_list = [] self.tssp_list = [] + self.crs = crs def generate_jsonld(self): combined = {"@context": self.context, **self.cql} @@ -130,6 +182,8 @@ def parse_logical_operators( yield from self._handle_spatial(operator, args, ggps) elif operator == "in": yield from self._handle_in(args, ggps) + elif operator in SUPPORTED_CQL_TIME_OPERATORS: + yield from self._handle_temporal(operator, args, ggps) else: raise NotImplementedError(f"Operator {operator} not implemented.") @@ -236,7 +290,6 @@ def _handle_like(self, args, existing_ggps=None): ) ) ggps.add_pattern(filter_gpnt) - # self._append_graph_pattern(ggps, filter_expr) yield ggps def _handle_spatial(self, operator, args, existing_ggps=None): @@ -247,9 +300,10 @@ def _handle_spatial(self, operator, args, existing_ggps=None): coordinates, geom_type = self._extract_spatial_info(coordinates_list, args) if coordinates: - wkt = self.get_wkt_from_coords(coordinates, geom_type) + wkt = get_wkt_from_coords(coordinates, geom_type) + wkt_with_crs = f"<{self.crs}> {wkt}" prop = args[0].get(str(CQL.property))[0].get("@id") - if URIRef(prop) == SH.focusNode: + if prop == "http://example.com/geometry": subject = Var(value="focus_node") else: subject = IRI(value=prop) @@ -267,7 +321,7 @@ def _handle_spatial(self, operator, args, existing_ggps=None): ) geom_2_exp = Expression.from_primary_expression( primary_expression=PrimaryExpression( - content=RDFLiteral(value=wkt, datatype=geom_2_datatype) + content=RDFLiteral(value=wkt_with_crs, datatype=geom_2_datatype) ) ) arg_list = ArgList(expressions=[geom_1_exp, geom_2_exp]) @@ -278,15 +332,6 @@ def _handle_spatial(self, operator, args, existing_ggps=None): ggps.add_pattern(filter_gpnt) yield ggps - def get_wkt_from_coords(self, coordinates, geom_type): - shapely_spatial_class = cql_to_shapely_mapping.get(geom_type) - if not shapely_spatial_class: - raise NotImplementedError( - f'Geometry Class for "{geom_type}" not found in Shapely.' 
- ) - wkt = shapely_spatial_class(coordinates).wkt - return wkt - def _handle_in(self, args, existing_ggps=None): self.var_counter += 1 ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub() @@ -322,7 +367,6 @@ content=InlineData(data_block=DataBlock(block=ildov)) ) ggps.add_pattern(gpnt) - # self._append_graph_pattern(ggps, gpnt) yield ggps def _extract_spatial_info(self, coordinates_list, args): @@ -338,12 +382,386 @@ if bbox_list: geom_type = "Polygon" bbox_values = [item["@value"] for item in bbox_list] - if len(bbox_values) == 4: - coordinates = [ - (bbox_values[0], bbox_values[1]), - (bbox_values[0], bbox_values[3]), - (bbox_values[2], bbox_values[3]), - (bbox_values[2], bbox_values[1]), - (bbox_values[0], bbox_values[1]), - ] + coordinates = format_coordinates_as_wkt(bbox_values) return coordinates, geom_type + + def _handle_temporal(self, comp_func, args, existing_ggps=None): + ggps = existing_ggps if existing_ggps is not None else GroupGraphPatternSub() + + if len(args) != 2: + raise ValueError( + f"Temporal operator {comp_func} requires exactly 2 arguments."
+ ) + + operands = {} + for i, arg in enumerate(args, start=1): + # check if the arg is an interval + interval_list = arg.get(str(CQL.interval)) + if interval_list: + for n, item in enumerate(interval_list): + label = "start" if n == 0 else "end" + prop = item.get(str(CQL.property)) + if prop: + self._triple_for_time_prop(ggps, i, label, prop, operands) + date_val = item.get("@value") + if date_val: + self._dt_to_rdf_literal(i, date_val, label, operands) + continue + + # handle instants - prop and date + label = "instant" + # check if the arg is a property + prop = arg.get(str(CQL.property)) + if prop: + self._triple_for_time_prop(ggps, i, label, prop, operands) + continue + + # check if the arg is a date + date = ( + arg.get(str(CQL.date)) + or arg.get(str(CQL.datetime)) + or arg.get(str(CQL.timestamp)) + ) + if date: + date_val = date[0].get("@value") + self._dt_to_rdf_literal(i, date_val, label, operands) + + gpnt = self.process_temporal_function(comp_func, operands) + + ggps.add_pattern(gpnt) + yield ggps + + def get_type_and_bound(self, operands, prefix): + """ + Get the type label and abbreviation for a temporal operand. + Options are "instant" with "I", or "interval" with "U" (unbounded) or "B" (bounded). 
+ """ + if f"{prefix}_instant" in operands: + return "instant", "I" + elif f"{prefix}_start" in operands or f"{prefix}_end" in operands: + start_bound = "U" if operands.get(f"{prefix}_start") is UNBOUNDED else "B" + end_bound = "U" if operands.get(f"{prefix}_end") is UNBOUNDED else "B" + return "interval", start_bound + end_bound + else: + raise ValueError(f"Invalid operand for {prefix}") + + def process_temporal_function(self, comp_func, operands): + t1_type, t1_bound = self.get_type_and_bound(operands, "t1") + t2_type, t2_bound = self.get_type_and_bound(operands, "t2") + + comparison_type = f"{t1_type}_{t2_type}" + + if comparison_type not in relations[comp_func]: + raise ValueError( + f"The {comp_func} function is not applicable to {comparison_type} comparisons." + ) + + key = f"{t1_bound}_{t2_bound}" + + result = relations[comp_func][comparison_type].get(key) + + if result is True or result is False: + return create_filter_bool_gpnt(result) + elif isinstance(result, dict): + negated = relations[comp_func].get("negated", False) + conditions = result["conditions"] + logic = result.get( + "logic", "AND" + ) # Default to AND if logic is not specified + comparisons = [ + (operands[left], op, operands[right]) for left, op, right in conditions + ] + if logic == "AND": + return create_temporal_and_gpnt(comparisons) + elif logic == "OR" and negated: # for t_intersects only + return create_temporal_or_gpnt(comparisons, negated=True) + elif logic == "OR": + return create_temporal_or_gpnt(comparisons) + else: + raise ValueError(f"Unknown logic type: {logic}") + else: + raise ValueError( + f"Unexpected result type for {comp_func} {comparison_type} {key}" + ) + + def _triple_for_time_prop(self, ggps, i, label, prop, operands): + prop_uri = prop[0].get("@id") + value = IRI(value=prop_uri) + var = Var(value=f"dt_{i}_{label}") + operands[f"t{i}_{label}"] = var + self._add_triple(ggps, Var(value="focus_node"), value, var) + + def _handle_interval_list(self, all_args, 
comparator_args, interval_list): + for item in interval_list: + if item.get(str(CQL.property)): + prop = item.get(str(CQL.property))[0].get("@id") + comparator_args.append(IRI(value=prop)) + elif item.get("@value"): + val = item.get("@value") + # self._dt_to_rdf_literal(comparator_args, val) + dt, _ = parse_datetime(val) + comparator_args.append( + RDFLiteral( + value=dt.isoformat(), + datatype=IRI(value="http://www.w3.org/2001/XMLSchema#dateTime"), + ) + ) + all_args.append(comparator_args) + + def _dt_to_rdf_literal(self, i, dt_str, label, operands): + if dt_str == "..": + operands[f"t{i}_{label}"] = UNBOUNDED + else: + dt, _ = parse_datetime(dt_str) + operands[f"t{i}_{label}"] = RDFLiteral( + value=dt.isoformat(), + datatype=IRI(value="http://www.w3.org/2001/XMLSchema#dateTime"), + ) + + +def format_coordinates_as_wkt(bbox_values): + if len(bbox_values) == 4: + coordinates = [ + [ + [bbox_values[0], bbox_values[1]], + [bbox_values[0], bbox_values[3]], + [bbox_values[2], bbox_values[3]], + [bbox_values[2], bbox_values[1]], + [bbox_values[0], bbox_values[1]], + ] + ] + else: + if len(bbox_values) == 6: + raise NotImplementedError("XYZ bbox not yet supported.") + else: + raise ValueError(f"Invalid number of values in bbox ({len(bbox_values)}).") + return coordinates + + +def count_decimal_places(num): + return abs(Decimal(str(num)).as_tuple().exponent) + + +def find_max_decimals(coordinates): + max_decimals = 0 + flattened = flatten_multi_dim(coordinates) + for value in flattened: + if isinstance(value, (int, float)): + max_decimals = max(max_decimals, count_decimal_places(value)) + return max_decimals + + +def get_wkt_from_coords(coordinates, geom_type: str): + max_decimals = find_max_decimals([(geom_type, coordinates, None)]) + return dumps({"type": geom_type, "coordinates": coordinates}, max_decimals) + + +def create_temporal_filter_gpnt(dt: datetime, op: str) -> GraphPatternNotTriples: + if op not in ["=", "<=", ">=", "<", ">"]: + raise ValueError(f"Invalid 
operator: {op}") + return GraphPatternNotTriples( + content=Filter.filter_relational( + focus=PrimaryExpression( + content=Var(value="datetime"), + ), + comparators=PrimaryExpression( + content=RDFLiteral( + value=dt.isoformat(), + datatype=IRI(value="http://www.w3.org/2001/XMLSchema#dateTime"), + ) + ), + operator=op, + ) + ) + + +def create_temporal_or_gpnt( + comparisons: list[tuple[Var | RDFLiteral, str, Var | RDFLiteral]], negated=False +) -> GraphPatternNotTriples: + """ + Create a FILTER with multiple conditions joined by OR (||). + + Format: FILTER ( comp1 op1 comp2 || comp3 op2 comp4 || ... ) + + if negated: + Format: FILTER (! (comp1 op1 comp2 || comp3 op2 comp4 || ...) ) + """ + _and_expressions = [] + for left_comp, op, right_comp in comparisons: + if op not in ["=", "<=", ">=", "<", ">"]: + raise ValueError(f"Invalid operator: {op}") + _and_expressions.append( + ConditionalAndExpression( + value_logicals=[ + ValueLogical( + relational_expression=RelationalExpression( + left=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=PrimaryExpression( + content=left_comp + ) + ) + ) + ) + ), + operator=op, + right=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=PrimaryExpression( + content=right_comp + ) + ) + ) + ) + ), + ) + ) + ] + ) + ) + if not negated: + return GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BrackettedExpression( + expression=Expression( + conditional_or_expression=ConditionalOrExpression( + conditional_and_expressions=_and_expressions + ) + ) + ) + ) + ) + ) + else: + return GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BrackettedExpression( + expression=Expression( + conditional_or_expression=ConditionalOrExpression( + conditional_and_expressions=[ + 
ConditionalAndExpression( + value_logicals=[ + ValueLogical( + relational_expression=RelationalExpression( + left=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + operator="!", + primary_expression=PrimaryExpression( + content=BrackettedExpression( + expression=Expression( + conditional_or_expression=ConditionalOrExpression( + conditional_and_expressions=_and_expressions + ) + ) + ) + ), + ) + ) + ) + ) + ) + ) + ] + ) + ] + ) + ) + ) + ) + ) + ) + + +def create_filter_bool_gpnt(boolean: bool) -> GraphPatternNotTriples: + """ + For filtering out all results in scenarios where the input arguments are valid but logically determine that the + filter will filter out all results. + + generates FILTER(false) or FILTER(true) + """ + return GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BrackettedExpression( + expression=Expression.from_primary_expression( + primary_expression=PrimaryExpression( + content=BooleanLiteral(value=boolean) + ) + ) + ) + ) + ) + ) + + +def create_temporal_and_gpnt( + comparisons: list[tuple[Var | RDFLiteral, str, Var | RDFLiteral]] +) -> GraphPatternNotTriples: + """ + Create a FILTER with multiple conditions joined by AND. + + :param comparisons: List of tuples, each containing (left_comp, operator, right_comp) + :return: GraphPatternNotTriples + + Format: + FILTER ( comp1 op1 comp2 && comp3 op2 comp4 && ... 
) + """ + _vl_expressions = [] + + for left_comp, op, right_comp in comparisons: + if op not in ["=", "<=", ">=", "<", ">"]: + raise ValueError(f"Invalid operator: {op}") + + _vl_expressions.append( + ValueLogical( + relational_expression=RelationalExpression( + left=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=PrimaryExpression( + content=left_comp + ) + ) + ) + ) + ), + operator=op, + right=NumericExpression( + additive_expression=AdditiveExpression( + base_expression=MultiplicativeExpression( + base_expression=UnaryExpression( + primary_expression=PrimaryExpression( + content=right_comp + ) + ) + ) + ) + ), + ) + ) + ) + + return GraphPatternNotTriples( + content=Filter( + constraint=Constraint( + content=BrackettedExpression( + expression=Expression( + conditional_or_expression=ConditionalOrExpression( + conditional_and_expressions=[ + ConditionalAndExpression(value_logicals=_vl_expressions) + ] + ) + ) + ) + ) + ) + ) diff --git a/prez/services/query_generation/datetime_filter.py b/prez/services/query_generation/datetime_filter.py new file mode 100644 index 00000000..30795981 --- /dev/null +++ b/prez/services/query_generation/datetime_filter.py @@ -0,0 +1,62 @@ +from datetime import datetime +from typing import List, Optional +from prez.config import settings + +from sparql_grammar_pydantic import ( + GraphPatternNotTriples, + TriplesSameSubjectPath, + Filter, + PrimaryExpression, + Var, + IRI, + RDFLiteral, +) + +from prez.models.query_params import DateTimeOrUnbounded +from prez.services.query_generation.cql import create_temporal_filter_gpnt + + +def generate_datetime_filter( + datetime_1: DateTimeOrUnbounded, datetime_2: Optional[DateTimeOrUnbounded] +) -> (GraphPatternNotTriples, List[TriplesSameSubjectPath]): + # tssp + tssp_list = [ + TriplesSameSubjectPath.from_spo( + subject=Var(value="focus_node"), + 
predicate=IRI(value=settings.temporal_predicate), + object=Var(value="datetime"), + ) + ] + + # gpnt + filter_gpnts = [] + if datetime_1 is None and datetime_2 is None: + # Case: No datetime filter + pass + elif isinstance(datetime_1, datetime) and datetime_2 is None: + # Case 1: Single date-time + # Example: "2018-02-12T23:20:50Z" + op = "=" + filter_gpnts.append(create_temporal_filter_gpnt(datetime_1, op)) + elif datetime_1 == ".." and isinstance(datetime_2, datetime): + # Case 2: Half-bounded interval with open start + # Examples: "../2018-03-18T12:31:12Z" or "/2018-03-18T12:31:12Z" + op = "<=" + filter_gpnts.append(create_temporal_filter_gpnt(datetime_2, op)) + elif datetime_2 == ".." and isinstance(datetime_1, datetime): + # Case 3: Half-bounded interval with open end + # Examples: "2018-02-12T00:00:00Z/.." or "2018-02-12T00:00:00Z/" + op = ">=" + filter_gpnts.append(create_temporal_filter_gpnt(datetime_1, op)) + elif isinstance(datetime_1, datetime) and isinstance(datetime_2, datetime): + # Case 4: Fully bounded interval + # Example: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" + dt_1_op = ">=" + dt_2_op = "<=" + filter_gpnts.append(create_temporal_filter_gpnt(datetime_1, dt_1_op)) + filter_gpnts.append(create_temporal_filter_gpnt(datetime_2, dt_2_op)) + else: + raise ValueError( + f"Invalid datetime format, datetime_1: {datetime_1}, datetime_2: {datetime_2}" + ) + return filter_gpnts, tssp_list diff --git a/prez/services/query_generation/search.py b/prez/services/query_generation/search.py index b4bbb20a..2cc0dca4 100755 --- a/prez/services/query_generation/search.py +++ b/prez/services/query_generation/search.py @@ -379,8 +379,12 @@ def order_by_direction(self): @property def limit(self): - return self.where_clause.group_graph_pattern.content.solution_modifier.limit_offset.limit_clause.limit + return ( + self.where_clause.group_graph_pattern.content.solution_modifier.limit_offset.limit_clause.limit + ) @property def offset(self): - return 
self.where_clause.group_graph_pattern.content.solution_modifier.limit_offset.offset_clause.offset + return ( + self.where_clause.group_graph_pattern.content.solution_modifier.limit_offset.offset_clause.offset + ) diff --git a/prez/services/query_generation/shacl.py b/prez/services/query_generation/shacl.py index d67d7825..6ce6f2ad 100755 --- a/prez/services/query_generation/shacl.py +++ b/prez/services/query_generation/shacl.py @@ -1,7 +1,16 @@ from __future__ import annotations from string import Template -from typing import List, Optional, Any, Dict, Literal as TypingLiteral, Union, Tuple +from typing import ( + List, + Optional, + Any, + Dict, + Literal as TypingLiteral, + Union, + Tuple, + Type, +) from pydantic import BaseModel from rdflib import URIRef, BNode, Graph, RDFS @@ -9,6 +18,14 @@ from rdflib.namespace import SH, RDF from rdflib.term import Node from sparql_grammar_pydantic import ( + InlineData, + DataBlock, + InlineDataOneVar, + DataBlockValue, + Filter, + Constraint, + OptionalGraphPattern, + IRIOrFunction, IRI, Var, GraphPatternNotTriples, @@ -19,14 +36,21 @@ GroupGraphPatternSub, TriplesBlock, TriplesSameSubjectPath, - InlineData, - DataBlock, - InlineDataOneVar, - DataBlockValue, - Filter, - Constraint, - OptionalGraphPattern, - IRIOrFunction, + PropertyListPathNotEmpty, + VerbPath, + SG_Path, + PathAlternative, + ObjectListPath, + ObjectPath, + GraphNodePath, + VarOrTerm, + GraphTerm, + GroupOrUnionGraphPattern, + PathElt, + PathEltOrInverse, + PathPrimary, + PathSequence, + PathMod, ) from prez.reference_data.prez_ns import ONT, SHEXT @@ -92,8 +116,9 @@ def from_graph(self): # TODO this can be a SPARQL select against the system gra kind=self.kind, focus_node=self.focus_node, path_nodes=self.path_nodes, + shape_number=i, ) - for ps_uri in self.propertyShapesURIs + for i, ps_uri in enumerate(self.propertyShapesURIs) ] self.hierarchy_level = next( self.graph.objects(self.uri, ONT.hierarchyLevel), None @@ -247,7 +272,9 @@ class 
PropertyShape(Shape): kind: TypingLiteral["endpoint", "profile"] focus_node: Union[IRI, Var] # inputs - property_paths: Optional[List[PropertyPath]] = None + shape_number: int = 0 + and_property_paths: Optional[List[PropertyPath]] = None + union_property_paths: Optional[List[PropertyPath]] = None or_klasses: Optional[List[URIRef]] = None # outputs grammar: Optional[GroupGraphPatternSub] = None @@ -271,7 +298,8 @@ def maxCount(self): return int(maxc) def from_graph(self): - self.property_paths = [] + self.and_property_paths = [] + self.union_property_paths = [] _single_class = next(self.graph.objects(self.uri, SH["class"]), None) if _single_class: self.or_klasses = [URIRef(_single_class)] @@ -287,177 +315,146 @@ def from_graph(self): for pp in pps: self._process_property_path(pp) # get the longest property path first - for endpoints this will be the path any path_nodes apply to - self.property_paths = sorted( - self.property_paths, key=lambda x: len(x), reverse=True + self.and_property_paths = sorted( + self.and_property_paths, key=lambda x: len(x), reverse=True ) - def _process_property_path(self, pp): + def _process_property_path(self, pp, union: bool = False): if isinstance(pp, URIRef): - self.property_paths.append(Path(value=pp)) + self._add_path(Path(value=pp), union) elif isinstance(pp, BNode): - pred_objects_gen = self.graph.predicate_objects(subject=pp) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) + pred_objects = list(self.graph.predicate_objects(subject=pp)) + if not pred_objects: + return + + bn_pred, bn_obj = pred_objects[0] + if bn_obj == SH.union: - union_list = list(Collection(self.graph, pp)) - if union_list != [SH.union]: - union_list_bnode = union_list[1] - union_items = list(Collection(self.graph, union_list_bnode)) - for item in union_items: - self._process_property_path(item) - elif bn_pred == SH.inversePath: - self.property_paths.append(InversePath(value=bn_obj)) - # elif bn_pred == SH.alternativePath: - # 
predicates.extend(list(Collection(self.profile_graph, bn_obj))) + self._process_union(pp, union) + elif bn_pred in PRED_TO_PATH_CLASS: + path_class = PRED_TO_PATH_CLASS[bn_pred] + self._add_path(path_class(value=bn_obj), union) else: # sequence paths - paths = list(Collection(self.graph, pp)) - sp_list = [] - for path in paths: - if isinstance(path, BNode): - pred_objects_gen = self.graph.predicate_objects(subject=path) - bn_pred, bn_obj = next(pred_objects_gen, (None, None)) - if bn_pred == SH.inversePath: - sp_list.append(InversePath(value=bn_obj)) - elif isinstance(path, URIRef): - sp_list.append(Path(value=path)) - self.property_paths.append(SequencePath(value=sp_list)) + self._process_sequence(pp, union) + + def _process_union(self, pp, union: bool): + union_list = list(Collection(self.graph, pp)) + if union_list != [SH.union]: + union_list_bnode = union_list[1] + union_items = list(Collection(self.graph, union_list_bnode)) + for item in union_items: + self._process_property_path(item, True) + + def _process_sequence(self, pp, union: bool): + paths = list(Collection(self.graph, pp)) + sp_list = [] + for path in paths: + if isinstance(path, BNode): + pred_objects = list(self.graph.predicate_objects(subject=path)) + if pred_objects: + bn_pred, bn_obj = pred_objects[0] + if bn_pred in PRED_TO_PATH_CLASS: + path_class = PRED_TO_PATH_CLASS[bn_pred] + sp_list.append(path_class(value=bn_obj)) + elif isinstance(path, URIRef): + sp_list.append(Path(value=path)) + self._add_path(SequencePath(value=sp_list), union) + + def _add_path(self, path: PropertyPath, union: bool): + if union: + self.union_property_paths.append(path) + else: + self.and_property_paths.append(path) def to_grammar(self): # label nodes in the inner select and profile part of the query differently for clarity. 
if self.kind == "endpoint": path_or_prop = "path" elif self.kind == "profile": - path_or_prop = "prof" + path_or_prop = f"prof_{self.shape_number + 1}" # set up the path nodes - either from supplied values or set as variables - total_individual_nodes = sum([len(i) for i in self.property_paths]) + total_individual_nodes = sum([len(i) for i in self.and_property_paths]) for i in range(total_individual_nodes): path_node_str = f"{path_or_prop}_node_{i + 1}" if path_node_str not in self.path_nodes: self.path_nodes[path_node_str] = Var(value=path_node_str) self.tssp_list = [] - len_pp = max([len(i) for i in self.property_paths]) - # sh:class applies to the end of sequence paths - if f"{path_or_prop}_node_{len_pp}" in self.path_nodes: - path_node_term = self.path_nodes[f"{path_or_prop}_node_{len_pp}"] - else: - path_node_term = Var(value=f"{path_or_prop}_node_{len_pp}") - - # useful for determining which endpoint property shape should be used when a request comes in on endpoint - self.classes_at_len[f"{path_or_prop}_node_{len_pp}"] = self.or_klasses - - if self.or_klasses: - if len(self.or_klasses) == 1: - self.add_triple_to_tss_and_tssp( - ( - path_node_term, - IRI(value=RDF.type), - IRI(value=self.or_klasses[0]), - ) - ) + if path_or_prop == "path": + len_pp = max([len(i) for i in self.and_property_paths]) + # sh:class applies to the end of sequence paths + if f"{path_or_prop}_node_{len_pp}" in self.path_nodes: + path_node_term = self.path_nodes[f"{path_or_prop}_node_{len_pp}"] else: - self.add_triple_to_tss_and_tssp( - ( - path_node_term, - IRI(value=RDF.type), - Var(value=f"{path_or_prop}_node_classes_{len_pp}"), - ) - ) - dbvs = [ - DataBlockValue(value=IRI(value=klass)) for klass in self.or_klasses - ] - self.gpnt_list.append( - GraphPatternNotTriples( - content=InlineData( - data_block=DataBlock( - block=InlineDataOneVar( - variable=Var( - value=f"{path_or_prop}_node_classes_{len_pp}" - ), - datablockvalues=dbvs, - ) - ) - ) - ) - ) + path_node_term = 
Var(value=f"{path_or_prop}_node_{len_pp}") - if self.property_paths: - i = 0 - for property_path in self.property_paths: - if f"{path_or_prop}_node_{i + 1}" in self.path_nodes: - path_node_1 = self.path_nodes[f"{path_or_prop}_node_{i + 1}"] - else: - path_node_1 = Var(value=f"{path_or_prop}_node_{i + 1}") - # for sequence paths up to length two: - if f"{path_or_prop}_node_{i + 2}" in self.path_nodes: - path_node_2 = self.path_nodes[f"{path_or_prop}_node_{i + 2}"] - else: - path_node_2 = Var(value=f"{path_or_prop}_node_{i + 2}") - - if isinstance(property_path, Path): - if property_path.value == SHEXT.allPredicateValues: - pred = Var(value="preds") - obj = Var(value="vals") - else: - pred = IRI(value=property_path.value) - obj = path_node_1 - # vanilla property path + # useful for determining which endpoint property shape should be used when a request comes in on endpoint + self.classes_at_len[f"{path_or_prop}_node_{len_pp}"] = self.or_klasses + + if self.or_klasses: + if len(self.or_klasses) == 1: self.add_triple_to_tss_and_tssp( ( - self.focus_node, - pred, - obj, + path_node_term, + IRI(value=RDF.type), + IRI(value=self.or_klasses[0]), ) ) - i += 1 - - elif isinstance(property_path, InversePath): + else: self.add_triple_to_tss_and_tssp( ( - path_node_1, - IRI(value=property_path.value), - self.focus_node, + path_node_term, + IRI(value=RDF.type), + Var(value=f"{path_or_prop}_node_classes_{len_pp}"), ) ) - i += 1 - - elif isinstance(property_path, SequencePath): - for j, path in enumerate(property_path.value): - if isinstance(path, Path): - if j == 0: - self.add_triple_to_tss_and_tssp( - ( - self.focus_node, - IRI(value=path.value), - path_node_1, + dbvs = [ + DataBlockValue(value=IRI(value=klass)) + for klass in self.or_klasses + ] + self.gpnt_list.append( + GraphPatternNotTriples( + content=InlineData( + data_block=DataBlock( + block=InlineDataOneVar( + variable=Var( + value=f"{path_or_prop}_node_classes_{len_pp}" + ), + datablockvalues=dbvs, ) ) - else: - 
self.add_triple_to_tss_and_tssp( - ( - path_node_1, - IRI(value=path.value), - path_node_2, - ) - ) - elif isinstance(path, InversePath): - if j == 0: - self.add_triple_to_tss_and_tssp( - ( - path_node_1, - IRI(value=path.value), - self.focus_node, - ) - ) - else: - self.add_triple_to_tss_and_tssp( - ( - path_node_2, - IRI(value=path.value), - path_node_1, - ) - ) - i += len(property_path) + ) + ) + ) + + pp_i = 0 + tssp_list_for_and = [] + tssp_list_for_union = [] + if self.and_property_paths: + self.process_property_paths( + self.and_property_paths, path_or_prop, tssp_list_for_and, pp_i + ) + for inner_list in tssp_list_for_and: + self.tssp_list.extend(inner_list) + if self.union_property_paths: + self.process_property_paths( + self.union_property_paths, path_or_prop, tssp_list_for_union, pp_i + ) + ggp_list = [] + for inner_list in tssp_list_for_union: + ggp_list.append( + GroupGraphPattern( + content=GroupGraphPatternSub( + triples_block=TriplesBlock.from_tssp_list(inner_list) + ) + ) + ) + self.gpnt_list.append( + GraphPatternNotTriples( + content=GroupOrUnionGraphPattern(group_graph_patterns=ggp_list) + ) + ) if self.minCount == 0: # triples = self.tssp_list.copy() @@ -477,7 +474,7 @@ def to_grammar(self): self.tssp_list = [] if self.maxCount == 0: - for p in self.property_paths: + for p in self.and_property_paths: assert isinstance(p, Path) # only support filtering direct predicates # reset the triples list @@ -491,7 +488,7 @@ def to_grammar(self): values = [ PrimaryExpression(content=IRIOrFunction(iri=IRI(value=p.value))) - for p in self.property_paths + for p in self.and_property_paths ] gpnt = GraphPatternNotTriples( content=Filter.filter_relational( @@ -502,6 +499,130 @@ def to_grammar(self): ) self.gpnt_list.append(gpnt) + def process_property_paths(self, property_paths, path_or_prop, tssp_list, pp_i): + for property_path in property_paths: + if f"{path_or_prop}_node_{pp_i + 1}" in self.path_nodes: + path_node_1 = 
self.path_nodes[f"{path_or_prop}_node_{pp_i + 1}"] + else: + path_node_1 = Var(value=f"{path_or_prop}_node_{pp_i + 1}") + + if f"{path_or_prop}_node_{pp_i + 2}" in self.path_nodes: + path_node_2 = self.path_nodes[f"{path_or_prop}_node_{pp_i + 2}"] + else: + path_node_2 = Var(value=f"{path_or_prop}_node_{pp_i + 2}") + + current_tssp = [] + + if isinstance(property_path, Path): + if property_path.value == SHEXT.allPredicateValues: + pred = Var(value="preds") + obj = Var(value="vals") + else: + pred = IRI(value=property_path.value) + obj = path_node_1 + triple = (self.focus_node, pred, obj) + self.tss_list.append(TriplesSameSubject.from_spo(*triple)) + current_tssp.append(TriplesSameSubjectPath.from_spo(*triple)) + pp_i += 1 + + elif isinstance(property_path, InversePath): + triple = (path_node_1, IRI(value=property_path.value), self.focus_node) + self.tss_list.append(TriplesSameSubject.from_spo(*triple)) + current_tssp.append(TriplesSameSubjectPath.from_spo(*triple)) + pp_i += 1 + + elif isinstance( + property_path, Union[ZeroOrMorePath, OneOrMorePath, ZeroOrOnePath] + ): + triple = (self.focus_node, IRI(value=property_path.value), path_node_1) + self.tss_list.append(TriplesSameSubject.from_spo(*triple)) + self.tssp_list.append( + _tssp_for_pathmods( + self.focus_node, + IRI(value=property_path.value), + path_node_1, + property_path.operand, + ) + ) + pp_i += 1 + + elif isinstance(property_path, SequencePath): + for j, path in enumerate(property_path.value): + if isinstance(path, Path): + if j == 0: + triple = ( + self.focus_node, + IRI(value=path.value), + path_node_1, + ) + else: + triple = (path_node_1, IRI(value=path.value), path_node_2) + elif isinstance(path, InversePath): + if j == 0: + triple = ( + path_node_1, + IRI(value=path.value), + self.focus_node, + ) + else: + triple = (path_node_2, IRI(value=path.value), path_node_1) + self.tss_list.append(TriplesSameSubject.from_spo(*triple)) + current_tssp.append(TriplesSameSubjectPath.from_spo(*triple)) + pp_i += 
len(property_path.value) + + if current_tssp: + tssp_list.append(current_tssp) + + return pp_i + + +def _tssp_for_pathmods(focus_node, pred, obj, pathmod): + """ + Creates path modifier TriplesSameSubjectPath objects. + """ + if isinstance(focus_node, IRI): + focus_node = GraphTerm(value=focus_node) + return TriplesSameSubjectPath( + content=( + VarOrTerm(varorterm=focus_node), + PropertyListPathNotEmpty( + first_pair=( + VerbPath( + path=SG_Path( + path_alternative=PathAlternative( + sequence_paths=[ + PathSequence( + list_path_elt_or_inverse=[ + PathEltOrInverse( + path_elt=PathElt( + path_primary=PathPrimary( + value=pred, + ), + path_mod=PathMod(pathmod=pathmod), + ) + ) + ] + ) + ] + ) + ) + ), + ObjectListPath( + object_paths=[ + ObjectPath( + graph_node_path=GraphNodePath( + varorterm_or_triplesnodepath=VarOrTerm( + varorterm=obj + ) + ) + ) + ] + ), + ) + ), + ) + ) + class PropertyPath(BaseModel): class Config: @@ -509,23 +630,51 @@ class Config: uri: Optional[URIRef] = None + def __len__(self): + return 1 # Default length for all PropertyPath subclasses + class Path(PropertyPath): value: URIRef - def __len__(self): - return 1 - class SequencePath(PropertyPath): value: List[PropertyPath] def __len__(self): - return len(self.value) + return len(self.value) # Override to return the length of the sequence class InversePath(PropertyPath): value: URIRef + +class ZeroOrMorePath(PropertyPath): + value: URIRef + operand: str = "*" + + +class OneOrMorePath(PropertyPath): + value: URIRef + operand: str = "+" + + +class ZeroOrOnePath(PropertyPath): + value: URIRef + operand: str = "?" 
+ + +class AlternativePath(PropertyPath): + value: List[PropertyPath] + def __len__(self): - return 1 + return len(self.value) + + +PRED_TO_PATH_CLASS: Dict[URIRef, Type[PropertyPath]] = { + SH.inversePath: InversePath, + SH.zeroOrMorePath: ZeroOrMorePath, + SH.oneOrMorePath: OneOrMorePath, + SH.zeroOrOnePath: ZeroOrOnePath, + SH.alternativePath: AlternativePath, +} diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py index b75d5dc0..d83ee027 100755 --- a/prez/services/query_generation/umbrella.py +++ b/prez/services/query_generation/umbrella.py @@ -25,8 +25,10 @@ ) from prez.models.query_params import QueryParams +from prez.services.query_generation.bbox_filter import generate_bbox_filter from prez.services.query_generation.concept_hierarchy import ConceptHierarchyQuery from prez.services.query_generation.cql import CQLParser +from prez.services.query_generation.datetime_filter import generate_datetime_filter from prez.services.query_generation.search import SearchQueryRegex from prez.services.query_generation.shacl import NodeShape @@ -176,9 +178,12 @@ def merge_listing_query_grammar_inputs( query_params: Optional[QueryParams] = None, ) -> dict: page = query_params.page - per_page = query_params.per_page + limit = query_params.limit order_by = query_params.order_by order_by_direction = query_params.order_by_direction + bbox = query_params.bbox + datetime = query_params.datetime + filter_crs = query_params.filter_crs """ Merges the inputs for a query grammar. 
""" @@ -193,7 +198,7 @@ def merge_listing_query_grammar_inputs( "order_by_direction": order_by_direction, } - limit = int(per_page) + limit = int(limit) offset = limit * (int(page) - 1) kwargs["limit"] = limit kwargs["offset"] = offset @@ -237,4 +242,14 @@ def merge_listing_query_grammar_inputs( kwargs["inner_select_tssp_list"].extend(endpoint_nodeshape.tssp_list) kwargs["inner_select_gpnt"].extend(endpoint_nodeshape.gpnt_list) + if bbox: + gpnt, tssp_list = generate_bbox_filter(bbox, filter_crs) + kwargs["inner_select_gpnt"].append(gpnt) + kwargs["inner_select_tssp_list"].extend(tssp_list) + + if datetime: + gpnt_list, tssp_list = generate_datetime_filter(*datetime) + kwargs["inner_select_gpnt"].extend(gpnt_list) + kwargs["inner_select_tssp_list"].extend(tssp_list) + return kwargs diff --git a/prez/url.py b/prez/url.py deleted file mode 100755 index 0635d59a..00000000 --- a/prez/url.py +++ /dev/null @@ -1,38 +0,0 @@ -import math - - -def order_urls(order: list[str], values: list[str]): - """Order a set of URL values based on a preferred order. - - :param order: The preferred order - used to see if the URL values match it with a str.startswith check. - :param values: The URL values. - - >>> preferred_order = [ - >>> "/v/vocab", - >>> "/v/collection", - >>> "/s/catalogs", - >>> "/c/catalogs" - >>> ] - >>> urls = [ - >>> "/s/catalogs/blah", - >>> "/object/blah", - >>> "/v/collection/123", - >>> "/c/catalogs/321", - >>> "/v/vocab/some-scheme" - >>> ] - >>> sorted_urls = order_urls(preferred_order, urls) - >>> assert sorted_urls == [ - >>> "/v/vocab/some-scheme", - >>> "/v/collection/123", - >>> "/s/catalogs/blah", - >>> "/c/catalogs/321", - >>> "/object/blah" - >>> ] - """ - order_dict = {url: i for i, url in enumerate(order)} - # Set values matched with None to infinity. 
- order_dict.update({None: math.inf}) - return sorted( - values, - key=lambda url: order_dict[next((o for o in order if url.startswith(o)), None)], - ) diff --git a/pyproject.toml b/pyproject.toml index c7e3aee7..ebeb2b1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,19 +20,23 @@ include = [ [tool.poetry.dependencies] python = "^3.11" -uvicorn = "^0.30.0" +uvicorn = {version = "^0.30.0", optional = true } httpx = "^0.27.0" rdflib = "^7.0.0" toml = "^0.10.2" -fastapi = "^0.111.0" +fastapi = "^0.114.0" jinja2 = "^3.1.2" oxrdflib = "^0.3.6" -pydantic = "^2.7.2" -pydantic-settings = "^2.2.0" +pydantic = "^2.9.1" +pydantic-settings = "^2.5.0" pyld = "^2.0.4" -shapely = "^2.0.3" aiocache = "^0.12.2" sparql-grammar-pydantic = "^0.1.2" +rdf2geojson = {git = "https://github.com/ashleysommer/rdf2geojson.git", rev = "v0.2.1"} +python-multipart = "^0.0.9" + +[tool.poetry.extras] +server = ["uvicorn"] [tool.poetry.group.dev.dependencies] pytest = "^8.2.1" diff --git a/test_data/bnode_depth-2-2.ttl b/test_data/bnode_depth-2-2.ttl index daf7b4e3..4d31dbf4 100644 --- a/test_data/bnode_depth-2-2.ttl +++ b/test_data/bnode_depth-2-2.ttl @@ -30,6 +30,10 @@ PREFIX xsd: ; skos:historyNote "The scheme for Australian physiographic units is derived from: Pain, C, Gregory, L, Wilson, P and McKenzie, N (2011), The physiographic regions of Australia – Explanatory notes 2011; Australian Collaborative Land Evaluation Program and National Committee on Soil and Terrain (dataset available at https://www.asris.csiro.au/themes/PhysioRegions.html). 
The Geological Survey of Western Australia's regolith classification system is available at https://dmpbookshop.eruditetechnologies.com.au/product/revised-classification-system-for-regolith-in-western-australia-and-the-recommended-approach-to-regolith-mapping.do (2024 update including physiographic units is in progress)."@en ; skos:prefLabel "Australian physiographic units"@en ; + prov:qualifiedDerivation + [ + prov:hadRole ; + ] ; prov:qualifiedAttribution [ prov:agent [ diff --git a/test_data/catprez.ttl b/test_data/catprez.ttl index 47f17372..ab8584b5 100644 --- a/test_data/catprez.ttl +++ b/test_data/catprez.ttl @@ -6,11 +6,11 @@ PREFIX rdfs: ex:CatalogOne a dcat:Catalog ; rdfs:label "Catalog One" ; - dcterms:hasPart ex:DCATResource ; + dcterms:hasPart ex:DCATDataset ; ex:property "Catalog property" ; . -ex:DCATResource a dcat:Resource ; +ex:DCATDataset a dcat:Dataset ; rdfs:label "DCAT Resource" ; dcterms:hasPart ex:RDFResource ; ex:property "DCAT Resource property" @@ -23,11 +23,11 @@ ex:RDFResource a rdf:Resource ; ex:CatalogTwo a dcat:Catalog ; rdfs:label "amazing catalog" ; - dcterms:hasPart ex:DCATResourceTwo ; + dcterms:hasPart ex:DCATDatasetTwo ; ex:property "complete" ; . 
-ex:DCATResourceTwo a dcat:Resource ; +ex:DCATDatasetTwo a dcat:Dataset ; rdfs:label "rightful" ; dcterms:hasPart ex:RDFResourceTwo ; ex:property "exposure" diff --git a/test_data/cql/expected_generated_queries/additional_temporal_disjoint_instant.rq b/test_data/cql/expected_generated_queries/additional_temporal_disjoint_instant.rq new file mode 100644 index 00000000..3f61c22d --- /dev/null +++ b/test_data/cql/expected_generated_queries/additional_temporal_disjoint_instant.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant > "2012-08-10T05:30:00+00:00"^^ || ?dt_1_instant < "2012-08-10T05:30:00+00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/additional_temporal_during_intervals.rq b/test_data/cql/expected_generated_queries/additional_temporal_during_intervals.rq new file mode 100644 index 00000000..c3d32c5d --- /dev/null +++ b/test_data/cql/expected_generated_queries/additional_temporal_during_intervals.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start > "2017-06-10T07:30:00+00:00"^^ && ?dt_1_end < "2017-06-11T10:30:00+00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/clause7_12.rq b/test_data/cql/expected_generated_queries/clause7_12.rq new file mode 100644 index 00000000..dd4a5abe --- /dev/null +++ b/test_data/cql/expected_generated_queries/clause7_12.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (! 
(?dt_1_instant > "1969-07-24T16:50:35+00:00"^^ || ?dt_1_instant < "1969-07-16T05:32:00+00:00"^^)) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/clause7_13.rq b/test_data/cql/expected_generated_queries/clause7_13.rq new file mode 100644 index 00000000..a1ce5696 --- /dev/null +++ b/test_data/cql/expected_generated_queries/clause7_13.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start > "1969-07-16T13:32:00+00:00"^^ && ?dt_1_end < "1969-07-24T16:50:35+00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/clause7_17.rq b/test_data/cql/expected_generated_queries/clause7_17.rq new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/test_data/cql/expected_generated_queries/clause7_17.rq @@ -0,0 +1 @@ + diff --git a/test_data/cql/expected_generated_queries/example20.rq b/test_data/cql/expected_generated_queries/example20.rq new file mode 100644 index 00000000..ce6a0ae3 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example20.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant < "2015-01-01T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example21.rq b/test_data/cql/expected_generated_queries/example21.rq new file mode 100644 index 00000000..1ef93f98 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example21.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant > "2012-06-05T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example22.rq b/test_data/cql/expected_generated_queries/example22.rq new file mode 100644 index 00000000..c3d32c5d --- /dev/null +++ b/test_data/cql/expected_generated_queries/example22.rq @@ -0,0 
+1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start > "2017-06-10T07:30:00+00:00"^^ && ?dt_1_end < "2017-06-11T10:30:00+00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example27.rq b/test_data/cql/expected_generated_queries/example27.rq new file mode 100644 index 00000000..373d83d4 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example27.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?datetime +} +WHERE { +?focus_node ?datetime +FILTER (?datetime > "2012-06-05T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example53.rq b/test_data/cql/expected_generated_queries/example53.rq new file mode 100644 index 00000000..4f1ae886 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example53.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant > "2010-02-10T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example54.rq b/test_data/cql/expected_generated_queries/example54.rq new file mode 100644 index 00000000..8d1ae435 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example54.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant < "2012-08-10T05:30:00+00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example55.rq b/test_data/cql/expected_generated_queries/example55.rq new file mode 100644 index 00000000..1c71d701 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example55.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end . 
+?focus_node ?dt_2_start + +FILTER ("2000-01-01T00:00:00+00:00"^^ < ?dt_2_start && "2005-01-10T01:01:01.393216+00:00"^^ > ?dt_2_end) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example56.rq b/test_data/cql/expected_generated_queries/example56.rq new file mode 100644 index 00000000..4cf45fcc --- /dev/null +++ b/test_data/cql/expected_generated_queries/example56.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start + +FILTER ("2005-01-10T01:01:01.393216+00:00"^^ < ?dt_2_start) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example57.rq b/test_data/cql/expected_generated_queries/example57.rq new file mode 100644 index 00000000..1e25ab9c --- /dev/null +++ b/test_data/cql/expected_generated_queries/example57.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start > "2005-01-10T00:00:00"^^ && ?dt_1_end < "2010-02-10T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example58.rq b/test_data/cql/expected_generated_queries/example58.rq new file mode 100644 index 00000000..f12a8efc --- /dev/null +++ b/test_data/cql/expected_generated_queries/example58.rq @@ -0,0 +1,7 @@ +CONSTRUCT { +?focus_node ?dt_1_instant +} +WHERE { +?focus_node ?dt_1_instant +FILTER (?dt_1_instant = "1851-04-29T00:00:00"^^) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example59.rq b/test_data/cql/expected_generated_queries/example59.rq new file mode 100644 index 00000000..0990ae97 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example59.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . 
+?focus_node ?dt_1_start + +FILTER (?dt_1_start < "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> && ?dt_1_end = "2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime>) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example60.rq b/test_data/cql/expected_generated_queries/example60.rq new file mode 100644 index 00000000..e601b608 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example60.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start > "1991-10-07T00:00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> && ?dt_1_end = "2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime>) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example61.rq b/test_data/cql/expected_generated_queries/example61.rq new file mode 100644 index 00000000..e8ff1c78 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example61.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (! (?dt_1_end < "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> || ?dt_1_start > "2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime>)) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example62.rq b/test_data/cql/expected_generated_queries/example62.rq new file mode 100644 index 00000000..f9fb02ce --- /dev/null +++ b/test_data/cql/expected_generated_queries/example62.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start + +FILTER ("2010-02-10T00:00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> = ?dt_2_start) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example63.rq b/test_data/cql/expected_generated_queries/example63.rq new file mode 100644 index 00000000..0fd6c765 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example63.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end .
+?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start + +FILTER ("2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> = ?dt_2_end) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example64.rq b/test_data/cql/expected_generated_queries/example64.rq new file mode 100644 index 00000000..d709e1bb --- /dev/null +++ b/test_data/cql/expected_generated_queries/example64.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start + +FILTER ("1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> > ?dt_2_start && "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> < ?dt_2_end && "2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> > ?dt_2_end) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example65.rq b/test_data/cql/expected_generated_queries/example65.rq new file mode 100644 index 00000000..5801e893 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example65.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start < "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> && ?dt_1_end > "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> && ?dt_1_end < "1992-10-09T08:08:08.393473+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime>) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example66.rq b/test_data/cql/expected_generated_queries/example66.rq new file mode 100644 index 00000000..b5d0c198 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example66.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_2_end . +?focus_node ?dt_2_start +} +WHERE { +?focus_node ?dt_2_end .
+?focus_node ?dt_2_start + +FILTER ("1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> = ?dt_2_start && "2010-02-10T05:29:20.073225+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime> > ?dt_2_end) +} \ No newline at end of file diff --git a/test_data/cql/expected_generated_queries/example67.rq b/test_data/cql/expected_generated_queries/example67.rq new file mode 100644 index 00000000..eca86355 --- /dev/null +++ b/test_data/cql/expected_generated_queries/example67.rq @@ -0,0 +1,10 @@ +CONSTRUCT { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start +} +WHERE { +?focus_node ?dt_1_end . +?focus_node ?dt_1_start + +FILTER (?dt_1_start = "1991-10-07T08:21:06.393262+00:00"^^<http://www.w3.org/2001/XMLSchema#dateTime>) +} \ No newline at end of file diff --git a/test_data/cql/input/additional_temporal_disjoint_instant.json b/test_data/cql/input/additional_temporal_disjoint_instant.json new file mode 100644 index 00000000..b3e0c8fb --- /dev/null +++ b/test_data/cql/input/additional_temporal_disjoint_instant.json @@ -0,0 +1,7 @@ +{ + "op": "t_disjoint", + "args": [ + { "property": "updated_at" }, + { "timestamp": "2012-08-10T05:30:00Z" } + ] +} diff --git a/test_data/cql/input/additional_temporal_during_intervals.json b/test_data/cql/input/additional_temporal_during_intervals.json new file mode 100644 index 00000000..34033d15 --- /dev/null +++ b/test_data/cql/input/additional_temporal_during_intervals.json @@ -0,0 +1,7 @@ +{ + "op": "t_during", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "2017-06-10T07:30:00Z", "2017-06-11T10:30:00Z" ] } + ] +} diff --git a/test_data/cql/input/additional_temporal_intersects_instant.json b/test_data/cql/input/additional_temporal_intersects_instant.json new file mode 100644 index 00000000..b3e0c8fb --- /dev/null +++ b/test_data/cql/input/additional_temporal_intersects_instant.json @@ -0,0 +1,7 @@ +{ + "op": "t_disjoint", + "args": [ + { "property": "updated_at" }, + { "timestamp": "2012-08-10T05:30:00Z" } + ] +} diff --git a/test_data/cql/input/clause6_01.json
b/test_data/cql/input/clause6_01.json new file mode 100644 index 00000000..ca73c3d2 --- /dev/null +++ b/test_data/cql/input/clause6_01.json @@ -0,0 +1 @@ +{ "op": "avg", "args": [ { "property": "windSpeed" } ] } diff --git a/test_data/cql/input/clause6_02a.json b/test_data/cql/input/clause6_02a.json new file mode 100644 index 00000000..f72df612 --- /dev/null +++ b/test_data/cql/input/clause6_02a.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "property": "city" }, + "Toronto" + ] +} diff --git a/test_data/cql/input/clause6_02b.json b/test_data/cql/input/clause6_02b.json new file mode 100644 index 00000000..afcfda5f --- /dev/null +++ b/test_data/cql/input/clause6_02b.json @@ -0,0 +1,10 @@ +{ + "op": "<", + "args": [ + { + "op": "avg", + "args": [ { "property": "windSpeed" } ] + }, + 4 + ] +} diff --git a/test_data/cql/input/clause6_02c.json b/test_data/cql/input/clause6_02c.json new file mode 100644 index 00000000..8665ea81 --- /dev/null +++ b/test_data/cql/input/clause6_02c.json @@ -0,0 +1,13 @@ +{ + "op": ">", + "args": [ + { + "op": "-", + "args": [ + { "property": "balance" }, + 150.0 + ] + }, + 0 + ] +} diff --git a/test_data/cql/input/clause6_02d.json b/test_data/cql/input/clause6_02d.json new file mode 100644 index 00000000..965a47a1 --- /dev/null +++ b/test_data/cql/input/clause6_02d.json @@ -0,0 +1,7 @@ +{ + "op": ">=", + "args": [ + { "property": "updated" }, + { "date": "1970-01-01" } + ] +} diff --git a/test_data/cql/input/clause6_03.json b/test_data/cql/input/clause6_03.json new file mode 100644 index 00000000..1196a55e --- /dev/null +++ b/test_data/cql/input/clause6_03.json @@ -0,0 +1,9 @@ +{ + "op": "not", + "args": [ + { + "op": "isNull", + "args": [ { "property": "geometry" } ] + } + ] +} diff --git a/test_data/cql/input/clause7_01.json b/test_data/cql/input/clause7_01.json new file mode 100644 index 00000000..7c3fba5a --- /dev/null +++ b/test_data/cql/input/clause7_01.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "name" 
}, + "Smith%" + ] +} diff --git a/test_data/cql/input/clause7_02.json b/test_data/cql/input/clause7_02.json new file mode 100644 index 00000000..07666b42 --- /dev/null +++ b/test_data/cql/input/clause7_02.json @@ -0,0 +1,8 @@ +{ + "op": "between", + "args": [ + { "property": "depth" }, + 100.0, + 150.0 + ] +} diff --git a/test_data/cql/input/clause7_03a.json b/test_data/cql/input/clause7_03a.json new file mode 100644 index 00000000..78d2a35d --- /dev/null +++ b/test_data/cql/input/clause7_03a.json @@ -0,0 +1,7 @@ +{ + "op": "in", + "args": [ + { "property": "cityName" }, + [ "Toronto", "Frankfurt", "Tokyo", "New York" ] + ] +} diff --git a/test_data/cql/input/clause7_03b.json b/test_data/cql/input/clause7_03b.json new file mode 100644 index 00000000..1ac740a9 --- /dev/null +++ b/test_data/cql/input/clause7_03b.json @@ -0,0 +1,12 @@ +{ + "op": "not", + "args": [ + { + "op": "in", + "args": [ + { "property": "category" }, + [ 1, 2, 3, 4 ] + ] + } + ] +} diff --git a/test_data/cql/input/clause7_04.json b/test_data/cql/input/clause7_04.json new file mode 100644 index 00000000..0d0370dc --- /dev/null +++ b/test_data/cql/input/clause7_04.json @@ -0,0 +1,13 @@ +{ + "op": "in", + "args": [ + { + "op": "casei", + "args": [ { "property": "road_class" } ] + }, + [ + { "op": "casei", "args": [ "Οδος" ] }, + { "op": "casei", "args": [ "Straße" ] } + ] + ] +} diff --git a/test_data/cql/input/clause7_05.json b/test_data/cql/input/clause7_05.json new file mode 100644 index 00000000..6071fb47 --- /dev/null +++ b/test_data/cql/input/clause7_05.json @@ -0,0 +1,13 @@ +{ + "op": "=", + "args": [ + { + "op": "accenti", + "args": [ { "property": "etat_vol" } ] + }, + { + "op": "accenti", + "args": [ "débárquér" ] + } + ] +} diff --git a/test_data/cql/input/clause7_07.json b/test_data/cql/input/clause7_07.json new file mode 100644 index 00000000..dd865c34 --- /dev/null +++ b/test_data/cql/input/clause7_07.json @@ -0,0 +1,10 @@ +{ + "op": "s_intersects", + "args": [ + { "property": 
"geometry" }, + { + "type": "Point", + "coordinates": [ 36.319836, 32.288087 ] + } + ] +} diff --git a/test_data/cql/input/clause7_10.json b/test_data/cql/input/clause7_10.json new file mode 100644 index 00000000..335b9f30 --- /dev/null +++ b/test_data/cql/input/clause7_10.json @@ -0,0 +1,19 @@ +{ + "op": "s_crosses", + "args": [ + { "property": "road" }, + { + "type": "Polygon", + "coordinates": [ + [ + [ 43.7286, -79.2986 ], [ 43.7311, -79.2996 ], [ 43.7323, -79.2972 ], + [ 43.7326, -79.2971 ], [ 43.7350, -79.2981 ], [ 43.7350, -79.2982 ], + [ 43.7352, -79.2982 ], [ 43.7357, -79.2956 ], [ 43.7337, -79.2948 ], + [ 43.7343, -79.2933 ], [ 43.7339, -79.2923 ], [ 43.7327, -79.2947 ], + [ 43.7320, -79.2942 ], [ 43.7322, -79.2937 ], [ 43.7306, -79.2930 ], + [ 43.7303, -79.2930 ], [ 43.7299, -79.2928 ], [ 43.7286, -79.2986 ] + ] + ] + } + ] +} diff --git a/test_data/cql/input/clause7_12.json b/test_data/cql/input/clause7_12.json new file mode 100644 index 00000000..bbef8c63 --- /dev/null +++ b/test_data/cql/input/clause7_12.json @@ -0,0 +1,7 @@ +{ + "op": "t_intersects", + "args": [ + { "property": "event_time" }, + { "interval": [ "1969-07-16T05:32:00Z", "1969-07-24T16:50:35Z" ] } + ] +} diff --git a/test_data/cql/input/clause7_13.json b/test_data/cql/input/clause7_13.json new file mode 100644 index 00000000..19d6aaaf --- /dev/null +++ b/test_data/cql/input/clause7_13.json @@ -0,0 +1,7 @@ +{ + "op": "t_during", + "args": [ + { "interval": [ { "property": "touchdown" }, { "property": "liftOff" } ] }, + { "interval": [ "1969-07-16T13:32:00Z", "1969-07-24T16:50:35Z" ] } + ] +} diff --git a/test_data/cql/input/clause7_15.json b/test_data/cql/input/clause7_15.json new file mode 100644 index 00000000..46b6543a --- /dev/null +++ b/test_data/cql/input/clause7_15.json @@ -0,0 +1,7 @@ +{ + "op": "a_contains", + "args": [ + { "property": "layer:ids" }, + [ "layers-ca", "layers-us" ] + ] +} diff --git a/test_data/cql/input/clause7_16.json b/test_data/cql/input/clause7_16.json new 
file mode 100644 index 00000000..cfe4f0a0 --- /dev/null +++ b/test_data/cql/input/clause7_16.json @@ -0,0 +1,26 @@ +{ + "op": "s_crosses", + "args": [ + { + "type": "LineString", + "coordinates": [ + [ 43.72992, -79.2998 ], [ 43.73005, -79.2991 ], [ 43.73006, -79.2984 ], + [ 43.73140, -79.2956 ], [ 43.73259, -79.2950 ], [ 43.73266, -79.2945 ], + [ 43.73320, -79.2936 ], [ 43.73378, -79.2936 ], [ 43.73486, -79.2917 ] + ] + }, + { + "type": "Polygon", + "coordinates": [ + [ + [ 43.7286, -79.2986 ], [ 43.7311, -79.2996 ], [ 43.7323, -79.2972 ], + [ 43.7326, -79.2971 ], [ 43.7350, -79.2981 ], [ 43.7350, -79.2982 ], + [ 43.7352, -79.2982 ], [ 43.7357, -79.2956 ], [ 43.7337, -79.2948 ], + [ 43.7343, -79.2933 ], [ 43.7339, -79.2923 ], [ 43.7327, -79.2947 ], + [ 43.7320, -79.2942 ], [ 43.7322, -79.2937 ], [ 43.7306, -79.2930 ], + [ 43.7303, -79.2930 ], [ 43.7299, -79.2928 ], [ 43.7286, -79.2986 ] + ] + ] + } + ] +} diff --git a/test_data/cql/input/clause7_17.json b/test_data/cql/input/clause7_17.json new file mode 100644 index 00000000..24f0adc1 --- /dev/null +++ b/test_data/cql/input/clause7_17.json @@ -0,0 +1,7 @@ +{ + "op": "t_during", + "args": [ + { "interval": [ "1969-07-20T20:17:40Z", "1969-07-21T17:54:00Z" ] }, + { "interval": [ "1969-07-16T13:32:00Z", "1969-07-24T16:50:35Z" ] } + ] +} diff --git a/test_data/cql/input/clause7_18.json b/test_data/cql/input/clause7_18.json new file mode 100644 index 00000000..6cecc0a9 --- /dev/null +++ b/test_data/cql/input/clause7_18.json @@ -0,0 +1,14 @@ +{ + "op": "s_within", + "args": [ + { "property": "road" }, + { + "op": "Buffer", + "args": [ + { "property": "geometry" }, + 10, + "m" + ] + } + ] +} diff --git a/test_data/cql/input/clause7_19.json b/test_data/cql/input/clause7_19.json new file mode 100644 index 00000000..d9b785c6 --- /dev/null +++ b/test_data/cql/input/clause7_19.json @@ -0,0 +1,13 @@ +{ + "op": ">", + "args": [ + { "property": "vehicle_height" }, + { + "op": "-", + "args": [ + { "property": "bridge_clearance" 
}, + 1 + ] + } + ] +} diff --git a/test_data/cql/example01.json b/test_data/cql/input/example01.json similarity index 60% rename from test_data/cql/example01.json rename to test_data/cql/input/example01.json index b81dafdf..443a0a9c 100644 --- a/test_data/cql/example01.json +++ b/test_data/cql/input/example01.json @@ -1,7 +1,7 @@ { "op": "=", "args": [ - { "property": "scene_id" }, + { "property": "landsat:scene_id" }, "LC82030282019133LGN00" ] } diff --git a/test_data/cql/example02.json b/test_data/cql/input/example02.json similarity index 100% rename from test_data/cql/example02.json rename to test_data/cql/input/example02.json diff --git a/test_data/cql/example03.json b/test_data/cql/input/example03.json similarity index 100% rename from test_data/cql/example03.json rename to test_data/cql/input/example03.json diff --git a/test_data/cql/input/example04.json b/test_data/cql/input/example04.json new file mode 100644 index 00000000..4b56541b --- /dev/null +++ b/test_data/cql/input/example04.json @@ -0,0 +1,26 @@ +{ + "op": "and", + "args": [ + { + "op": "<", + "args": [ + { "property": "eo:cloud_cover" }, + 0.1 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_row" }, + 28 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_path" }, + 203 + ] + } + ] +} diff --git a/test_data/cql/example05a.json b/test_data/cql/input/example05a.json similarity index 66% rename from test_data/cql/example05a.json rename to test_data/cql/input/example05a.json index de561ead..8d5bc34f 100644 --- a/test_data/cql/example05a.json +++ b/test_data/cql/input/example05a.json @@ -4,14 +4,14 @@ { "op": "=", "args": [ - { "property": "ro:cloud_cover" }, + { "property": "eo:cloud_cover" }, 0.1 ] }, { "op": "=", "args": [ - { "property": "ro:cloud_cover" }, + { "property": "eo:cloud_cover" }, 0.2 ] } diff --git a/test_data/cql/example05b.json b/test_data/cql/input/example05b.json similarity index 57% rename from test_data/cql/example05b.json rename to 
test_data/cql/input/example05b.json index aa0fa9a6..5701ac32 100644 --- a/test_data/cql/example05b.json +++ b/test_data/cql/input/example05b.json @@ -1,7 +1,7 @@ { "op": "in", "args": [ - { "property": "ro:cloud_cover" }, + { "property": "eo:cloud_cover" }, [ 0.1, 0.2 ] ] } diff --git a/test_data/cql/input/example06a.json b/test_data/cql/input/example06a.json new file mode 100644 index 00000000..8adeb429 --- /dev/null +++ b/test_data/cql/input/example06a.json @@ -0,0 +1,26 @@ +{ + "op": "and", + "args": [ + { + "op": "between", + "args": [ + { "property": "eo:cloud_cover" }, + 0.1, 0.2 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_row" }, + 28 + ] + }, + { + "op": "=", + "args": [ + { "property": "landsat:wrs_path" }, + 203 + ] + } + ] +} diff --git a/test_data/cql/example06b.json b/test_data/cql/input/example06b.json similarity index 100% rename from test_data/cql/example06b.json rename to test_data/cql/input/example06b.json diff --git a/test_data/cql/example07.json b/test_data/cql/input/example07.json similarity index 100% rename from test_data/cql/example07.json rename to test_data/cql/input/example07.json diff --git a/test_data/cql/example08.json b/test_data/cql/input/example08.json similarity index 86% rename from test_data/cql/example08.json rename to test_data/cql/input/example08.json index 8eb14bd9..f1494126 100644 --- a/test_data/cql/example08.json +++ b/test_data/cql/input/example08.json @@ -31,14 +31,14 @@ { "type": "Polygon", "coordinates": [ - [ [ -77.117938, 38.93686 ], + [ [ -77.117938, 38.936860 ], [ -77.040604, 39.995648 ], [ -76.910536, 38.892912 ], [ -77.039359, 38.791753 ], [ -77.047906, 38.841462 ], [ -77.034183, 38.840655 ], - [ -77.033142, 38.85749 ], - [ -77.117938, 38.93686 ] + [ -77.033142, 38.857490 ], + [ -77.117938, 38.936860 ] ] ] } diff --git a/test_data/cql/example09.json b/test_data/cql/input/example09.json similarity index 100% rename from test_data/cql/example09.json rename to 
test_data/cql/input/example09.json diff --git a/test_data/cql/example10.json b/test_data/cql/input/example10.json similarity index 100% rename from test_data/cql/example10.json rename to test_data/cql/input/example10.json diff --git a/test_data/cql/example11.json b/test_data/cql/input/example11.json similarity index 100% rename from test_data/cql/example11.json rename to test_data/cql/input/example11.json diff --git a/test_data/cql/example12.json b/test_data/cql/input/example12.json similarity index 100% rename from test_data/cql/example12.json rename to test_data/cql/input/example12.json diff --git a/test_data/cql/input/example13.json b/test_data/cql/input/example13.json new file mode 100644 index 00000000..871284b8 --- /dev/null +++ b/test_data/cql/input/example13.json @@ -0,0 +1,12 @@ +{ + "op": "not", + "args": [ + { + "op": "like", + "args": [ + { "property": "owner" }, + "%Mike%" + ] + } + ] +} diff --git a/test_data/cql/example14.json b/test_data/cql/input/example14.json similarity index 100% rename from test_data/cql/example14.json rename to test_data/cql/input/example14.json diff --git a/test_data/cql/example15.json b/test_data/cql/input/example15.json similarity index 85% rename from test_data/cql/example15.json rename to test_data/cql/input/example15.json index 8f7704c7..588e5e1c 100644 --- a/test_data/cql/example15.json +++ b/test_data/cql/input/example15.json @@ -4,7 +4,7 @@ { "op": ">", "args": [ - { "property": "floor" }, + { "property": "floors" }, 5 ] }, diff --git a/test_data/cql/input/example16.json b/test_data/cql/input/example16.json new file mode 100644 index 00000000..c56fc669 --- /dev/null +++ b/test_data/cql/input/example16.json @@ -0,0 +1,38 @@ +{ + "op": "and", + "args": [ + { + "op": "=", + "args": [ + { "property": "swimming_pool" }, + true + ] + }, + { + "op": "or", + "args": [ + { + "op": ">", + "args": [ + { "property": "floors" }, + 5 + ] + }, + { + "op": "like", + "args": [ + { "property": "material" }, + "brick%" + ] + }, + { + 
"op": "like", + "args": [ + { "property": "material" }, + "%brick" + ] + } + ] + } + ] +} diff --git a/test_data/cql/example17.json b/test_data/cql/input/example17.json similarity index 100% rename from test_data/cql/example17.json rename to test_data/cql/input/example17.json diff --git a/test_data/cql/input/example18.json b/test_data/cql/input/example18.json new file mode 100644 index 00000000..62be7dee --- /dev/null +++ b/test_data/cql/input/example18.json @@ -0,0 +1,24 @@ +{ + "op": "or", + "args": [ + { + "op": "not", + "args": [ + { + "op": "<", + "args": [ + { "property": "floors" }, + 5 + ] + } + ] + }, + { + "op": "=", + "args": [ + { "property": "swimming_pool" }, + true + ] + } + ] +} diff --git a/test_data/cql/input/example19.json b/test_data/cql/input/example19.json new file mode 100644 index 00000000..ecf9c31d --- /dev/null +++ b/test_data/cql/input/example19.json @@ -0,0 +1,31 @@ +{ + "op": "and", + "args": [ + { + "op": "or", + "args": [ + { + "op": "like", + "args": [ + { "property": "owner" }, + "mike%" + ] + }, + { + "op": "like", + "args": [ + { "property": "owner" }, + "Mike%" + ] + } + ] + }, + { + "op": "<", + "args": [ + { "property": "floors" }, + 4 + ] + } + ] +} diff --git a/test_data/cql/input/example20.json b/test_data/cql/input/example20.json new file mode 100644 index 00000000..9968d5eb --- /dev/null +++ b/test_data/cql/input/example20.json @@ -0,0 +1,7 @@ +{ + "op": "t_before", + "args": [ + { "property": "built" }, + { "date": "2015-01-01" } + ] +} diff --git a/test_data/cql/input/example21.json b/test_data/cql/input/example21.json new file mode 100644 index 00000000..82f86cd2 --- /dev/null +++ b/test_data/cql/input/example21.json @@ -0,0 +1,7 @@ +{ + "op": "t_after", + "args": [ + { "property": "built" }, + { "date": "2012-06-05" } + ] +} diff --git a/test_data/cql/input/example22.json b/test_data/cql/input/example22.json new file mode 100644 index 00000000..34033d15 --- /dev/null +++ b/test_data/cql/input/example22.json @@ -0,0 
+1,7 @@ +{ + "op": "t_during", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "2017-06-10T07:30:00Z", "2017-06-11T10:30:00Z" ] } + ] +} diff --git a/test_data/cql/input/example23.json b/test_data/cql/input/example23.json new file mode 100644 index 00000000..b60365d7 --- /dev/null +++ b/test_data/cql/input/example23.json @@ -0,0 +1,7 @@ +{ + "op": "s_within", + "args": [ + { "property": "location" }, + { "bbox": [ -118, 33.8, -117.9, 34 ] } + ] +} diff --git a/test_data/cql/input/example24.json b/test_data/cql/input/example24.json new file mode 100644 index 00000000..41e5b206 --- /dev/null +++ b/test_data/cql/input/example24.json @@ -0,0 +1,10 @@ +{ + "op": "s_intersects", + "args": [ + { "property": "geometry" }, + { + "type": "Polygon", + "coordinates": [ [ [ -10, -10 ], [ 10, -10 ], [ 10, 10 ], [ -10, -10 ] ] ] + } + ] +} diff --git a/test_data/cql/input/example25.json b/test_data/cql/input/example25.json new file mode 100644 index 00000000..4233215a --- /dev/null +++ b/test_data/cql/input/example25.json @@ -0,0 +1,19 @@ +{ + "op": "and", + "args": [ + { + "op": ">", + "args": [ + { "property": "floors" }, + 5 + ] + }, + { + "op": "s_within", + "args": [ + { "property": "geometry" }, + { "bbox": [ -118, 33.8, -117.9, 34 ] } + ] + } + ] +} diff --git a/test_data/cql/input/example26.json b/test_data/cql/input/example26.json new file mode 100644 index 00000000..d46b1165 --- /dev/null +++ b/test_data/cql/input/example26.json @@ -0,0 +1,10 @@ +{ + "op": "in", + "args": [ + { "op": "casei", "args": [ { "property": "road_class" } ] }, + [ + { "op": "casei", "args": [ "Οδος" ] }, + { "op": "casei", "args": [ "Straße" ] } + ] + ] +} diff --git a/test_data/cql/input/example27.json b/test_data/cql/input/example27.json new file mode 100644 index 00000000..0c94cbc1 --- /dev/null +++ b/test_data/cql/input/example27.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "op": "accenti", "args": [ { "property": 
"etat_vol" } ] }, + { "op": "accenti", "args": [ "débárquér" ] } + ] +} diff --git a/test_data/cql/input/example28.json b/test_data/cql/input/example28.json new file mode 100644 index 00000000..d2e5e488 --- /dev/null +++ b/test_data/cql/input/example28.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "op": "casei", "args": [ { "property": "geophys:SURVEY_NAME" } ] }, + { "op": "casei", "args": [ "%calcutta%" ] } + ] +} diff --git a/test_data/cql/example29.json b/test_data/cql/input/example29.json similarity index 100% rename from test_data/cql/example29.json rename to test_data/cql/input/example29.json diff --git a/test_data/cql/input/example30.json b/test_data/cql/input/example30.json new file mode 100644 index 00000000..6056f60d --- /dev/null +++ b/test_data/cql/input/example30.json @@ -0,0 +1,7 @@ +{ + "op": "<>", + "args": [ + { "property": "id" }, + "fa7e1920-9107-422d-a3db-c468cbc5d6df" + ] +} diff --git a/test_data/cql/example31.json b/test_data/cql/input/example31.json similarity index 100% rename from test_data/cql/example31.json rename to test_data/cql/input/example31.json diff --git a/test_data/cql/example32.json b/test_data/cql/input/example32.json similarity index 100% rename from test_data/cql/example32.json rename to test_data/cql/input/example32.json diff --git a/test_data/cql/example33.json b/test_data/cql/input/example33.json similarity index 100% rename from test_data/cql/example33.json rename to test_data/cql/input/example33.json diff --git a/test_data/cql/example34.json b/test_data/cql/input/example34.json similarity index 100% rename from test_data/cql/example34.json rename to test_data/cql/input/example34.json diff --git a/test_data/cql/example35.json b/test_data/cql/input/example35.json similarity index 100% rename from test_data/cql/example35.json rename to test_data/cql/input/example35.json diff --git a/test_data/cql/input/example36.json b/test_data/cql/input/example36.json new file mode 100644 index 00000000..273ace5b --- /dev/null 
+++ b/test_data/cql/input/example36.json @@ -0,0 +1,12 @@ +{ + "op": "not", + "args": [ + { + "op": "like", + "args": [ + { "property": "name" }, + "foo%" + ] + } + ] +} diff --git a/test_data/cql/input/example37.json b/test_data/cql/input/example37.json new file mode 100644 index 00000000..2ec6db3c --- /dev/null +++ b/test_data/cql/input/example37.json @@ -0,0 +1,7 @@ +{ + "op": "between", + "args": [ + { "property": "value" }, + 10, 20 + ] +} diff --git a/test_data/cql/input/example38.json b/test_data/cql/input/example38.json new file mode 100644 index 00000000..c729fd3d --- /dev/null +++ b/test_data/cql/input/example38.json @@ -0,0 +1,12 @@ +{ + "op": "not", + "args": [ + { + "op": "between", + "args": [ + { "property": "value" }, + 10, 20 + ] + } + ] +} diff --git a/test_data/cql/example39.json b/test_data/cql/input/example39.json similarity index 100% rename from test_data/cql/example39.json rename to test_data/cql/input/example39.json diff --git a/test_data/cql/input/example40.json b/test_data/cql/input/example40.json new file mode 100644 index 00000000..a4dc7f80 --- /dev/null +++ b/test_data/cql/input/example40.json @@ -0,0 +1,12 @@ +{ + "op": "not", + "args": [ + { + "op": "in", + "args": [ + { "property": "value" }, + [ "a", "b", "c" ] + ] + } + ] +} diff --git a/test_data/cql/input/example41.json b/test_data/cql/input/example41.json new file mode 100644 index 00000000..bb31b499 --- /dev/null +++ b/test_data/cql/input/example41.json @@ -0,0 +1,4 @@ +{ + "op": "isNull", + "args": [ { "property": "value" } ] +} diff --git a/test_data/cql/input/example42.json b/test_data/cql/input/example42.json new file mode 100644 index 00000000..29c3bb03 --- /dev/null +++ b/test_data/cql/input/example42.json @@ -0,0 +1,9 @@ +{ + "op": "not", + "args": [ + { + "op": "isNull", + "args": [ { "property": "value" } ] + } + ] +} diff --git a/test_data/cql/input/example43.json b/test_data/cql/input/example43.json new file mode 100644 index 00000000..04986cb3 --- /dev/null +++ 
b/test_data/cql/input/example43.json @@ -0,0 +1,24 @@ +{ + "op": "and", + "args": [ + { + "op": "not", + "args": [ + { + "op": "like", + "args": [ + { "property": "name" }, + "foo%" + ] + } + ] + }, + { + "op": ">", + "args": [ + { "property": "value" }, + 10 + ] + } + ] +} diff --git a/test_data/cql/input/example44.json b/test_data/cql/input/example44.json new file mode 100644 index 00000000..65fdbc43 --- /dev/null +++ b/test_data/cql/input/example44.json @@ -0,0 +1,16 @@ +{ + "op": "or", + "args": [ + { + "op": "isNull", + "args": [ { "property": "value" } ] + }, + { + "op": "between", + "args": [ + { "property": "value" }, + 10, 20 + ] + } + ] +} diff --git a/test_data/cql/input/example45.json b/test_data/cql/input/example45.json new file mode 100644 index 00000000..e7760243 --- /dev/null +++ b/test_data/cql/input/example45.json @@ -0,0 +1,7 @@ +{ + "op": "s_intersects", + "args": [ + { "property": "geometry" }, + { "bbox": [ -128.098193, -1.1, -99999.0, 180.0, 90.0, 100000.0 ] } + ] +} diff --git a/test_data/cql/input/example46.json b/test_data/cql/input/example46.json new file mode 100644 index 00000000..0d83f427 --- /dev/null +++ b/test_data/cql/input/example46.json @@ -0,0 +1,16 @@ +{ + "op": "s_equals", + "args": [ + { + "type": "Polygon", + "coordinates": [ [ [ -0.333333, 89.0 ], + [ -102.723546, -0.5 ], + [ -179.0, -89.0 ], + [ -1.9, 89.0 ], + [ -0.0, 89.0 ], + [ 2.00001, -1.9 ], + [ -0.333333, 89.0 ] ] ] + }, + { "property": "geometry" } + ] +} diff --git a/test_data/cql/input/example47.json b/test_data/cql/input/example47.json new file mode 100644 index 00000000..b856c4b8 --- /dev/null +++ b/test_data/cql/input/example47.json @@ -0,0 +1,13 @@ +{ + "op": "s_disjoint", + "args": [ + { "property": "geometry" }, + { + "type": "MultiPolygon", + "coordinates": [ [ [ [ 144.022387, 45.176126 ], + [ -1.1, 0.0 ], + [ 180.0, 47.808086 ], + [ 144.022387, 45.176126 ] ] ] ] + } + ] +} diff --git a/test_data/cql/input/example48.json 
b/test_data/cql/input/example48.json new file mode 100644 index 00000000..5945b26f --- /dev/null +++ b/test_data/cql/input/example48.json @@ -0,0 +1,16 @@ +{ + "op": "s_touches", + "args": [ + { "property": "geometry" }, + { + "type": "MultiLineString", + "coordinates": [ [ [ -1.9, -0.99999 ], + [ 75.292574, 1.5 ], + [ -0.5, -4.016458 ], + [ -31.708594, -74.743801 ], + [ 179.0, -90.0 ] ], + [ [ -1.9, -1.1 ], + [ 1.5, 8.547371 ] ] ] + } + ] +} diff --git a/test_data/cql/input/example49.json b/test_data/cql/input/example49.json new file mode 100644 index 00000000..6eb04d3e --- /dev/null +++ b/test_data/cql/input/example49.json @@ -0,0 +1,17 @@ +{ + "op": "s_within", + "args": [ + { + "type": "Polygon", + "coordinates": [ [ [ -49.88024, 0.5, -75993.341684 ], + [ -1.5, -0.99999, -100000.0 ], + [ 0.0, 0.5, -0.333333 ], + [ -49.88024, 0.5, -75993.341684 ] ], + [ [ -65.887123, 2.00001, -100000.0 ], + [ 0.333333, -53.017711, -79471.332949 ], + [ 180.0, 0.0, 1852.616704 ], + [ -65.887123, 2.00001, -100000.0 ] ] ] + }, + { "property": "geometry" } + ] +} diff --git a/test_data/cql/input/example50.json b/test_data/cql/input/example50.json new file mode 100644 index 00000000..f444c352 --- /dev/null +++ b/test_data/cql/input/example50.json @@ -0,0 +1,7 @@ +{ + "op": "s_overlaps", + "args": [ + { "property": "geometry" }, + { "bbox": [ -179.912109, 1.9, 180.0, 16.897016 ] } + ] +} diff --git a/test_data/cql/input/example51.json b/test_data/cql/input/example51.json new file mode 100644 index 00000000..af434070 --- /dev/null +++ b/test_data/cql/input/example51.json @@ -0,0 +1,17 @@ +{ + "op": "s_crosses", + "args": [ + { "property": "geometry" }, + { + "type": "LineString", + "coordinates": [ [ 172.03086, 1.5 ], + [ 1.1, -90.0 ], + [ -159.757695, 0.99999 ], + [ -180.0, 0.5 ], + [ -12.111235, 81.336403 ], + [ -0.5, 64.43958 ], + [ 0.0, 81.991815 ], + [ -155.93831, 90.0 ] ] + } + ] +} diff --git a/test_data/cql/input/example52.json b/test_data/cql/input/example52.json new file mode 
100644 index 00000000..b2412b3f --- /dev/null +++ b/test_data/cql/input/example52.json @@ -0,0 +1,10 @@ +{ + "op": "s_contains", + "args": [ + { "property": "geometry" }, + { + "type": "Point", + "coordinates": [ -3.508362, -1.754181 ] + } + ] +} diff --git a/test_data/cql/input/example53.json b/test_data/cql/input/example53.json new file mode 100644 index 00000000..bc5dd7f7 --- /dev/null +++ b/test_data/cql/input/example53.json @@ -0,0 +1,7 @@ +{ + "op": "t_after", + "args": [ + { "property": "updated_at" }, + { "date": "2010-02-10" } + ] +} diff --git a/test_data/cql/input/example54.json b/test_data/cql/input/example54.json new file mode 100644 index 00000000..df7842a6 --- /dev/null +++ b/test_data/cql/input/example54.json @@ -0,0 +1,7 @@ +{ + "op": "t_before", + "args": [ + { "property": "updated_at" }, + { "timestamp": "2012-08-10T05:30:00Z" } + ] +} diff --git a/test_data/cql/input/example55.json b/test_data/cql/input/example55.json new file mode 100644 index 00000000..0cf0b477 --- /dev/null +++ b/test_data/cql/input/example55.json @@ -0,0 +1,8 @@ +{ + "op": "t_contains", + "args": [ + { "interval": [ "2000-01-01T00:00:00Z", "2005-01-10T01:01:01.393216Z" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + + ] +} diff --git a/test_data/cql/input/example56.json b/test_data/cql/input/example56.json new file mode 100644 index 00000000..3568235e --- /dev/null +++ b/test_data/cql/input/example56.json @@ -0,0 +1,7 @@ +{ + "op": "t_disjoint", + "args": [ + { "interval": [ "..", "2005-01-10T01:01:01.393216Z" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + ] +} diff --git a/test_data/cql/input/example57.json b/test_data/cql/input/example57.json new file mode 100644 index 00000000..63d460dc --- /dev/null +++ b/test_data/cql/input/example57.json @@ -0,0 +1,8 @@ +{ + "op": "t_during", + "args": [ + {"interval": [{ "property": "starts_at" }, { "property": "ends_at" }]}, + {"interval": ["2005-01-10", 
"2010-02-10"] + } + ] +} diff --git a/test_data/cql/input/example58.json b/test_data/cql/input/example58.json new file mode 100644 index 00000000..ede6c7ab --- /dev/null +++ b/test_data/cql/input/example58.json @@ -0,0 +1,7 @@ +{ + "op": "t_equals", + "args": [ + { "property": "updated_at" }, + { "date": "1851-04-29" } + ] +} diff --git a/test_data/cql/input/example59.json b/test_data/cql/input/example59.json new file mode 100644 index 00000000..4d479c09 --- /dev/null +++ b/test_data/cql/input/example59.json @@ -0,0 +1,7 @@ +{ + "op": "t_finishedBy", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] } + ] +} diff --git a/test_data/cql/input/example60.json b/test_data/cql/input/example60.json new file mode 100644 index 00000000..423d24f5 --- /dev/null +++ b/test_data/cql/input/example60.json @@ -0,0 +1,7 @@ +{ + "op": "t_finishes", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "1991-10-07", "2010-02-10T05:29:20.073225Z" ] } + ] +} diff --git a/test_data/cql/input/example61.json b/test_data/cql/input/example61.json new file mode 100644 index 00000000..ca95d573 --- /dev/null +++ b/test_data/cql/input/example61.json @@ -0,0 +1,7 @@ +{ + "op": "t_intersects", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] } + ] +} diff --git a/test_data/cql/input/example62.json b/test_data/cql/input/example62.json new file mode 100644 index 00000000..23360fb5 --- /dev/null +++ b/test_data/cql/input/example62.json @@ -0,0 +1,7 @@ +{ + "op": "t_meets", + "args": [ + { "interval": [ "2005-01-10", "2010-02-10" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + ] +} diff --git a/test_data/cql/input/example63.json b/test_data/cql/input/example63.json new file 
mode 100644 index 00000000..a4f7788e --- /dev/null +++ b/test_data/cql/input/example63.json @@ -0,0 +1,7 @@ +{ + "op": "t_metBy", + "args": [ + { "interval": [ "2010-02-10T05:29:20.073225Z", "2010-10-07" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + ] +} diff --git a/test_data/cql/input/example64.json b/test_data/cql/input/example64.json new file mode 100644 index 00000000..51055eb8 --- /dev/null +++ b/test_data/cql/input/example64.json @@ -0,0 +1,7 @@ +{ + "op": "t_overlappedBy", + "args": [ + { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + ] +} diff --git a/test_data/cql/input/example65.json b/test_data/cql/input/example65.json new file mode 100644 index 00000000..ff749212 --- /dev/null +++ b/test_data/cql/input/example65.json @@ -0,0 +1,7 @@ +{ + "op": "t_overlaps", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "1991-10-07T08:21:06.393262Z", "1992-10-09T08:08:08.393473Z" ] } + ] +} diff --git a/test_data/cql/input/example66.json b/test_data/cql/input/example66.json new file mode 100644 index 00000000..118ba307 --- /dev/null +++ b/test_data/cql/input/example66.json @@ -0,0 +1,7 @@ +{ + "op": "t_startedBy", + "args": [ + { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] }, + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] } + ] +} diff --git a/test_data/cql/input/example67.json b/test_data/cql/input/example67.json new file mode 100644 index 00000000..551cf2cb --- /dev/null +++ b/test_data/cql/input/example67.json @@ -0,0 +1,7 @@ +{ + "op": "t_starts", + "args": [ + { "interval": [ { "property": "starts_at" }, { "property": "ends_at" } ] }, + { "interval": [ "1991-10-07T08:21:06.393262Z", ".." 
] } + ] +} diff --git a/test_data/cql/input/example68.json b/test_data/cql/input/example68.json new file mode 100644 index 00000000..57e6d4ad --- /dev/null +++ b/test_data/cql/input/example68.json @@ -0,0 +1,10 @@ +{ + "op": "=", + "args": [ + { + "op": "Foo", + "args": [ { "property": "geometry" } ] + }, + true + ] +} diff --git a/test_data/cql/input/example69.json b/test_data/cql/input/example69.json new file mode 100644 index 00000000..57705832 --- /dev/null +++ b/test_data/cql/input/example69.json @@ -0,0 +1,10 @@ +{ + "op": "<>", + "args": [ + false, + { + "op": "Bar", + "args": [ { "property": "geometry" }, 100, "a", "b", false ] + } + ] +} diff --git a/test_data/cql/input/example70.json b/test_data/cql/input/example70.json new file mode 100644 index 00000000..032569ff --- /dev/null +++ b/test_data/cql/input/example70.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "op": "accenti", "args": [ { "property": "owner" } ] }, + { "op": "accenti", "args": [ "Beyoncé" ] } + ] +} diff --git a/test_data/cql/input/example71.json b/test_data/cql/input/example71.json new file mode 100644 index 00000000..184cde74 --- /dev/null +++ b/test_data/cql/input/example71.json @@ -0,0 +1,7 @@ +{ + "op": "=", + "args": [ + { "op": "casei", "args": [ { "property": "owner" } ] }, + { "op": "casei", "args": [ "somebody else" ] } + ] +} diff --git a/test_data/cql/input/example72.json b/test_data/cql/input/example72.json new file mode 100644 index 00000000..e97cc2db --- /dev/null +++ b/test_data/cql/input/example72.json @@ -0,0 +1,13 @@ +{ + "op": ">", + "args": [ + { "property": "value" }, + { + "op": "+", + "args": [ + { "property": "foo" }, + 10 + ] + } + ] +} diff --git a/test_data/cql/input/example73.json b/test_data/cql/input/example73.json new file mode 100644 index 00000000..e436fc5d --- /dev/null +++ b/test_data/cql/input/example73.json @@ -0,0 +1,13 @@ +{ + "op": "<", + "args": [ + { "property": "value" }, + { + "op": "-", + "args": [ + { "property": "foo" }, + 10 + ] + } + 
] +} diff --git a/test_data/cql/input/example74.json b/test_data/cql/input/example74.json new file mode 100644 index 00000000..5321438b --- /dev/null +++ b/test_data/cql/input/example74.json @@ -0,0 +1,13 @@ +{ + "op": "<>", + "args": [ + { "property": "value" }, + { + "op": "*", + "args": [ + 22.1, + { "property": "foo" } + ] + } + ] +} diff --git a/test_data/cql/input/example75.json b/test_data/cql/input/example75.json new file mode 100644 index 00000000..4bf7b7a4 --- /dev/null +++ b/test_data/cql/input/example75.json @@ -0,0 +1,13 @@ +{ + "op": "=", + "args": [ + { "property": "value" }, + { + "op": "/", + "args": [ + 2, + { "property": "foo" } + ] + } + ] +} diff --git a/test_data/cql/input/example76.json b/test_data/cql/input/example76.json new file mode 100644 index 00000000..32020f0a --- /dev/null +++ b/test_data/cql/input/example76.json @@ -0,0 +1,10 @@ +{ + "op": "<=", + "args": [ + { "property": "value" }, + { + "op": "^", + "args": [ 2, { "property": "foo" } ] + } + ] +} diff --git a/test_data/cql/input/example77.json b/test_data/cql/input/example77.json new file mode 100644 index 00000000..4486f909 --- /dev/null +++ b/test_data/cql/input/example77.json @@ -0,0 +1,10 @@ +{ + "op": "=", + "args": [ + 0, + { + "op": "%", + "args": [ { "property": "foo" }, 2 ] + } + ] +} diff --git a/test_data/cql/input/example78.json b/test_data/cql/input/example78.json new file mode 100644 index 00000000..f5a69441 --- /dev/null +++ b/test_data/cql/input/example78.json @@ -0,0 +1,10 @@ +{ + "op": "=", + "args": [ + 1, + { + "op": "div", + "args": [ { "property": "foo" }, 2 ] + } + ] +} diff --git a/test_data/cql/input/example79.json b/test_data/cql/input/example79.json new file mode 100644 index 00000000..e3311634 --- /dev/null +++ b/test_data/cql/input/example79.json @@ -0,0 +1,7 @@ +{ + "op": "a_containedBy", + "args": [ + { "property": "values" }, + [ "a", "b", "c" ] + ] +} diff --git a/test_data/cql/input/example80.json b/test_data/cql/input/example80.json new file 
mode 100644 index 00000000..bef09367 --- /dev/null +++ b/test_data/cql/input/example80.json @@ -0,0 +1,7 @@ +{ + "op": "a_contains", + "args": [ + { "property": "values" }, + [ "a", "b", "c" ] + ] +} diff --git a/test_data/cql/input/example81.json b/test_data/cql/input/example81.json new file mode 100644 index 00000000..4d35c75f --- /dev/null +++ b/test_data/cql/input/example81.json @@ -0,0 +1,7 @@ +{ + "op": "a_equals", + "args": [ + [ "a", true, 1.0, 8 ], + { "property": "values" } + ] +} diff --git a/test_data/cql/input/example82.json b/test_data/cql/input/example82.json new file mode 100644 index 00000000..1537ec2e --- /dev/null +++ b/test_data/cql/input/example82.json @@ -0,0 +1,7 @@ +{ + "op": "a_overlaps", + "args": [ + { "property": "values" }, + [ { "timestamp": "2012-08-10T05:30:00Z" }, { "date": "2010-02-10" }, false ] + ] +} diff --git a/test_data/cql/input/example83.json b/test_data/cql/input/example83.json new file mode 100644 index 00000000..16c2dc91 --- /dev/null +++ b/test_data/cql/input/example83.json @@ -0,0 +1,14 @@ +{ + "op": "s_equals", + "args": [ + { + "type": "MultiPoint", + "coordinates": [ [ 180.0, -0.5 ], + [ 179.0, -47.121701 ], + [ 180.0, -0.0 ], + [ 33.470475, -0.99999 ], + [ 179.0, -15.333062 ] ] + }, + { "property": "geometry" } + ] +} diff --git a/test_data/cql/input/example84.json b/test_data/cql/input/example84.json new file mode 100644 index 00000000..ab6b64ae --- /dev/null +++ b/test_data/cql/input/example84.json @@ -0,0 +1,38 @@ +{ + "op": "s_equals", + "args": [ + { + "type": "GeometryCollection", + "geometries": [ + { + "type": "Point", + "coordinates": [ 1.9, 2.00001 ] + }, + { + "type": "Point", + "coordinates": [ 0.0, -2.00001 ] + }, + { + "type": "MultiLineString", + "coordinates": [ [ [ -2.00001, -0.0 ], + [ -77.292642, -0.5 ], + [ -87.515626, -0.0 ], + [ -180.0, 12.502773 ], + [ 21.204842, -1.5 ], + [ -21.878857, -90.0 ] ] ] + }, + { + "type": "Point", + "coordinates": [ 1.9, 0.5 ] + }, + { + "type": "LineString", + 
"coordinates": [ [ 179.0, 1.179148 ], + [ -148.192487, -65.007816 ], + [ 0.5, 0.333333 ] ] + } + ] + }, + { "property": "geometry" } + ] +} diff --git a/test_data/cql/input/example85.json b/test_data/cql/input/example85.json new file mode 100644 index 00000000..cc442d1c --- /dev/null +++ b/test_data/cql/input/example85.json @@ -0,0 +1,34 @@ +{ + "op": "=", + "args": [ + { "property": "value" }, + { + "op": "-", + "args": [ + { + "op": "+", + "args": [ + { + "op": "*", + "args": [ + { + "op": "*", + "args": [ -1, { "property": "foo" } ] + }, + 2.0 + ] + }, + { + "op": "/", + "args": [ { "property": "bar" }, 6.1234 ] + } + ] + }, + { + "op": "^", + "args": [ { "property": "x" }, 2.0 ] + } + ] + } + ] +} diff --git a/test_data/cql/input/example86.json b/test_data/cql/input/example86.json new file mode 100644 index 00000000..8d773393 --- /dev/null +++ b/test_data/cql/input/example86.json @@ -0,0 +1,7 @@ +{ + "op": "like", + "args": [ + { "property": "name" }, + { "op": "casei", "args": [ "FOO%" ] } + ] +} diff --git a/test_data/ogc_features.ttl b/test_data/ogc_features.ttl new file mode 100644 index 00000000..eec80c5e --- /dev/null +++ b/test_data/ogc_features.ttl @@ -0,0 +1,77 @@ +@prefix dcat: . +@prefix dcterms: . +@prefix geo: . +@prefix rdfs: . +@prefix xsd: . +@prefix ex: . +@prefix skos: . +@prefix sdo: . +@prefix sosa: . +@prefix void: . + +ex:DemoCatalog a dcat:Catalog ; + dcterms:title "Demo Catalog" ; + dcterms:description "A demonstration catalog containing a dataset with geographic features." ; + dcterms:hasPart ex:GeoDataset ; + ex:catalogVersion "1.0" ; + dcterms:issued "2024-09-02"^^xsd:date . + +ex:GeoDataset a dcat:Dataset ; + dcterms:title "Geographic Dataset" ; + dcterms:description "A dataset containing a feature collection of geographic features." ; + ex:datasetTheme "Geography" ; + dcterms:creator "Jane Doe" . 
+ +ex:FeatureCollection a geo:FeatureCollection ; + dcterms:description "A collection of geographic features representing points of interest." ; + geo:hasGeometry [ + geo:asWKT "POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))"^^geo:wktLiteral + ] ; + rdfs:member ex:Feature1, ex:Feature2 ; + ex:featureCount 2 ; + void:inDataset ex:GeoDataset . + +ex:Feature1 a geo:Feature ; + rdfs:label "Point of Interest 1" ; + skos:prefLabel "POI 1" ; + dcterms:description "A notable location within the feature collection." ; + geo:hasGeometry [ + geo:asWKT "POINT(5 5)"^^geo:wktLiteral + ] ; + ex:category "Landmark" ; + ex:visitorCount 1000 ; + sdo:additionalProperty [ + a sdo:PropertyValue ; + sdo:propertyID "height" ; + sdo:value "100"^^xsd:integer + ] ; + sosa:isFeatureOfInterestOf ex:Observation1 . + +ex:Feature2 a geo:Feature ; + rdfs:label "Point of Interest 2" ; + dcterms:description "Another notable location within the feature collection." ; + geo:hasGeometry [ + geo:asWKT "POINT(7 3)"^^geo:wktLiteral + ] ; + sdo:spatial [ + geo:hasGeometry [ + geo:asWKT "POLYGON((6 2, 6 4, 8 4, 8 2, 6 2))"^^geo:wktLiteral + ] + ] ; + ex:category "Historical Site" ; + ex:yearEstablished 1850 ; + sdo:additionalProperty [ + a sdo:PropertyValue ; + sdo:propertyID "age" ; + sdo:value "174"^^xsd:integer + ] . + +ex:Observation1 a sosa:Observation ; + sosa:hasFeatureOfInterest ex:Feature1 ; + sosa:observedProperty ex:Temperature ; + sosa:hasResult [ + a sdo:PropertyValue ; + sdo:value "25.5"^^xsd:decimal ; + sdo:unitCode "CEL" + ] ; + sosa:resultTime "2024-09-02T12:00:00Z"^^xsd:dateTime . 
\ No newline at end of file diff --git a/test_data/spaceprez.ttl b/test_data/spaceprez.ttl index 380bb9f0..169f177d 100644 --- a/test_data/spaceprez.ttl +++ b/test_data/spaceprez.ttl @@ -1,28 +1,41 @@ PREFIX dcat: PREFIX dcterms: -PREFIX ex: +PREFIX sp: PREFIX geo: PREFIX rdfs: +PREFIX void: -ex:SpacePrezCatalog a dcat:Catalog ; +sp:SpacePrezCatalog a dcat:Catalog ; dcterms:title "SpacePrez Catalog" ; dcterms:description "A catalog of SpacePrez data" ; - dcterms:hasPart ex:FeatureCollection ; + dcterms:hasPart sp:SpacePrezDataset ; . -ex:FeatureCollection a geo:FeatureCollection ; +sp:SpacePrezDataset a dcat:Dataset ; + dcterms:title "SpacePrez Dataset" ; + dcterms:description "A dataset of SpacePrez data" ; +. + +sp:FeatureCollection a geo:FeatureCollection ; + void:inDataset sp:SpacePrezDataset ; rdfs:label "Geo Feature Collection" ; - rdfs:member ex:Feature1 , ex:Feature2 ; - ex:property "lower level feature collection property" + rdfs:member sp:Feature1 , sp:Feature2 ; + sp:property "lower level feature collection property" . -ex:Feature1 a geo:Feature ; +sp:Feature1 a geo:Feature ; rdfs:label "Feature 1" ; - ex:property "feature property" ; + geo:hasGeometry [ + geo:asWKT "POLYGON((1 1, 0 10, 10 10, 10 0, 1 1))"^^geo:wktLiteral + ] ; + sp:property "feature property" ; . -ex:Feature2 a geo:Feature ; +sp:Feature2 a geo:Feature ; rdfs:label "Feature 2" ; - ex:property "feature property" ; + geo:hasGeometry [ + geo:asWKT "POLYGON((2 2, 0 10, 10 10, 10 0, 2 2))"^^geo:wktLiteral + ] ; + sp:property "feature property" ; . \ No newline at end of file diff --git a/test_data/vocprez.ttl b/test_data/vocprez.ttl index b8de2b10..d0a4cdef 100644 --- a/test_data/vocprez.ttl +++ b/test_data/vocprez.ttl @@ -6,7 +6,7 @@ PREFIX skos: ex:VocPrezCatalog a dcat:Catalog ; rdfs:label "A Demo Catalog" ; - dcterms:hasPart ex:SchemingConceptScheme ; + dcterms:hasPart ex:SchemingConceptScheme , ; ex:property "cataract" ; . 
diff --git a/tests/_test_cql_fuseki.py b/tests/_test_cql_fuseki.py index bd81e987..f09d1cab 100755 --- a/tests/_test_cql_fuseki.py +++ b/tests/_test_cql_fuseki.py @@ -77,12 +77,9 @@ def test_spatial_contains_filter(client_fuseki): cql = json.load(f) cql_str = json.dumps(cql) cql_encoded = quote_plus(cql_str) - response = client_fuseki.get( - f"/cql?filter={cql_encoded}&_mediatype=application/sparql-query" - ) + response = client_fuseki.get(f"/cql?filter={cql_encoded}") response_graph = Graph().parse(data=response.text) print(response_graph.serialize(format="turtle")) - print("x") def test_spatial_contains_like(client_fuseki): @@ -102,8 +99,6 @@ def test_spatial_contains_inverse(client_fuseki): cql = json.load(f) cql_str = json.dumps(cql) cql_encoded = quote_plus(cql_str) - response = client_fuseki.get( - f"/cql?filter={cql_encoded}&_mediatype=application/sparql-query" - ) + response = client_fuseki.get(f"/cql?filter={cql_encoded}") response_graph = Graph().parse(data=response.text) print(response_graph.serialize(format="turtle")) diff --git a/tests/conftest.py b/tests/conftest.py index 402e5d6f..17ffa5ec 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import os -from rdflib import Graph, URIRef +from rdflib import Graph, URIRef, RDF +from rdflib.namespace import GEO +from starlette.routing import Mount # comment / uncomment for the CQL tests - cannot figure out how to get a different conftest picked up. 
os.environ["SPARQL_REPO_TYPE"] = "pyoxigraph" @@ -47,6 +49,10 @@ def override_get_repo(): app.dependency_overrides[get_data_repo] = override_get_repo + for route in app.routes: + if isinstance(route, Mount): + route.app.dependency_overrides[get_data_repo] = override_get_repo + with TestClient(app) as c: yield c @@ -67,29 +73,28 @@ def client_no_override() -> TestClient: def a_spaceprez_catalog_link(client): r = client.get("/catalogs") g = Graph().parse(data=r.text) - member_uri = URIRef("https://example.com/SpacePrezCatalog") - link = g.value(member_uri, URIRef(f"https://prez.dev/link", None)) + cat_uri = URIRef("https://example.com/spaceprez/SpacePrezCatalog") + link = g.value(cat_uri, URIRef(f"https://prez.dev/link", None)) return link @pytest.fixture() -def an_fc_link(client, a_spaceprez_catalog_link): +def a_spaceprez_dataset_link(client, a_spaceprez_catalog_link): r = client.get(f"{a_spaceprez_catalog_link}/collections") g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != a_spaceprez_catalog_link: - return link + ds_uri = URIRef("https://example.com/spaceprez/SpacePrezDataset") + link = g.value(ds_uri, URIRef(f"https://prez.dev/link", None)) + return link + + +@pytest.fixture() +def an_fc_link(client, a_spaceprez_dataset_link): + return f"{a_spaceprez_dataset_link}/features/collections/spcprz:FeatureCollection" @pytest.fixture() def a_feature_link(client, an_fc_link): - r = client.get(f"{an_fc_link}/items") - g = Graph().parse(data=r.text) - links = g.objects(subject=None, predicate=URIRef(f"https://prez.dev/link")) - for link in links: - if link != an_fc_link: - return link + return f"{an_fc_link}/items/spcprz:Feature1" @pytest.fixture() diff --git a/tests/data/prefixes/data_using_prefixes.ttl b/tests/data/prefixes/data_using_prefixes.ttl index 0d43b5d0..270d1040 100644 --- a/tests/data/prefixes/data_using_prefixes.ttl +++ b/tests/data/prefixes/data_using_prefixes.ttl 
@@ -6,6 +6,6 @@ PREFIX rdfs: a dcat:Catalog ; rdfs:label "A Catalog with prefixed david" ; - dcterms:hasPart ex:DCATResource ; + dcterms:hasPart ex:DCATDataset ; ex:property "some property" ; . \ No newline at end of file diff --git a/tests/test_cql_time.py b/tests/test_cql_time.py new file mode 100755 index 00000000..4d40790f --- /dev/null +++ b/tests/test_cql_time.py @@ -0,0 +1,63 @@ +import json +from pathlib import Path + +import pytest + +from prez.services.query_generation.cql import CQLParser + +cql_time_filenames = [ + "example20.json", # t_before instant + "example21.json", # t_after instant + "example22.json", # t_during + "example53.json", # t_after instant + "example54.json", # t_before instant + "example55.json", # t_contains interval + "example56.json", # t_disjoint interval + "example57.json", # t_during + "clause7_13.json", # t_during + # "clause7_17.json", # t_during + "additional_temporal_disjoint_instant.json", + "example58.json", # t_equals instant + "example59.json", # t_finishedBy interval + "example60.json", # t_finishes interval + "additional_temporal_during_intervals.json", # t_before interval + "example61.json", # t_intersects interval + "example62.json", # t_meets interval + "example63.json", # t_metBy interval + "example64.json", # t_overlappedBy interval + "example65.json", # t_overlaps interval + "example66.json", # t_startedBy interval + "example67.json", # t_starts interval + "clause7_12.json", # t_intersects +] + +cql_time_generated_queries = [ + Path(name).with_suffix(".rq") for name in cql_time_filenames +] + + +@pytest.mark.parametrize( + "cql_json_filename, output_query_filename", + [i for i in (zip(cql_time_filenames, cql_time_generated_queries))], +) +def test_time_funcs(cql_json_filename, output_query_filename): + cql_json_path = ( + Path(__file__).parent.parent / f"test_data/cql/input/{cql_json_filename}" + ) + cql_json = json.loads(cql_json_path.read_text()) + reference_query = ( + Path(__file__).parent.parent + / 
f"test_data/cql/expected_generated_queries/{output_query_filename}" + ).read_text() + context = json.load( + ( + Path(__file__).parent.parent + / "prez/reference_data/cql/default_context.json" + ).open() + ) + cql_parser = CQLParser(cql=cql_json, context=context) + cql_parser.generate_jsonld() + cql_parser.parse() + if not cql_parser.query_str == reference_query: + print(f"\n{cql_parser.query_str}") + assert cql_parser.query_str == reference_query diff --git a/tests/test_endpoints_catprez.py b/tests/test_endpoints_catprez.py index 6ec76a23..79c06d91 100755 --- a/tests/test_endpoints_catprez.py +++ b/tests/test_endpoints_catprez.py @@ -34,8 +34,8 @@ def test_lower_level_listing_anot(client, a_catprez_catalog_link): r = client.get(f"{a_catprez_catalog_link}/collections?_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) expected_response = ( - URIRef("https://example.com/DCATResource"), + URIRef("https://example.com/DCATDataset"), RDF.type, - DCAT.Resource, + DCAT.Dataset, ) assert next(response_graph.triples(expected_response)) diff --git a/tests/test_endpoints_object.py b/tests/test_endpoints_object.py index 29b16be6..3028b674 100755 --- a/tests/test_endpoints_object.py +++ b/tests/test_endpoints_object.py @@ -3,20 +3,22 @@ def test_feature_collection(client): - r = client.get(f"/object?uri=https://example.com/FeatureCollection") + r = client.get( + f"/object?uri=https://example.com/spaceprez/FeatureCollection&_mediatype=text/turtle" + ) response_graph = Graph().parse(data=r.text) assert ( - URIRef("https://example.com/FeatureCollection"), + URIRef("https://example.com/spaceprez/FeatureCollection"), RDF.type, GEO.FeatureCollection, ) in response_graph def test_feature(client): - r = client.get(f"/object?uri=https://example.com/Feature1") + r = client.get(f"/object?uri=https://example.com/spaceprez/Feature1&_mediatype=text/turtle") response_graph = Graph().parse(data=r.text) assert ( - URIRef("https://example.com/Feature1"), + 
URIRef("https://example.com/spaceprez/Feature1"), RDF.type, GEO.Feature, ) in response_graph diff --git a/tests/test_endpoints_profiles.py b/tests/test_endpoints_profiles.py index c237c222..c1ad0230 100755 --- a/tests/test_endpoints_profiles.py +++ b/tests/test_endpoints_profiles.py @@ -3,7 +3,7 @@ def test_profile(client_no_override): - r = client_no_override.get("/profiles?per_page=50") + r = client_no_override.get("/profiles?limit=50") g = Graph().parse(data=r.text) assert (URIRef("https://prez.dev/profile/prez"), RDF.type, PROF.Profile) in g diff --git a/tests/test_endpoints_spaceprez.py b/tests/test_endpoints_spaceprez.py index e58e9429..4d9f590d 100755 --- a/tests/test_endpoints_spaceprez.py +++ b/tests/test_endpoints_spaceprez.py @@ -7,7 +7,7 @@ def test_dataset_anot(client, a_spaceprez_catalog_link): g_text = r.text response_graph = Graph().parse(data=g_text) expected_response_1 = ( - URIRef("https://example.com/SpacePrezCatalog"), + URIRef("https://example.com/spaceprez/SpacePrezCatalog"), RDF.type, DCAT.Catalog, ) @@ -19,7 +19,7 @@ def test_feature_collection(client, an_fc_link): g_text = r.text response_graph = Graph().parse(data=g_text) assert ( - URIRef("https://example.com/FeatureCollection"), + URIRef("https://example.com/spaceprez/FeatureCollection"), RDF.type, GEO.FeatureCollection, ) in response_graph @@ -30,7 +30,7 @@ def test_feature(client, a_feature_link): g_text = r.text response_graph = Graph().parse(data=g_text) expected_response_1 = ( - URIRef("https://example.com/Feature1"), + URIRef("https://example.com/spaceprez/Feature1"), RDF.type, GEO.Feature, ) @@ -42,12 +42,12 @@ def test_feature_listing_anot(client, an_fc_link): g_text = r.text response_graph = Graph().parse(data=g_text) expected_response_1 = ( - URIRef("https://example.com/Feature1"), + URIRef("https://example.com/spaceprez/Feature1"), RDF.type, GEO.Feature, ) expected_response_2 = ( - URIRef("https://example.com/Feature2"), + URIRef("https://example.com/spaceprez/Feature2"), 
RDF.type, GEO.Feature, ) diff --git a/tests/test_geojson_to_wkt.py b/tests/test_geojson_to_wkt.py new file mode 100644 index 00000000..0071e8db --- /dev/null +++ b/tests/test_geojson_to_wkt.py @@ -0,0 +1,139 @@ +import pytest + +from prez.services.query_generation.cql import get_wkt_from_coords + + +@pytest.mark.parametrize( + "geom_type, coordinates, expected_wkt, expected_wkt_alternative", + [ + ("Point", [0.123, 0.456], "POINT (0.123 0.456)", None), + ("Point", [10.123456, -85.123456], "POINT (10.123456 -85.123456)", None), + ( + "MultiPoint", + [[0.1, 0.1], [1.1, 1.1]], + "MULTIPOINT ((0.1 0.1), (1.1 1.1))", + "MULTIPOINT (0.1 0.1, 1.1 1.1)", + ), + ( + "MultiPoint", + [[10.5, 40.5], [40.25, 30.75], [20.123, 20.456], [30.0001, 10.0001]], + "MULTIPOINT ((10.5 40.5), (40.25 30.75), (20.123 20.456), (30.0001 10.0001))", + "MULTIPOINT (10.5 40.5, 40.25 30.75, 20.123 20.456, 30.0001 10.0001)", + ), + ( + "LineString", + [[0.0, 0.0], [1.5, 1.5], [2.25, 2.25]], + "LINESTRING (0.0 0.0, 1.5 1.5, 2.25 2.25)", + "LINESTRING (0.00 0.00, 1.50 1.50, 2.25 2.25)", + ), + ( + "LineString", + [[100.123, 0.123], [101.456, 1.456], [102.789, 2.789]], + "LINESTRING (100.123 0.123, 101.456 1.456, 102.789 2.789)", + None, + ), + ( + "MultiLineString", + [[[0.1, 0.1], [1.1, 1.1]], [[2.2, 2.2], [3.3, 3.3]]], + "MULTILINESTRING ((0.1 0.1, 1.1 1.1), (2.2 2.2, 3.3 3.3))", + None, + ), + ( + "MultiLineString", + [ + [[100.001, 0.001], [101.001, 1.001]], + [[102.002, 2.002], [103.002, 3.002]], + ], + "MULTILINESTRING ((100.001 0.001, 101.001 1.001), (102.002 2.002, 103.002 3.002))", + None, + ), + ( + "Polygon", + [ + [ + [100.01, 0.01], + [101.02, 0.01], + [101.02, 1.02], + [100.01, 1.02], + [100.01, 0.01], + ] + ], + "POLYGON ((100.01 0.01, 101.02 0.01, 101.02 1.02, 100.01 1.02, 100.01 0.01))", + None, + ), + ( + "Polygon", + [ + [ + [35.001, 10.001], + [45.002, 45.002], + [15.003, 40.003], + [10.004, 20.004], + [35.001, 10.001], + ], + [ + [20.005, 30.005], + [35.006, 35.006], + [30.007, 
20.007], + [20.005, 30.005], + ], + ], + "POLYGON ((35.001 10.001, 45.002 45.002, 15.003 40.003, 10.004 20.004, 35.001 10.001), (20.005 30.005, 35.006 35.006, 30.007 20.007, 20.005 30.005))", + None, + ), + ( + "MultiPolygon", + [ + [[[0.1, 0.1], [1.1, 1.1], [1.1, 0.1], [0.1, 0.1]]], + [[[2.2, 2.2], [3.3, 3.3], [3.3, 2.2], [2.2, 2.2]]], + ], + "MULTIPOLYGON (((0.1 0.1, 1.1 1.1, 1.1 0.1, 0.1 0.1)), ((2.2 2.2, 3.3 3.3, 3.3 2.2, 2.2 2.2)))", + None, + ), + ( + "MultiPolygon", + [ + [ + [ + [102.001, 2.001], + [103.001, 2.001], + [103.001, 3.001], + [102.001, 3.001], + [102.001, 2.001], + ] + ], + [ + [ + [100.002, 0.002], + [101.002, 0.002], + [101.002, 1.002], + [100.002, 1.002], + [100.002, 0.002], + ], + [ + [100.503, 0.503], + [100.753, 0.503], + [100.753, 0.753], + [100.503, 0.753], + [100.503, 0.503], + ], + ], + ], + "MULTIPOLYGON (((102.001 2.001, 103.001 2.001, 103.001 3.001, 102.001 3.001, 102.001 2.001)), ((100.002 0.002, 101.002 0.002, 101.002 1.002, 100.002 1.002, 100.002 0.002), (100.503 0.503, 100.753 0.503, 100.753 0.753, 100.503 0.753, 100.503 0.503)))", + None, + ), + ], +) +def test_get_wkt_from_coords_valid( + geom_type, coordinates, expected_wkt, expected_wkt_alternative +): + assert ( + get_wkt_from_coords(coordinates, geom_type) == expected_wkt + or expected_wkt_alternative + ) + + +# Shapely appears to have a bug with input Polygon formats. The above tests fails ONLY for Polygon for Shapely. +# Geomet works with: +# [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]] +# which appears to be as per spec. 
diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py index f11e9491..8b2c70c0 100755 --- a/tests/test_node_selection_shacl.py +++ b/tests/test_node_selection_shacl.py @@ -20,10 +20,9 @@ def test_nodeshape_parsing(nodeshape_uri): focus_node=Var(value="focus_node"), ) assert ns.targetClasses == [ - URIRef("http://www.opengis.net/ont/geosparql#FeatureCollection"), URIRef("http://www.w3.org/2004/02/skos/core#ConceptScheme"), URIRef("http://www.w3.org/2004/02/skos/core#Collection"), - URIRef("http://www.w3.org/ns/dcat#Resource"), + URIRef("http://www.w3.org/ns/dcat#Dataset"), ] assert len(ns.propertyShapesURIs) == 1 diff --git a/tests/test_ogc.py b/tests/test_ogc.py index 9a983990..280ee4d9 100644 --- a/tests/test_ogc.py +++ b/tests/test_ogc.py @@ -16,7 +16,7 @@ def test_store() -> Store: # Create a new pyoxigraph Store store = Store() - file = Path("../test_data/spaceprez.ttl") + file = Path(__file__).parent.parent / "test_data/ogc_features.ttl" store.load(file.read_bytes(), "text/turtle") return store @@ -39,14 +39,39 @@ def override_get_data_repo(): app.dependency_overrides[get_data_repo] = override_get_data_repo with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c: + c.base_url = "http://localhost:8000/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features" yield c # Remove the override to ensure subsequent tests are unaffected app.dependency_overrides.clear() -@pytest.mark.xfail() -def test_features_core(client: TestClient): - scope = "features/core" +@pytest.mark.parametrize( + "test_file", + [ + pytest.param( + "apidefinition", + marks=pytest.mark.xfail( + reason="see https://github.com/RDFLib/prez/pull/265#issuecomment-2367130294" + ), + ), + "collection", + "collections", + "conformance", + pytest.param( + "crs", + marks=pytest.mark.xfail( + reason="see https://github.com/RDFLib/prez/issues/267" + ), + ), + "errorconditions", + "feature", + "features", + "general", + "landingpage", + ], +) +def 
test_features_core(client: TestClient, test_file: str): + scope = f"features/core/test_{test_file}.py" exit_code = run_ogctests(scope, test_client=client) assert exit_code == pytest.ExitCode.OK diff --git a/tests/test_ogc_features_manual.py b/tests/test_ogc_features_manual.py new file mode 100644 index 00000000..51bf074d --- /dev/null +++ b/tests/test_ogc_features_manual.py @@ -0,0 +1,16 @@ +from rdflib import Graph +from rdflib.namespace import RDF, GEO + + +def test_ogc_features_root(client): + r = client.get(f"/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features") + assert r.status_code == 200 + + +# +# def test_bbox_query(client): +# r = client.get(f"/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features/collections/ex:FeatureCollection/items?bbox=4.0,4.0,6.0,6.0") +# assert r.status_code == 200 +# g = Graph().parse(data=r.text, format="turtle") +# # this should filter one feature but not the other +# assert len(list(g.triples((None, RDF.type, GEO.Feature)))) == 1 diff --git a/tests/test_parse_datetimes.py b/tests/test_parse_datetimes.py new file mode 100644 index 00000000..35129b20 --- /dev/null +++ b/tests/test_parse_datetimes.py @@ -0,0 +1,97 @@ +import pytest +from datetime import datetime, timezone +from typing import Tuple, Optional + +from prez.models.query_params import parse_datetime + + +@pytest.mark.parametrize( + "input_str, expected_output", + [ + # Full RFC 3339 date-time strings + ( + "2018-02-12T23:20:50Z", + (datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), None), + ), + ( + "2018-02-12T23:20:50+00:00", + (datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), None), + ), + ( + "2018-02-12T23:20:50-07:00", + (datetime(2018, 2, 13, 6, 20, 50, tzinfo=timezone.utc), None), + ), + ( + "2018-02-12T23:20:50.123Z", + (datetime(2018, 2, 12, 23, 20, 50, 123000, tzinfo=timezone.utc), None), + ), + # RFC 3339 date strings (if your function supports them) + ("2018-02-12", (datetime(2018, 2, 12, 0, 0, 0), None)), + # Intervals + ( + 
"2018-02-12T23:20:50Z/2018-03-18T12:31:12Z", + ( + datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), + datetime(2018, 3, 18, 12, 31, 12, tzinfo=timezone.utc), + ), + ), + ( + "../2018-03-18T12:31:12Z", + ("..", datetime(2018, 3, 18, 12, 31, 12, tzinfo=timezone.utc)), + ), + ( + "2018-02-12T23:20:50Z/..", + (datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), ".."), + ), + ( + "/2018-03-18T12:31:12Z", + (None, datetime(2018, 3, 18, 12, 31, 12, tzinfo=timezone.utc)), + ), + ( + "2018-02-12T23:20:50Z/", + (datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), None), + ), + # Edge cases + ( + "2018-02-12T23:20:50+01:00", + (datetime(2018, 2, 12, 22, 20, 50, tzinfo=timezone.utc), None), + ), + ( + "2018-02-12t23:20:50z", # Testing lower case 't' and 'z' if supported + (datetime(2018, 2, 12, 23, 20, 50, tzinfo=timezone.utc), None), + ), + ], +) +def test_parse_datetime_valid( + input_str: str, expected_output: Tuple[Optional[datetime], Optional[datetime]] +): + parsed = parse_datetime(input_str) + assert parsed == expected_output + + +@pytest.mark.parametrize( + "input_str", + [ + # Invalid cases + "invalid_datetime", + # "2018-02-12 23:20:50Z", # Space instead of 'T' + # "2018-02-12T23:20:50", # Missing timezone + "2018-02-12T23:20:50ZZ", # Invalid timezone + # "2018-02-12T23:20:50+0000", # Invalid timezone format + "2018-02-12T23:20:50Z/2018-03-18T12:31:12Z/extra", # Too many parts + "../..", # Both parts open + "2018-13-12T00:00:00Z", # Invalid month + "2018-02-30T00:00:00Z", # Invalid day + "2018-02-12T24:00:00Z", # Invalid hour + "2018-02-12T23:60:00Z", # Invalid minute + "2018-02-12T23:59:61Z", # Invalid second (60 is valid for leap seconds) + ], +) +def test_parse_datetime_invalid(input_str: str): + with pytest.raises(ValueError): + parse_datetime(input_str) + + +def test_parse_datetime_none_input(): + with pytest.raises(AttributeError): + parse_datetime(None) diff --git a/tests/test_property_selection_shacl.py b/tests/test_property_selection_shacl.py 
index d36213f7..c326b626 100755 --- a/tests/test_property_selection_shacl.py +++ b/tests/test_property_selection_shacl.py @@ -1,3 +1,4 @@ +import pytest from rdflib import Graph, URIRef, SH, RDF, PROV, DCTERMS from prez.reference_data.prez_ns import REG @@ -8,24 +9,10 @@ OptionalGraphPattern, Filter, TriplesSameSubjectPath, + TriplesSameSubject, ) -# uri: URIRef | BNode # URI of the shape -# graph: Graph -# focus_node: IRI | Var = Var(value="focus_node") -# # inputs -# property_paths: Optional[List[PropertyPath]] = None -# or_klasses: Optional[List[URIRef]] = None -# # outputs -# grammar: Optional[GroupGraphPatternSub] = None -# tssp_list: Optional[List[SimplifiedTriple]] = None -# gpnt_list: Optional[List[GraphPatternNotTriples]] = None -# prof_nodes: Optional[Dict[str, Var | IRI]] = {} -# classes_at_len: Optional[Dict[str, List[URIRef]]] = {} -# _select_vars: Optional[List[Var]] = None - - def test_simple_path(): g = Graph().parse( data=""" @@ -43,7 +30,7 @@ def test_simple_path(): TriplesSameSubjectPath.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=RDF.type), - object=Var(value="prof_node_1"), + object=Var(value="prof_1_node_1"), ) in ps.tssp_list ) @@ -66,15 +53,15 @@ def test_sequence_path(): TriplesSameSubjectPath.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=PROV.qualifiedDerivation), - object=Var(value="prof_node_1"), + object=Var(value="prof_1_node_1"), ) in ps.tssp_list ) assert ( TriplesSameSubjectPath.from_spo( - subject=Var(value="prof_node_1"), + subject=Var(value="prof_1_node_1"), predicate=IRI(value=PROV.hadRole), - object=Var(value="prof_node_2"), + object=Var(value="prof_1_node_2"), ) in ps.tssp_list ) @@ -107,52 +94,52 @@ def test_union(): uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") ) assert ( - TriplesSameSubjectPath.from_spo( + TriplesSameSubject.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=PROV.qualifiedDerivation), - object=Var(value="prof_node_1"), + 
object=Var(value="prof_1_node_3"), ) - in ps.tssp_list + in ps.tss_list ) assert ( - TriplesSameSubjectPath.from_spo( - subject=Var(value="prof_node_1"), + TriplesSameSubject.from_spo( + subject=Var(value="prof_1_node_3"), predicate=IRI(value=PROV.hadRole), - object=Var(value="prof_node_2"), + object=Var(value="prof_1_node_4"), ) - in ps.tssp_list + in ps.tss_list ) assert ( - TriplesSameSubjectPath.from_spo( + TriplesSameSubject.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=PROV.qualifiedDerivation), - object=Var(value="prof_node_3"), + object=Var(value="prof_1_node_5"), ) - in ps.tssp_list + in ps.tss_list ) assert ( - TriplesSameSubjectPath.from_spo( - subject=Var(value="prof_node_3"), + TriplesSameSubject.from_spo( + subject=Var(value="prof_1_node_5"), predicate=IRI(value=PROV.entity), - object=Var(value="prof_node_4"), + object=Var(value="prof_1_node_6"), ) - in ps.tssp_list + in ps.tss_list ) assert ( - TriplesSameSubjectPath.from_spo( + TriplesSameSubject.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=DCTERMS.publisher), - object=Var(value="prof_node_5"), + object=Var(value="prof_1_node_1"), ) - in ps.tssp_list + in ps.tss_list ) assert ( - TriplesSameSubjectPath.from_spo( + TriplesSameSubject.from_spo( subject=Var(value="focus_node"), predicate=IRI(value=REG.status), - object=Var(value="prof_node_6"), + object=Var(value="prof_1_node_2"), ) - in ps.tssp_list + in ps.tss_list ) @@ -189,12 +176,7 @@ def test_complex_optional_props(): sh:property [ sh:minCount 0 ; - sh:path ( - sh:union ( - dcterms:publisher - ( prov:qualifiedDerivation prov:hadRole ) - ) - ) + sh:path dcterms:publisher , ( prov:qualifiedDerivation prov:hadRole ) ] . @@ -218,12 +200,7 @@ def test_excluded_props(): sh:property [ sh:maxCount 0 ; - sh:path ( - sh:union ( - dcterms:publisher - reg:status - ) - ) + sh:path dcterms:publisher , reg:status ] . 
@@ -242,3 +219,40 @@ def test_excluded_props(): in ps.tssp_list ) assert isinstance(ps.gpnt_list[0].content, Filter) + + +@pytest.mark.parametrize( + ["cardinality_type", "expected_result"], + [ + ( + "sh:zeroOrMorePath", + "?focus_node * ?prof_1_node_1", + ), + ( + "sh:oneOrMorePath", + "?focus_node + ?prof_1_node_1", + ), + ( + "sh:zeroOrOnePath", + "?focus_node ? ?prof_1_node_1", + ), + ], +) +def test_cardinality_props(cardinality_type, expected_result): + g = Graph().parse( + data=f""" + PREFIX dcterms: + PREFIX sh: + + sh:property [ + sh:path [ {cardinality_type} dcterms:publisher ] ; + ] + . + + """ + ) + path_bn = g.value(subject=URIRef("http://example-profile"), predicate=SH.property) + ps = PropertyShape( + uri=path_bn, graph=g, kind="profile", focus_node=Var(value="focus_node") + ) + assert ps.tssp_list[0].to_string() == expected_result