From e317e5ca8b0a6eaadb37f8c043e93df6f677059d Mon Sep 17 00:00:00 2001
From: david
Date: Mon, 16 Sep 2024 09:24:56 +1000
Subject: [PATCH] MVP runtime editable profiles.

---
 prez/app.py                      |  3 ++
 prez/config.py                   |  1 +
 prez/dependencies.py             | 22 ++++++++---
 prez/repositories/pyoxigraph.py  |  6 +--
 prez/routers/configuration.py    | 65 ++++++++++++++++++++++++++++++++
 prez/services/connegp_service.py |  2 +
 tests/conftest.py                |  5 ++-
 7 files changed, 94 insertions(+), 10 deletions(-)
 create mode 100644 prez/routers/configuration.py

diff --git a/prez/app.py b/prez/app.py
index 0cd2b0e8..6187469e 100755
--- a/prez/app.py
+++ b/prez/app.py
@@ -34,6 +34,7 @@
 from prez.routers.ogc_router import router as ogc_records_router
 from prez.routers.ogc_features_router import features_subapi
 from prez.routers.sparql import router as sparql_router
+from prez.routers.configuration import router as configuration_router
 from prez.services.app_service import (
     healthcheck_sparql_endpoints,
     count_objects,
@@ -173,6 +174,8 @@

     app.include_router(management_router)
     app.include_router(ogc_records_router)
+    if settings.configuration_mode:
+        app.include_router(configuration_router)
     if _settings.enable_sparql_endpoint:
         app.include_router(sparql_router)
     app.mount(
diff --git a/prez/config.py b/prez/config.py
index 9412692b..0441e3ab 100755
--- a/prez/config.py
+++ b/prez/config.py
@@ -81,6 +81,7 @@ class Settings(BaseSettings):
     enable_sparql_endpoint: bool = False
     temporal_predicate: Optional[URIRef] = SDO.temporal
     endpoint_to_template_query_filename: Optional[Dict[str, str]] = {}
+    configuration_mode: bool = False

     @field_validator("prez_version")
     @classmethod
diff --git a/prez/dependencies.py b/prez/dependencies.py
index a4558d8b..c79cf1e7 100755
--- a/prez/dependencies.py
+++ b/prez/dependencies.py
@@ -3,8 +3,8 @@

 import httpx
 from fastapi import Depends, Request, HTTPException
-from pyoxigraph import Store
-from rdflib import Dataset, URIRef, Graph, SKOS, RDF
+from pyoxigraph import Store, NamedNode
+from rdflib import Dataset, URIRef, Graph, SKOS, RDF, PROF
 from sparql_grammar_pydantic import IRI, Var

 from prez.cache import (
@@ -114,12 +114,20 @@
     """
     # TODO refactor to use the local files directly
     for f in (Path(__file__).parent / "reference_data/profiles").glob("*.ttl"):
-        prof_bytes = Graph().parse(f).serialize(format="nt", encoding="utf-8")
+        prof_g = Graph().parse(f)
+        prof_uri = prof_g.value(None, RDF.type, PROF.Profile)
+        prof_bytes = prof_g.serialize(format="nt", encoding="utf-8")
         # profiles_bytes = profiles_graph_cache.default_context.serialize(format="nt", encoding="utf-8")
-        store.load(prof_bytes, "application/n-triples")
+        store.load(
+            prof_bytes, "application/n-triples", to_graph=NamedNode(str(prof_uri))
+        )

     endpoints_bytes = endpoints_graph_cache.serialize(format="nt", encoding="utf-8")
-    store.load(endpoints_bytes, "application/n-triples")
+    store.load(
+        endpoints_bytes,
+        "application/n-triples",
+        to_graph=NamedNode(str(ONT["endpoints"])),
+    )


 async def load_annotations_data_to_oxigraph(store: Store):
@@ -130,7 +138,9 @@
     for file in (Path(__file__).parent / "reference_data/annotations").glob("*"):
         g.parse(file)
     file_bytes = g.serialize(format="nt", encoding="utf-8")
-    store.load(file_bytes, "application/n-triples")
+    store.load(
+        file_bytes, "application/n-triples", to_graph=NamedNode(str(ONT["annotations"]))
+    )


 async def cql_post_parser_dependency(request: Request) -> CQLParser:
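
[Note, not part of the patch] The prez/dependencies.py hunks above load each profile document into its own named graph in the pyoxigraph system store, keyed by the profile URI that prof_g.value(None, RDF.type, PROF.Profile) finds. A minimal sketch of that pattern, using a made-up profile URI and inline Turtle instead of the real files under prez/reference_data/profiles/:

    from pyoxigraph import NamedNode, Store
    from rdflib import Graph, RDF, PROF

    store = Store()

    # Hypothetical profile document; Prez reads prez/reference_data/profiles/*.ttl instead.
    ttl = """
    @prefix prof: <http://www.w3.org/ns/dx/prof/> .
    <https://example.com/profile/exprof> a prof:Profile .
    """
    prof_g = Graph().parse(data=ttl, format="turtle")

    # The subject typed as prof:Profile becomes the name of the graph the data lands in.
    prof_uri = prof_g.value(None, RDF.type, PROF.Profile)
    store.load(
        prof_g.serialize(format="nt", encoding="utf-8"),
        "application/n-triples",
        to_graph=NamedNode(str(prof_uri)),
    )

    # One named graph per profile.
    print(list(store.named_graphs()))

Keeping each profile in its own named graph is what later lets a single profile be dropped and reloaded at runtime without touching the rest of the system data.
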
diff --git a/prez/repositories/pyoxigraph.py b/prez/repositories/pyoxigraph.py
index 520ac127..c1f2e2c6 100755
--- a/prez/repositories/pyoxigraph.py
+++ b/prez/repositories/pyoxigraph.py
@@ -48,14 +48,14 @@ def _handle_query_triples_results(results: pyoxigraph.QueryTriples) -> Graph:
         return g.parse(data=ntriples, format="ntriples")

     def _sync_rdf_query_to_graph(self, query: str) -> Graph:
-        results = self.pyoxi_store.query(query)
+        results = self.pyoxi_store.query(query, use_default_graph_as_union=True)
         result_graph = self._handle_query_triples_results(results)
         return result_graph

     def _sync_tabular_query_to_table(self, query: str, context: URIRef = None) -> tuple:
-        results = self.pyoxi_store.query(query)
+        named_graphs = list(self.pyoxi_store.named_graphs())
+        results = self.pyoxi_store.query(query, default_graph=named_graphs)
         results_dict = self._handle_query_solution_results(results)
-        # only return the bindings from the results.
         return context, results_dict["results"]["bindings"]

     def _sparql(self, query: str) -> dict | Graph | bool:
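
[Note, not part of the patch] With the system data now split across named graphs, a SPARQL query evaluated against the bare default graph would match nothing. That is why the repository passes use_default_graph_as_union=True for graph-returning queries and default_graph=list(store.named_graphs()) for tabular ones. A small pyoxigraph illustration with made-up graph names:

    from pyoxigraph import NamedNode, Store

    store = Store()
    # Two triples in two different named graphs (illustrative IRIs).
    store.load(
        b"<https://ex.com/s1> <https://ex.com/p> <https://ex.com/o1> .\n",
        "application/n-triples",
        to_graph=NamedNode("https://ex.com/g1"),
    )
    store.load(
        b"<https://ex.com/s2> <https://ex.com/p> <https://ex.com/o2> .\n",
        "application/n-triples",
        to_graph=NamedNode("https://ex.com/g2"),
    )

    q = "SELECT (COUNT(*) AS ?c) WHERE { ?s ?p ?o }"

    # Default graph only: the named graphs are invisible, so the count is 0.
    print(next(iter(store.query(q)))["c"])
    # Union of all graphs, as _sync_rdf_query_to_graph now does.
    print(next(iter(store.query(q, use_default_graph_as_union=True)))["c"])
    # Equivalent for tabular queries: use every named graph as the default graph.
    print(next(iter(store.query(q, default_graph=list(store.named_graphs()))))["c"])
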
diff --git a/prez/routers/configuration.py b/prez/routers/configuration.py
new file mode 100644
index 00000000..8b465dfc
--- /dev/null
+++ b/prez/routers/configuration.py
@@ -0,0 +1,65 @@
+import logging
+from pathlib import Path as PLPath
+
+from fastapi import APIRouter, Depends, Body
+from fastapi import HTTPException
+from fastapi import Path as FAPath
+from pyoxigraph import NamedNode
+from rdflib import Graph
+from rdflib.exceptions import ParserError
+
+from prez.cache import profiles_graph_cache
+from prez.dependencies import get_system_repo
+from prez.repositories import Repo
+from prez.services.curie_functions import get_uri_for_curie_id
+
+router = APIRouter(tags=["Configuration"])
+log = logging.getLogger(__name__)
+
+# Read the example RDF data from a file
+example_profile = (PLPath(__file__).parent / "example_profile.ttl").read_text()
+
+
+@router.put(
+    "/update-profile/{profile_name}", summary="Update Profile", tags=["Configuration"]
+)
+async def update_profile(
+    profile_name: str = FAPath(
+        ...,
+        title="Profile Name",
+        description="The name of the profile to update",
+        example="prez:ExProf",
+    ),
+    profile_update: str = Body(
+        ...,
+        example=example_profile,
+        media_type="text/turtle",
+    ),
+    system_repo: Repo = Depends(get_system_repo),
+):
+    profile_uri = await get_uri_for_curie_id(profile_name)
+    try:
+        new_profile_g = Graph().parse(data=profile_update, format="turtle")
+    except ParserError as e:
+        raise HTTPException(status_code=400, detail=f"Error parsing profile: {e}")
+    try:
+        old_profile = profiles_graph_cache.cbd(profile_uri)
+    except KeyError:
+        raise HTTPException(
+            status_code=404, detail=f"Profile {profile_name} not found."
+        )
+    for t in old_profile:
+        profiles_graph_cache.remove(t)
+    try:
+        # system_repo.pyoxi_store.update(f"DELETE DATA {{ {" ".join([i.n3() for i in t])} }}")
+        system_repo.pyoxi_store.remove_graph(NamedNode(str(profile_uri)))
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error updating profile: {e}")
+    for t in new_profile_g:
+        profiles_graph_cache.add(t)
+    new_prof_bytes = new_profile_g.serialize(format="nt", encoding="utf-8")
+    system_repo.pyoxi_store.load(
+        new_prof_bytes, "application/n-triples", to_graph=NamedNode(str(profile_uri))
+    )
+    log.info(f"Profile {profile_name} updated.")
+    return {"message": f"Profile {profile_name} updated."}
diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py
index d4ff47f1..4f77c52c 100755
--- a/prez/services/connegp_service.py
+++ b/prez/services/connegp_service.py
@@ -258,6 +258,7 @@ def _compose_select_query(self) -> str:

         SELECT ?profile ?title ?class (count(?mid) as ?distance) ?req_profile ?def_profile ?format ?req_format ?def_format
         WHERE {{
+          GRAPH ?g {{
             VALUES ?class {{{" ".join('<' + str(klass) + '>' for klass in self.classes)}}}
             ?class rdfs:subClassOf* ?mid .
             ?mid rdfs:subClassOf* ?base_class .
@@ -274,6 +275,7 @@
             altr-ext:hasDefaultProfile ?profile }} AS ?def_profile)
         {self._generate_mediatype_if_statements()}
         BIND(EXISTS {{ ?profile altr-ext:hasDefaultResourceFormat ?format }} AS ?def_format)
+          }}
     }}
     GROUP BY ?class ?profile ?req_profile ?def_profile ?format ?req_format ?def_format ?title
     ORDER BY DESC(?req_profile) DESC(?distance) DESC(?def_profile) DESC(?req_format) DESC(?def_format)
diff --git a/tests/conftest.py b/tests/conftest.py
index f1bcacb1..9676f857 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,6 @@
 import os

+from pyoxigraph import NamedNode
 from rdflib import Graph, URIRef, RDF
 from rdflib.namespace import GEO
 from starlette.routing import Mount
@@ -28,7 +29,9 @@
 def test_store() -> Store:
     store = Store()

     for file in Path(__file__).parent.glob("../test_data/*.ttl"):
-        store.load(file.read_bytes(), "text/turtle")
+        store.load(
+            file.read_bytes(), "text/turtle", to_graph=NamedNode(f"https://{file.stem}")
+        )

     return store
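
[Note, not part of the patch] To exercise the new route, configuration_mode must be enabled, e.g. CONFIGURATION_MODE=true, assuming the usual pydantic-settings environment mapping for the Settings class. A rough client sketch; the host, the profile CURIE and the Turtle payload are illustrative only (the CURIE "prez:ExProf" is taken from the route's own example):

    import httpx

    # Illustrative replacement profile; any valid Turtle describing the profile would do.
    turtle = """
    @prefix prof: <http://www.w3.org/ns/dx/prof/> .
    @prefix dcterms: <http://purl.org/dc/terms/> .
    <https://example.com/profile/exprof> a prof:Profile ;
        dcterms:title "Example Profile (edited)" .
    """

    resp = httpx.put(
        "http://localhost:8000/update-profile/prez:ExProf",  # assumed local Prez instance
        content=turtle,
        headers={"Content-Type": "text/turtle"},
    )
    # On success the route returns a JSON confirmation, e.g.
    # {"message": "Profile prez:ExProf updated."}
    print(resp.status_code, resp.text)

The PUT replaces the profile in both places it is held: the triples are swapped in the rdflib profiles_graph_cache, and the profile's named graph in the pyoxigraph system store is dropped and reloaded, which is what makes the GRAPH ?g wrapper added to the content-negotiation query keep working after an update.
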