diff --git a/demo/prez-v4-backend/docker-compose.yml b/demo/prez-v4-backend/docker-compose.yml
index 09930154..aa8cd560 100755
--- a/demo/prez-v4-backend/docker-compose.yml
+++ b/demo/prez-v4-backend/docker-compose.yml
@@ -2,7 +2,7 @@ version: "3"
 
 services:
   fuseki:
-    image: "ghcr.io/zazuko/fuseki-geosparql:v3.3.0"
+    image: "ghcr.io/zazuko/fuseki-geosparql:v3.3.1"
     ports:
       - "3030:3030"
     volumes:
@@ -20,14 +20,14 @@ services:
       timeout: 10s
       retries: 3
 
-  prez:
-    build:
-      context: ../../
-      dockerfile: ./Dockerfile
-    ports:
-      - "8000:8000"
-    environment:
-      SPARQL_ENDPOINT: 'http://fuseki:3030/dataset'
-    depends_on:
-      fuseki:
-        condition: service_healthy
+#  prez:
+#    build:
+#      context: ../../
+#      dockerfile: ./Dockerfile
+#    ports:
+#      - "8000:8000"
+#    environment:
+#      SPARQL_ENDPOINT: 'http://fuseki:3030/dataset'
+#    depends_on:
+#      fuseki:
+#        condition: service_healthy
diff --git a/prez/app.py b/prez/app.py
index affe3d76..40a6fc31 100755
--- a/prez/app.py
+++ b/prez/app.py
@@ -24,7 +24,6 @@
     NoProfilesException,
 )
 from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo
-from prez.routers.cql import router as cql_router
 from prez.routers.identifier import router as identifier_router
 from prez.routers.management import router as management_router
 from prez.routers.ogc_router import router as ogc_records_router
@@ -59,7 +58,6 @@
     }
 )
 
-app.include_router(cql_router)
 app.include_router(management_router)
 app.include_router(sparql_router)
 app.include_router(search_router)
diff --git a/prez/dependencies.py b/prez/dependencies.py
index 0b901578..d38629df 100755
--- a/prez/dependencies.py
+++ b/prez/dependencies.py
@@ -248,6 +248,9 @@ async def get_focus_node(
 
 
 def handle_special_cases(ep_uri, focus_node):
+    """
+    uris provided to the nodeshapes are those in prez/reference_data/endpoints/endpoint_nodeshapes.ttl
+    """
     if ep_uri == EP["system/object"]:
         return NodeShape(
             uri=URIRef("http://example.org/ns#Object"),
@@ -269,6 +272,13 @@ def handle_special_cases(ep_uri, focus_node):
             kind="endpoint",
             focus_node=focus_node,
         )
+    elif ep_uri == EP["extended-ogc-records/cql-get"]:
+        return NodeShape(
+            uri=URIRef("http://example.org/ns#CQL"),
+            graph=endpoints_graph_cache,
+            kind="endpoint",
+            focus_node=focus_node,
+        )
 
 
 async def get_endpoint_nodeshapes(
@@ -285,6 +295,7 @@ async def get_endpoint_nodeshapes(
     ep_uri = endpoint_uri_type[0]
     if ep_uri in [
         EP["system/object"],
+        EP["extended-ogc-records/cql-get"],
        EP["extended-ogc-records/top-concepts"],
         EP["extended-ogc-records/narrowers"],
     ]:
diff --git a/prez/queries/object.py b/prez/queries/object.py
deleted file mode 100755
index 3ed69a5b..00000000
--- a/prez/queries/object.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from textwrap import dedent
-
-from jinja2 import Template
-
-
-def object_inbound_query(iri: str, predicate: str) -> str:
-    query = Template(
-        """
-        SELECT (COUNT(?iri) as ?count)
-        WHERE {
-            BIND(<{{ iri }}> as ?iri)
-
-            ?other <{{ predicate }}> ?iri .
-        }
-        """
-    ).render(iri=iri, predicate=predicate)
-
-    return dedent(query)
-
-
-def object_outbound_query(iri: str, predicate: str) -> str:
-    query = Template(
-        """
-        SELECT (COUNT(?iri) as ?count)
-        WHERE {
-            BIND(<{{ iri }}> as ?iri)
-
-            ?iri <{{ predicate }}> ?other .
-        }
-        """
-    ).render(iri=iri, predicate=predicate)
-
-    return dedent(query)
diff --git a/prez/reference_data/endpoints/endpoint_metadata.ttl b/prez/reference_data/endpoints/endpoint_metadata.ttl
index 3c2c7140..aaf71e48 100644
--- a/prez/reference_data/endpoints/endpoint_metadata.ttl
+++ b/prez/reference_data/endpoints/endpoint_metadata.ttl
@@ -19,6 +19,11 @@ sys:object
     ont:relevantShapes ex:Profiles ;
 .
 
+ogce:cql-get
+    a ont:ListingEndpoint ;
+    ont:relevantShapes ex:CQL ;
+    .
+
 ogce:catalog-listing
     a ont:ListingEndpoint ;
     ont:relevantShapes ex:Catalogs ;
diff --git a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl b/prez/reference_data/endpoints/endpoint_nodeshapes.ttl
similarity index 97%
rename from prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl
rename to prez/reference_data/endpoints/endpoint_nodeshapes.ttl
index 6f8fd0d3..ebb63361 100644
--- a/prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl
+++ b/prez/reference_data/endpoints/endpoint_nodeshapes.ttl
@@ -148,4 +148,10 @@ ex:Narrowers
     a sh:NodeShape ;
     sh:targetClass skos:Concept ;
     ont:hierarchyLevel 1 ;
-.
\ No newline at end of file
+.
+
+ex:CQL
+    a sh:NodeShape ;
+    sh:targetClass rdfs:Resource ;
+    ont:hierarchyLevel 1 ;
+    .
diff --git a/prez/reference_data/profiles/prez_default_profiles.ttl b/prez/reference_data/profiles/prez_default_profiles.ttl
index bf8190ef..063ebed7 100755
--- a/prez/reference_data/profiles/prez_default_profiles.ttl
+++ b/prez/reference_data/profiles/prez_default_profiles.ttl
@@ -5,6 +5,8 @@ PREFIX geo:
 PREFIX owl:
 PREFIX prez:
 PREFIX prof:
+PREFIX prov:
+PREFIX reg:
 PREFIX rdf:
 PREFIX rdfs:
 PREFIX sh:
@@ -49,6 +51,32 @@ PREFIX xsd:
     shext:bnode-depth 2 ;
 .
 
+
+
+    a prof:Profile , prez:ListingProfile;
+    dcterms:identifier "cqlgeo"^^xsd:token ;
+    dcterms:description "A CQL profile targeted towards listing CQL results, including geospatial information." ;
+    dcterms:title "CQL Geo profile" ;
+    altr-ext:constrainsClass rdfs:Resource ;
+    altr-ext:hasDefaultResourceFormat "text/anot+turtle" ;
+    altr-ext:hasResourceFormat "application/ld+json" ,
+        "application/ld+json" ,
+        "application/anot+ld+json" ,
+        "application/rdf+xml" ,
+        "text/anot+turtle" ,
+        "text/turtle" ;
+    sh:property [
+        sh:minCount 0 ;
+        sh:path (
+            sh:union (
+                rdf:type
+                ( geo:hasGeometry geo:asWKT )
+            )
+        )
+    ] ;
+    .
+
+
 
     a prof:Profile , prez:ListingProfile ;
     dcterms:description "A very basic data model that lists the members of container objects only, i.e. not their other properties" ;
diff --git a/prez/routers/ogc_router.py b/prez/routers/ogc_router.py
index 2e49c15d..cf087a2b 100755
--- a/prez/routers/ogc_router.py
+++ b/prez/routers/ogc_router.py
@@ -42,6 +42,11 @@
     summary="List Profiles",
     name=EP["system/profile-listing"],
 )
+@router.get(
+    path="/cql",
+    summary="CQL GET endpoint",
+    name=OGCE["cql-get"],
+)
 @router.get(
     "/catalogs",
     summary="Catalog Listing",
diff --git a/prez/services/connegp_service.py b/prez/services/connegp_service.py
index 973df3aa..04e358f3 100755
--- a/prez/services/connegp_service.py
+++ b/prez/services/connegp_service.py
@@ -237,7 +237,7 @@ def _compose_select_query(self) -> str:
             VALUES ?base_class {{ dcat:Dataset geo:FeatureCollection geo:Feature
             skos:ConceptScheme skos:Concept skos:Collection dcat:Catalog rdf:Resource
             dcat:Resource prof:Profile prez:SPARQLQuery
-            prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object }}
+            prez:SearchResult prez:CQLObjectList prez:QueryablesList prez:Object rdfs:Resource }}
             ?profile altr-ext:constrainsClass ?class ;
                      altr-ext:hasResourceFormat ?format ;
                      dcterms:title ?title .\
diff --git a/prez/services/listings.py b/prez/services/listings.py
index 0512cbe6..57bf468f 100755
--- a/prez/services/listings.py
+++ b/prez/services/listings.py
@@ -51,16 +51,20 @@ async def listing_function(
         order_by=order_by,
         order_by_direction=order_by_direction,
     )
-    profile_tss_list = profile_nodeshape.tss_list
-    profile_construct_triples = None
-    if profile_tss_list:
-        profile_construct_triples = ConstructTriples.from_tss_list(profile_tss_list)
+
+    # merge subselect and profile triples same subject (for construct triples)
+    construct_tss_list = []
+    subselect_tss_list = subselect_kwargs.pop("construct_tss_list")
+    if subselect_tss_list:
+        construct_tss_list.extend(subselect_tss_list)
+    if profile_nodeshape.tss_list:
+        construct_tss_list.extend(profile_nodeshape.tss_list)
 
     queries = []
     main_query = PrezQueryConstructor(
+        construct_tss_list=construct_tss_list,
         profile_triples=profile_nodeshape.tssp_list,
         profile_gpnt=profile_nodeshape.gpnt_list,
-        profile_construct_triples=profile_construct_triples,
         **subselect_kwargs,
     )
     queries.append(main_query.to_string())
diff --git a/prez/services/objects.py b/prez/services/objects.py
index fdd2f210..3df782ab 100755
--- a/prez/services/objects.py
+++ b/prez/services/objects.py
@@ -44,15 +44,10 @@ async def object_function(
         **none_kwargs,
     )
 
-    profile_tss_list = profile_nodeshape.tss_list
-    profile_construct_triples = None
-    if profile_tss_list:
-        profile_construct_triples = ConstructTriples.from_tss_list(profile_tss_list)
-
     query = PrezQueryConstructor(
         profile_triples=profile_nodeshape.tssp_list,
         profile_gpnt=profile_nodeshape.gpnt_list,
-        profile_construct_triples=profile_construct_triples
+        construct_tss_list=profile_nodeshape.tss_list
     ).to_string()
 
     if pmts.requested_mediatypes[0][0] == "application/sparql-query":
diff --git a/prez/services/query_generation/concept_hierarchy.py b/prez/services/query_generation/concept_hierarchy.py
index 8f89d224..2c89a1be 100644
--- a/prez/services/query_generation/concept_hierarchy.py
+++ b/prez/services/query_generation/concept_hierarchy.py
@@ -301,6 +301,10 @@ def __init__(
     def construct_triples(self):
         return self.construct_template.construct_triples
 
+    @property
+    def tss_list(self):
+        return [self.construct_template.construct_triples.triples]
+
     @property
     def inner_select_vars(self):
         return self.where_clause.group_graph_pattern.content.graph_patterns_or_triples_blocks[0].content.\
diff --git a/prez/services/query_generation/cql.py b/prez/services/query_generation/cql.py
index 8691de19..fbc0a6a7 100755
--- a/prez/services/query_generation/cql.py
+++ b/prez/services/query_generation/cql.py
@@ -4,12 +4,11 @@
 from rdflib import URIRef, Namespace
 from rdflib.namespace import GEO, SH
 
-from temp.grammar import *
-
 from prez.reference_data.cql.geo_function_mapping import (
     cql_sparql_spatial_mapping,
     cql_to_shapely_mapping,
 )
+from temp.grammar import *
 
 CQL = Namespace("http://www.opengis.net/doc/IS/cql2/1.0/")
 
@@ -17,33 +16,21 @@ class CQLParser:
     def __init__(self, cql=None, context: dict = None, cql_json: dict = None):
         self.ggps_inner_select = None
+        self.inner_select_gpnt_list = None
         self.cql: dict = cql
         self.context = context
         self.cql_json = cql_json
         self.var_counter = 0
         self.query_object = None
         self.query_str = None
+        self.gpnt_list = []
+        self.tss_list = []
+        self.tssp_list = []
 
     def generate_jsonld(self):
         combined = {"@context": self.context, **self.cql}
         self.cql_json = jsonld.expand(combined, options={"base": "h"})[0]
 
-    def extract_prefixes(self, prefix_dict: dict) -> dict:
-        """
-        Extracts prefixes and their URIs from the dictionary and formats them for SPARQL queries.
-
-        :param prefix_dict: Dictionary containing prefixes and their URIs.
-        :return: Dictionary containing PREFIX statements for SPARQL queries.
-        """
-        sparql_prefixes = {}
-
-        # Filtering out keys that don't correspond to prefixes or are special keys
-        special_keys = ["args", "op", "property", "@version"]
-        for prefix, entry in prefix_dict.items():
-            if prefix not in special_keys and isinstance(entry, str):
-                sparql_prefixes[prefix] = URIRef(entry)
-        return sparql_prefixes
-
     def parse(self):
         root = self.cql_json
         self.ggps_inner_select = next(self.parse_logical_operators(root))
@@ -51,7 +38,7 @@ def parse(self):
             group_graph_pattern=GroupGraphPattern(content=self.ggps_inner_select)
         )
         construct_template = ConstructTemplate(
-            construct_triples=ConstructTriples.from_tss_list(where.collect_triples())
+            construct_triples=ConstructTriples.from_tss_list(self.tss_list)
         )
         solution_modifier = SolutionModifier()
         self.query_object = ConstructQuery(
@@ -60,9 +47,12 @@ def parse(self):
             solution_modifier=solution_modifier,
         )
         self.query_str = self.query_object.to_string()
+        gpotb = self.query_object.where_clause.group_graph_pattern.content
+        gpnt_list = [i for i in gpotb.graph_patterns_or_triples_blocks if isinstance(i, GraphPatternNotTriples)]
+        self.inner_select_gpnt_list = gpnt_list
 
     def parse_logical_operators(
-        self, element, existing_ggps=None
+            self, element, existing_ggps=None
     ) -> Generator[GroupGraphPatternSub, None, None]:
         operator = element.get(str(CQL.operator))[0].get("@value")
         args = element.get(str(CQL.args))
@@ -89,8 +79,11 @@ def parse_logical_operators(
                 component = GroupGraphPattern(content=component)
                 or_components.append(component)
 
-            gougp = GroupOrUnionGraphPattern(group_graph_patterns=or_components)
-            gpnt = GraphPatternNotTriples(content=gougp)
+            gpnt = GraphPatternNotTriples(
+                content=GroupOrUnionGraphPattern(
+                    group_graph_patterns=or_components
+                )
+            )
             if ggps.graph_patterns_or_triples_blocks:
                 ggps.graph_patterns_or_triples_blocks.append(gpnt)
             else:
@@ -110,13 +103,14 @@ def parse_logical_operators(
             raise NotImplementedError(f"Operator {operator} not implemented.")
 
     def _add_triple(self, ggps, subject, predicate, object):
-        simple_triple = TriplesSameSubjectPath.from_spo(
-            subject=subject, predicate=predicate, object=object
-        )
+        tssp = TriplesSameSubjectPath.from_spo(subject=subject, predicate=predicate, object=object)
+        tss = TriplesSameSubject.from_spo(subject=subject, predicate=predicate, object=object)
+        self.tss_list.append(tss)
+        self.tssp_list.append(tssp)
         if ggps.triples_block:
-            ggps.triples_block.triples.append(simple_triple)
+            ggps.triples_block = TriplesBlock(triples=tssp, triples_block=ggps.triples_block)
         else:
-            ggps.triples_block = TriplesBlock(triples=[simple_triple])
+            ggps.triples_block = TriplesBlock(triples=tssp)
 
     # def _append_graph_pattern(self, ggps, graph_pattern):
     #     if ggps.graph_patterns_or_triples_blocks:
@@ -191,17 +185,28 @@ def _handle_like(self, args, existing_ggps=None):
         else:
             self._add_triple(ggps, subject, predicate, obj)
 
-        te = Expression.from_primary_expression(
-            primary_expression=PrimaryExpression(content=obj)
-        )
-        pe = Expression.from_primary_expression(
-            primary_expression=PrimaryExpression(content=RDFLiteral(value=value))
+        filter_gpnt = GraphPatternNotTriples(
+            content=Filter(
+                constraint=Constraint(
+                    content=BuiltInCall(
+                        other_expressions=RegexExpression(
+                            text_expression=Expression.from_primary_expression(
+                                primary_expression=PrimaryExpression(
+                                    content=obj
+                                )
+                            ),
+                            pattern_expression=Expression.from_primary_expression(
+                                primary_expression=PrimaryExpression(
+                                    content=RDFLiteral(
+                                        value=value
+                                    )
+                                )
+                            )
+                        )
+                    )
+                )
+            )
         )
-        re = RegexExpression(text_expression=te, pattern_expression=pe)
-        bic = BuiltInCall(other_expressions=re)
-        cons = Constraint(content=bic)
-        filter_expr = Filter(constraint=cons)
-        filter_gpnt = GraphPatternNotTriples(content=filter_expr)
         ggps.add_pattern(filter_gpnt)
         # self._append_graph_pattern(ggps, filter_expr)
         yield ggps
diff --git a/prez/services/query_generation/search.py b/prez/services/query_generation/search.py
index 221d805b..668784fa 100755
--- a/prez/services/query_generation/search.py
+++ b/prez/services/query_generation/search.py
@@ -303,6 +303,10 @@ def create_inner_ggp(
         ggp.content.add_pattern(GraphPatternNotTriples(content=filter_expr))
         return ggp
 
+    @property
+    def tss_list(self):
+        return self.construct_template.construct_triples.to_tss_list()
+
     # convenience properties for the construct query
     @property
     def construct_triples(self):
diff --git a/prez/services/query_generation/umbrella.py b/prez/services/query_generation/umbrella.py
index 094e40fa..46c72675 100755
--- a/prez/services/query_generation/umbrella.py
+++ b/prez/services/query_generation/umbrella.py
@@ -13,13 +13,13 @@ class PrezQueryConstructor(ConstructQuery):
     Query format:
 
     CONSTRUCT {
-
+
     }
     WHERE {
         # for listing queries only:
         {
             SELECT ?focus_node
             WHERE {
-
+
             }
             ORDER BY ()
@@ -33,22 +33,28 @@ class PrezQueryConstructor(ConstructQuery):
     """
 
     def __init__(
-        self,
-        additional_construct_triples: Optional[ConstructTriples] = None,
-        profile_triples: Optional[List[TriplesSameSubjectPath]] = [],
-        profile_gpnt: Optional[List[GraphPatternNotTriples]] = [],
-        profile_construct_triples: Optional[ConstructTriples] = None,
-        inner_select_vars: Optional[List[Union[Var, Tuple[Expression, Var]]]] = [],
-        inner_select_triples: Optional[List[TriplesSameSubjectPath]] = [],
-        inner_select_gpnt: Optional[List[GraphPatternNotTriples]] = [],
-        limit: Optional[int] = None,
-        offset: Optional[int] = None,
-        order_by: Optional[Var] = None,
-        order_by_direction: Optional[str] = None,
+            self,
+            construct_tss_list: Optional[List[TriplesSameSubject]] = None,
+
+            profile_triples: Optional[List[TriplesSameSubjectPath]] = [],
+            profile_gpnt: Optional[List[GraphPatternNotTriples]] = [],
+
+            inner_select_vars: Optional[List[Union[Var, Tuple[Expression, Var]]]] = [],
+            inner_select_tssp_list: Optional[List[TriplesSameSubjectPath]] = [],
+            inner_select_gpnt: Optional[List[GraphPatternNotTriples]] = [],
+
+            limit: Optional[int] = None,
+            offset: Optional[int] = None,
+            order_by: Optional[Var] = None,
+            order_by_direction: Optional[str] = None,
     ):
         # where clause triples and GraphPatternNotTriples - set up first as in the case of a listing query, the inner
         # select is appended to this list as a GraphPatternNotTriples
-        gpotb = [TriplesBlock.from_tssp_list(profile_triples), *profile_gpnt]
+        gpotb = []
+        if profile_triples:
+            gpotb.append(TriplesBlock.from_tssp_list(profile_triples))
+        if profile_gpnt:
+            gpotb.extend(profile_gpnt)
 
         # inner_select_vars typically set for search queries or custom select queries; otherwise we only want the focus
         # node from the inner select query
@@ -69,12 +75,12 @@ def __init__(
 
         # for listing queries only, add an inner select to the where clause
         ss_gpotb = []
-        if inner_select_triples:
-            ss_gpotb.append(TriplesBlock.from_tssp_list(inner_select_triples))
+        if inner_select_tssp_list:
+            ss_gpotb.append(TriplesBlock.from_tssp_list(inner_select_tssp_list))
         if inner_select_gpnt:
             ss_gpotb.extend(inner_select_gpnt)
 
-        if inner_select_triples or inner_select_gpnt:
+        if inner_select_tssp_list or inner_select_gpnt:
             gpnt_inner_subselect = GraphPatternNotTriples(
                 content=GroupOrUnionGraphPattern(
                     group_graph_patterns=[
@@ -117,13 +123,15 @@ def __init__(
         # construct triples is usually only from the profile, but in the case of search queries for example, additional
         # triples are added
         construct_triples = None
-        ct_list = []
-        if profile_construct_triples:
-            ct_list.append(profile_construct_triples)
-        if additional_construct_triples:
-            ct_list.append(additional_construct_triples)
-        if ct_list:
-            construct_triples = ConstructTriples.merge_ct(ct_list)
+        if construct_tss_list:
+            construct_triples = ConstructTriples.from_tss_list(construct_tss_list)
+        # ct_list = []
+        # if profile_construct_triples:
+        #     ct_list.append(profile_construct_triples)
+        # if construct_tss_list:
+        #     ct_list.append(construct_tss_list)
+        # if ct_list:
+        #     construct_triples = ConstructTriples.merge_ct(ct_list)
 
         construct_template = ConstructTemplate(
             construct_triples=construct_triples
@@ -146,22 +154,22 @@ def inner_select(self):
 
 
 def merge_listing_query_grammar_inputs(
-    cql_parser: Optional[CQLParser] = None,
-    endpoint_nodeshape: Optional[NodeShape] = None,
-    search_query: Optional[SearchQueryRegex] = None,
-    concept_hierarchy_query: Optional[ConceptHierarchyQuery] = None,
-    page: Optional[int] = None,
-    per_page: Optional[int] = None,
-    order_by: Optional[str] = None,
-    order_by_direction: Optional[bool] = None,
+        cql_parser: Optional[CQLParser] = None,
+        endpoint_nodeshape: Optional[NodeShape] = None,
+        search_query: Optional[SearchQueryRegex] = None,
+        concept_hierarchy_query: Optional[ConceptHierarchyQuery] = None,
+        page: Optional[int] = None,
+        per_page: Optional[int] = None,
+        order_by: Optional[str] = None,
+        order_by_direction: Optional[bool] = None,
 ) -> dict:
     """
     Merges the inputs for a query grammar.
     """
     kwargs = {
-        "additional_construct_triples": None,
+        "construct_tss_list": [],
         "inner_select_vars": [],
-        "inner_select_triples": [],
+        "inner_select_tssp_list": [],
         "inner_select_gpnt": [],
         "limit": None,
         "offset": None,
@@ -174,14 +182,15 @@ def merge_listing_query_grammar_inputs(
     kwargs["limit"] = limit
     kwargs["offset"] = offset
     if concept_hierarchy_query:
-        kwargs["additional_construct_triples"] = concept_hierarchy_query.construct_triples
+        kwargs["construct_tss_list"] = concept_hierarchy_query.tss_list
         kwargs["inner_select_vars"] = concept_hierarchy_query.inner_select_vars
         kwargs["order_by"] = concept_hierarchy_query.order_by
         kwargs["inner_select_gpnt"] = [concept_hierarchy_query.inner_select_gpnt]
-    #TODO can remove limit/offset/order by from search query - apply from QSA or defaults.
+
+    # TODO can remove limit/offset/order by from search query - apply from QSA or defaults.
     elif search_query:
-        kwargs["additional_construct_triples"] = search_query.construct_triples
+        kwargs["construct_tss_list"] = search_query.tss_list
         kwargs["inner_select_vars"] = search_query.inner_select_vars
         kwargs["limit"] = search_query.limit
         kwargs["offset"] = search_query.offset
@@ -197,10 +206,12 @@ def merge_listing_query_grammar_inputs(
             kwargs["order_by_direction"] = "ASC"
 
     if cql_parser:
-        pass
+        kwargs["construct_tss_list"].extend(cql_parser.tss_list)
+        kwargs["inner_select_tssp_list"].extend(cql_parser.tssp_list)
+        kwargs["inner_select_gpnt"].extend(cql_parser.inner_select_gpnt_list)
 
     if endpoint_nodeshape:
-        kwargs["inner_select_triples"].extend(endpoint_nodeshape.tssp_list)
+        kwargs["inner_select_tssp_list"].extend(endpoint_nodeshape.tssp_list)
         kwargs["inner_select_gpnt"].extend(endpoint_nodeshape.gpnt_list)
     return kwargs
diff --git a/temp/grammar/grammar.py b/temp/grammar/grammar.py
index 2abed6c2..59bbaea0 100755
--- a/temp/grammar/grammar.py
+++ b/temp/grammar/grammar.py
@@ -587,6 +587,7 @@ class GraphPatternNotTriples(SPARQLGrammarBase):
     ]
 
     def render(self) -> Generator[str, None, None]:
+        yield "\n"
         yield from self.content.render()
 
 
@@ -754,7 +755,7 @@ class Filter(SPARQLGrammarBase):
     constraint: Constraint
 
     def render(self) -> Generator[str, None, None]:
-        yield "\nFILTER "
+        yield "FILTER "
         yield from self.constraint.render()
 
     @classmethod
@@ -1123,6 +1124,14 @@ def from_tss_list(cls, tss_list: List[TriplesSameSubject]):
         print('')
         return ct
 
+    def to_tss_list(self):
+        tss_list = []
+        ct = self
+        while ct:
+            tss_list.append(ct.triples)
+            ct = ct.construct_triples
+        return tss_list
+
     @classmethod
     def merge_ct(cls, ct_list: List[ConstructTriples]):
         """
diff --git a/tests/_test_cql.py b/tests/_test_cql.py
deleted file mode 100755
index 9de4352b..00000000
--- a/tests/_test_cql.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import json
-from pathlib import Path
-
-import pytest
-from fastapi.testclient import TestClient
-from pyoxigraph.pyoxigraph import Store
-
-from prez.app import app
-from prez.dependencies import get_data_repo
-from prez.repositories import Repo, PyoxigraphRepo
-from urllib.parse import quote_plus
-
-
-@pytest.fixture(scope="session")
-def test_store() -> Store:
-    # Create a new pyoxigraph Store
-    store = Store()
-
-    for file in Path(__file__).parent.glob("../tests/data/*/input/*.ttl"):
-        store.load(file.read_bytes(), "text/turtle")
-
-    return store
-
-
-@pytest.fixture(scope="session")
-def test_repo(test_store: Store) -> Repo:
-    # Create a PyoxigraphQuerySender using the test_store
-    return PyoxigraphRepo(test_store)
-
-
-@pytest.fixture(scope="session")
-def client(test_repo: Repo) -> TestClient:
-    # Override the dependency to use the test_repo
-    def override_get_repo():
-        return test_repo
-
-    app.dependency_overrides[get_data_repo] = override_get_repo
-
-    with TestClient(app) as c:
-        yield c
-
-    # Remove the override to ensure subsequent tests are unaffected
-    app.dependency_overrides.clear()
-
-
-@pytest.mark.parametrize(
-    "cql_json_filename",
-    [
-        "example01.json",
-        "example02.json",
-        "example03.json",
-        "example05a.json",
-        "example05b.json",
-        "example06b.json",
-        "example09.json",
-        "example10.json",
-        "example11.json",
-        "example12.json",
-        "example14.json",
-        "example15.json",
-        "example17.json",
-        "example29.json",
-        "example31.json",
-        "example32.json",
-        "example33.json",
-        "example34.json",
-        "example35.json",
-        "example39.json",
-    ],
-)
-def test_simple(client, cql_json_filename):
-    cql_json = Path(__file__).parent / f"data/cql/input/{cql_json_filename}"
-    cql_json_as_json = json.loads(cql_json.read_text())
-    headers = {"content-type": "application/json"}
-    response = client.post("/cql", json=cql_json_as_json, headers=headers)
-    assert response.status_code == 200
-
-
-def test_intersects_post(client):
-    cql_json = Path(__file__).parent / f"data/cql/input/geo_intersects.json"
-    cql_json_as_json = json.loads(cql_json.read_text())
-    headers = {"content-type": "application/json"}
-    response = client.post("/cql", json=cql_json_as_json, headers=headers)
-    assert response.status_code == 200
-
-
-def test_intersects_get(client):
-    cql_json = Path(__file__).parent / f"data/cql/input/geo_intersects.json"
-    cql_json_as_json = json.loads(cql_json.read_text())
-    query_string = quote_plus(json.dumps(cql_json_as_json))
-    response = client.get(
-        f"/cql?filter={query_string}&_mediatype=application/sparql-query"
-    )
-    assert response.status_code == 200
diff --git a/tests/test_cql.py b/tests/test_cql.py
new file mode 100755
index 00000000..d8061acf
--- /dev/null
+++ b/tests/test_cql.py
@@ -0,0 +1,71 @@
+import json
+from pathlib import Path
+from urllib.parse import quote_plus
+
+import pytest
+
+
+cql_filenames = [
+    "example01.json",
+    "example02.json",
+    "example03.json",
+    "example05a.json",
+    "example05b.json",
+    "example06b.json",
+    "example09.json",
+    "example10.json",
+    "example11.json",
+    "example12.json",
+    "example14.json",
+    "example15.json",
+    "example17.json",
+    "example29.json",
+    "example31.json",
+    "example32.json",
+    "example33.json",
+    "example34.json",
+    "example35.json",
+    "example39.json"
+    ]
+
+# @pytest.mark.parametrize(
+#     "cql_json_filename",
+#     cql_filenames
+# )
+# def test_simple_post(client, cql_json_filename):
+#     cql_json_path = Path(__file__).parent.parent / f"test_data/cql/input/{cql_json_filename}"
+#     cql_json = json.loads(cql_json_path.read_text())
+#     headers = {"content-type": "application/json"}
+#     response = client.post("/cql", json=cql_json, headers=headers)
+#     assert response.status_code == 200
+
+@pytest.mark.parametrize(
+    "cql_json_filename",
+    cql_filenames
+)
+def test_simple_get(client, cql_json_filename):
+    cql_json_path = Path(__file__).parent.parent / f"test_data/cql/input/{cql_json_filename}"
+    cql_json = json.loads(cql_json_path.read_text())
+    query_string = quote_plus(json.dumps(cql_json))
+    response = client.get(
+        f"/cql?filter={query_string}"
+    )
+    assert response.status_code == 200
+
+
+# def test_intersects_post(client):
+#     cql_json_path = Path(__file__).parent.parent / f"test_data/cql/input/geo_intersects.json"
+#     cql_json = json.loads(cql_json_path.read_text())
+#     headers = {"content-type": "application/json"}
+#     response = client.post("/cql", json=cql_json, headers=headers)
+#     assert response.status_code == 200
+
+
+def test_intersects_get(client):
+    cql_json_path = Path(__file__).parent.parent / f"test_data/cql/input/geo_intersects.json"
+    cql_json = json.loads(cql_json_path.read_text())
+    query_string = quote_plus(json.dumps(cql_json))
+    response = client.get(
+        f"/cql?filter={query_string}&_mediatype=application/sparql-query"
+    )
+    assert response.status_code == 200
diff --git a/tests/test_node_selection_shacl.py b/tests/test_node_selection_shacl.py
index 77f67793..e0bd42d5 100755
--- a/tests/test_node_selection_shacl.py
+++ b/tests/test_node_selection_shacl.py
@@ -7,7 +7,7 @@
 from temp.grammar import Var
 
 endpoints_graph = Graph().parse(
-    "prez/reference_data/endpoints/endpoint_node_selection_shapes.ttl", format="turtle"
+    "prez/reference_data/endpoints/endpoint_nodeshapes.ttl", format="turtle"
 )
 
 
diff --git a/tests/test_query_construction.py b/tests/test_query_construction.py
index 58c3059f..ff07f623 100644
--- a/tests/test_query_construction.py
+++ b/tests/test_query_construction.py
@@ -45,7 +45,7 @@ def test_basic_listing():
                 object=Var(value="propValue"),
             ),
         ],
-        inner_select_triples=[
+        inner_select_tssp_list=[
            TriplesSameSubjectPath.from_spo(
                 subject=Var(value="focus_node"),
                 predicate=IRI(value=str(RDF.type)),
@@ -80,10 +80,9 @@ def test_search_query_regex():
                 object=Var(value="propValue"),
             ),
         ],
-        profile_construct_triples=ConstructTriples.from_tss_list(
-            [TriplesSameSubject.from_spo(IRI(value="https://s"), IRI(value="https://p"), IRI(value="https://o"))]
-        ),
-        additional_construct_triples=sq.construct_triples,
+        construct_tss_list=sq.construct_triples.to_tss_list() + [
+            TriplesSameSubject.from_spo(IRI(value="https://s"), IRI(value="https://p"), IRI(value="https://o"))
+        ],
         inner_select_vars=sq.inner_select_vars,
         inner_select_gpnt=[sq.inner_select_gpnt],
         limit=sq.limit,