From e97b0fb7f4f59f0954c893a0ab852c7b89d83c1e Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 13:53:38 -0500
Subject: [PATCH 1/9] FIX: pass pagination query parameters as kwargs

---
 tiled/client/base.py      | 18 ++++++++++++------
 tiled/client/container.py | 19 ++++++++++++++++---
 2 files changed, 28 insertions(+), 9 deletions(-)

diff --git a/tiled/client/base.py b/tiled/client/base.py
index b9a489342..1eac99020 100644
--- a/tiled/client/base.py
+++ b/tiled/client/base.py
@@ -3,6 +3,7 @@
 from copy import copy, deepcopy
 from dataclasses import asdict
 from pathlib import Path
+from urllib.parse import parse_qs, urlparse
 
 import json_merge_patch
 import jsonpatch
@@ -65,25 +66,30 @@ def __getitem__(self, item_):
         if offset is None:
             offset = 0
         if item_.stop is None:
-            params = f"?page[offset]={offset}"
+            params = {"page[offset]": offset}
         else:
             limit = item_.stop - offset
-            params = f"?page[offset]={offset}&page[limit]={limit}"
+            params = {"page[offset]": offset, "page[limit]": limit}
 
-        next_page = self._link + params
+        next_page_url = self._link
         result = []
-        while next_page is not None:
+        while next_page_url is not None:
             content = handle_error(
                 self.context.http_client.get(
-                    next_page,
+                    next_page_url,
                     headers={"Accept": MSGPACK_MIME_TYPE},
+                    params=params,
                 )
             ).json()
             if len(result) == 0:
                 result = content.copy()
             else:
                 result["data"].append(content["data"])
-            next_page = content["links"]["next"]
+            next_page_url = content["links"]["next"]
+            if next_page_url:
+                parsed_url = urlparse(next_page_url)
+                params = parse_qs(parsed_url.query)
+                next_page_url = parsed_url._replace(query="").geturl()
         return result["data"]
diff --git a/tiled/client/container.py b/tiled/client/container.py
index 7a87ce507..cd25adaed 100644
--- a/tiled/client/container.py
+++ b/tiled/client/container.py
@@ -6,6 +6,7 @@
 import time
 import warnings
 from dataclasses import asdict
+from urllib.parse import parse_qs, urlparse
 
 import entrypoints
 import httpx
@@ -364,7 +365,8 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         sorting_params = self._reversed_sorting_params
         assert start >= 0
         assert (stop is None) or (stop >= 0)
-        next_page_url = f"{self.item['links']['search']}?page[offset]={start}"
+        next_page_url = f"{self.item['links']['search']}"
+        pagination_params = {"page[offset]": start}
         item_counter = itertools.count(start)
         while next_page_url is not None:
             content = handle_error(
@@ -373,6 +375,7 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                     headers={"Accept": MSGPACK_MIME_TYPE},
                     params={
                         "fields": "",
+                        **pagination_params,
                         **self._queries_as_params,
                         **sorting_params,
                     },
@@ -387,6 +390,10 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                 return
             yield item["id"]
             next_page_url = content["links"]["next"]
+            if next_page_url:
+                parsed_url = urlparse(next_page_url)
+                pagination_params = parse_qs(parsed_url.query)
+                next_page_url = parsed_url._replace(query="").geturl()
 
     def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         # If the contents of this node was provided in-line, and we don't need
@@ -416,15 +423,18 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         sorting_params = self._reversed_sorting_params
         assert start >= 0
         assert (stop is None) or (stop >= 0)
-        next_page_url = f"{self.item['links']['search']}?page[offset]={start}"
+        next_page_url = f"{self.item['links']['search']}"
+        pagination_params = {"page[offset]": start}
         item_counter = itertools.count(start)
         while next_page_url is not None:
             params = {
+                **pagination_params,
                 **self._queries_as_params,
                 **sorting_params,
             }
             if self._include_data_sources:
                 params["include_data_sources"] = True
+            print(f"{next_page_url=}, {MSGPACK_MIME_TYPE=}, {params=}")
             content = handle_error(
                 self.context.http_client.get(
                     next_page_url,
@@ -436,7 +446,6 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                     content["meta"]["count"],
                     time.monotonic() + LENGTH_CACHE_TTL,
                 )
-
             for item in content["data"]:
                 if stop is not None and next(item_counter) == stop:
                     return
@@ -448,6 +457,10 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                 include_data_sources=self._include_data_sources,
             )
             next_page_url = content["links"]["next"]
+            if next_page_url:
+                parsed_url = urlparse(next_page_url)
+                pagination_params = parse_qs(parsed_url.query)
+                next_page_url = parsed_url._replace(query="").geturl()
 
     def keys(self):
         return KeysView(lambda: len(self), self._keys_slice)

From c3a31a339ac16faaed249dbde542b1c13630a7ad Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 14:03:29 -0500
Subject: [PATCH 2/9] MNT: update changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 28675aa9d..57ce896ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@ Write the date in place of the "Unreleased" in the case a new version is release
 ## Unreleased
 
 - Fix curl and httpie installation in docker image.
+- Fix the construction of URLs by passing query parameters as kwargs.
 
 ### Added
 

From 7acab934ec882bea1d24a31c7d4460a7cabf0bfb Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 14:33:18 -0500
Subject: [PATCH 3/9] MNT: clean up

---
 tiled/client/container.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tiled/client/container.py b/tiled/client/container.py
index cd25adaed..bde8a9bc8 100644
--- a/tiled/client/container.py
+++ b/tiled/client/container.py
@@ -434,7 +434,6 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
             }
             if self._include_data_sources:
                 params["include_data_sources"] = True
-            print(f"{next_page_url=}, {MSGPACK_MIME_TYPE=}, {params=}")
             content = handle_error(
                 self.context.http_client.get(
                     next_page_url,

From 76ffe554b29d8263d5c55a710d72724c8a9872dd Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 17:45:17 -0500
Subject: [PATCH 4/9] FIX: keep query parameters in all container endpoints

---
 tiled/client/base.py      |  8 ++------
 tiled/client/container.py | 34 +++++++++++++++-------------------
 2 files changed, 17 insertions(+), 25 deletions(-)

diff --git a/tiled/client/base.py b/tiled/client/base.py
index 1eac99020..03ef52827 100644
--- a/tiled/client/base.py
+++ b/tiled/client/base.py
@@ -55,7 +55,7 @@ def __getitem__(self, item_):
                 self.context.http_client.get(
                     self._link,
                     headers={"Accept": MSGPACK_MIME_TYPE},
-                    params={"page[offset]": offset, "page[limit]": limit},
+                    params={**parse_qs(urlparse(self._link).query), "page[offset]": offset, "page[limit]": limit},
                 )
             ).json()
             (result,) = content["data"]
@@ -78,7 +78,7 @@ def __getitem__(self, item_):
                 self.context.http_client.get(
                     next_page_url,
                     headers={"Accept": MSGPACK_MIME_TYPE},
-                    params=params,
+                    params={**parse_qs(urlparse(next_page_url).query), **params},
                 )
             ).json()
             if len(result) == 0:
@@ -86,10 +86,6 @@ def __getitem__(self, item_):
             else:
                 result["data"].append(content["data"])
             next_page_url = content["links"]["next"]
-            if next_page_url:
-                parsed_url = urlparse(next_page_url)
-                params = parse_qs(parsed_url.query)
-                next_page_url = parsed_url._replace(query="").geturl()
         return result["data"]
diff --git a/tiled/client/container.py b/tiled/client/container.py
index bde8a9bc8..30e227a54 100644
--- a/tiled/client/container.py
+++ b/tiled/client/container.py
@@ -173,11 +173,13 @@ def __len__(self):
         if now < deadline:
             # Used the cached value and do not make any request.
             return length
+        link = self.item["links"]["search"]
         content = handle_error(
             self.context.http_client.get(
-                self.item["links"]["search"],
+                link,
                 headers={"Accept": MSGPACK_MIME_TYPE},
                 params={
+                    **parse_qs(urlparse(link).query),
                     "fields": "",
                     **self._queries_as_params,
                     **self._sorting_params,
@@ -212,6 +214,7 @@ def __iter__(self, _ignore_inlined_contents=False):
                     next_page_url,
                     headers={"Accept": MSGPACK_MIME_TYPE},
                     params={
+                        **parse_qs(urlparse(next_page_url).query),
                         "fields": "",
                         **self._queries_as_params,
                         **self._sorting_params,
@@ -259,11 +262,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
             }
             if self._include_data_sources:
                 params["include_data_sources"] = True
+            link = self.item["links"]["search"]
             content = handle_error(
                 self.context.http_client.get(
-                    self.item["links"]["search"],
+                    link,
                     headers={"Accept": MSGPACK_MIME_TYPE},
-                    params=params,
+                    params={**parse_qs(urlparse(link).query),**params},
                 )
             ).json()
             self._cached_len = (
@@ -313,11 +317,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
                 params = {}
                 if self._include_data_sources:
                     params["include_data_sources"] = True
+                link = self_link + "".join(f"/{key}" for key in keys[i:])
                 content = handle_error(
                     self.context.http_client.get(
-                        self_link + "".join(f"/{key}" for key in keys[i:]),
+                        link,
                         headers={"Accept": MSGPACK_MIME_TYPE},
-                        params=params,
+                        params={**parse_qs(urlparse(link).query), **params},
                     )
                 ).json()
             except ClientError as err:
@@ -365,8 +370,7 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         sorting_params = self._reversed_sorting_params
         assert start >= 0
         assert (stop is None) or (stop >= 0)
-        next_page_url = f"{self.item['links']['search']}"
-        pagination_params = {"page[offset]": start}
+        next_page_url = f"{self.item['links']['search']}?page[offset]={start}"
         item_counter = itertools.count(start)
         while next_page_url is not None:
             content = handle_error(
@@ -374,8 +378,8 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                     headers={"Accept": MSGPACK_MIME_TYPE},
                     params={
+                        **parse_qs(urlparse(next_page_url).query),
                         "fields": "",
-                        **pagination_params,
                         **self._queries_as_params,
                         **sorting_params,
                     },
@@ -390,10 +394,6 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                 return
             yield item["id"]
             next_page_url = content["links"]["next"]
-            if next_page_url:
-                parsed_url = urlparse(next_page_url)
-                pagination_params = parse_qs(parsed_url.query)
-                next_page_url = parsed_url._replace(query="").geturl()
 
     def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         # If the contents of this node was provided in-line, and we don't need
@@ -423,12 +423,11 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
         sorting_params = self._reversed_sorting_params
         assert start >= 0
         assert (stop is None) or (stop >= 0)
-        next_page_url = f"{self.item['links']['search']}"
-        pagination_params = {"page[offset]": start}
+        next_page_url = f"{self.item['links']['search']}?page[offset]={start}"
         item_counter = itertools.count(start)
         while next_page_url is not None:
             params = {
-                **pagination_params,
+                **parse_qs(urlparse(next_page_url).query),
                 **self._queries_as_params,
                 **sorting_params,
             }
@@ -456,10 +455,6 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
                 include_data_sources=self._include_data_sources,
             )
             next_page_url = content["links"]["next"]
-            if next_page_url:
-                parsed_url = urlparse(next_page_url)
-                pagination_params = parse_qs(parsed_url.query)
-                next_page_url = parsed_url._replace(query="").geturl()
 
     def keys(self):
         return KeysView(lambda: len(self), self._keys_slice)
@@ -507,6 +502,7 @@ def distinct(
                 link,
                 headers={"Accept": MSGPACK_MIME_TYPE},
                 params={
+                    **parse_qs(urlparse(link).query),
                     "metadata": metadata_keys,
                     "structure_families": structure_families,
                     "specs": specs,

From 10619d03a1810552201f7cd4e9882e4e5b21877e Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 17:45:51 -0500
Subject: [PATCH 5/9] MNT: lint

---
 tiled/client/base.py      | 6 +++++-
 tiled/client/container.py | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/tiled/client/base.py b/tiled/client/base.py
index 03ef52827..a4395692f 100644
--- a/tiled/client/base.py
+++ b/tiled/client/base.py
@@ -55,7 +55,11 @@ def __getitem__(self, item_):
                 self.context.http_client.get(
                     self._link,
                     headers={"Accept": MSGPACK_MIME_TYPE},
-                    params={**parse_qs(urlparse(self._link).query), "page[offset]": offset, "page[limit]": limit},
+                    params={
+                        **parse_qs(urlparse(self._link).query),
+                        "page[offset]": offset,
+                        "page[limit]": limit,
+                    },
                 )
             ).json()
             (result,) = content["data"]
diff --git a/tiled/client/container.py b/tiled/client/container.py
index 30e227a54..a521ef2a9 100644
--- a/tiled/client/container.py
+++ b/tiled/client/container.py
@@ -267,7 +267,7 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
                 self.context.http_client.get(
                     link,
                     headers={"Accept": MSGPACK_MIME_TYPE},
-                    params={**parse_qs(urlparse(link).query),**params},
+                    params={**parse_qs(urlparse(link).query), **params},
                 )
             ).json()

From 0a83cd0a9d6209ee0ba9c7c3831da05f0de6350a Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 2 Dec 2024 17:49:32 -0500
Subject: [PATCH 6/9] FIX: revert changes to query params

---
 tiled/client/base.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/tiled/client/base.py b/tiled/client/base.py
index a4395692f..d93511e90 100644
--- a/tiled/client/base.py
+++ b/tiled/client/base.py
@@ -70,19 +70,17 @@ def __getitem__(self, item_):
         if offset is None:
             offset = 0
         if item_.stop is None:
-            params = {"page[offset]": offset}
+            params = f"?page[offset]={offset}"
         else:
             limit = item_.stop - offset
-            params = {"page[offset]": offset, "page[limit]": limit}
+            params = f"?page[offset]={offset}&page[limit]={limit}"
 
-        next_page_url = self._link
+        next_page_url = self._link + params
         result = []
         while next_page_url is not None:
             content = handle_error(
                 self.context.http_client.get(
-                    next_page_url,
-                    headers={"Accept": MSGPACK_MIME_TYPE},
-                    params={**parse_qs(urlparse(next_page_url).query), **params},
+                    next_page_url, headers={"Accept": MSGPACK_MIME_TYPE}
                 )
             ).json()
             if len(result) == 0:

From 0c26cfad955411aeb5a326112bc46f597cc2116b Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 9 Dec 2024 10:24:30 -0500
Subject: [PATCH 7/9] FIX: fix remaining query parameters passed to httpx

---
 tiled/_tests/test_dataframe.py |  5 +++++
 tiled/client/array.py          | 18 ++++++++++++++----
 tiled/client/base.py           | 34 +++++++++++++++++++++++++++++-----
 tiled/client/constructors.py   |  8 ++++++--
 tiled/client/context.py        | 33 +++++++++++++++++++++++----------
 tiled/client/dataframe.py      | 14 +++++++++++---
 tiled/client/sparse.py         | 12 ++++++++----
 tiled/client/utils.py          | 15 +++++++++++++--
 tiled/client/xarray.py         | 12 ++++++++++--
 9 files changed, 119 insertions(+), 32 deletions(-)

diff --git a/tiled/_tests/test_dataframe.py b/tiled/_tests/test_dataframe.py
index 9a0d3250e..1df2163bf 100644
--- a/tiled/_tests/test_dataframe.py
+++ b/tiled/_tests/test_dataframe.py
@@ -1,3 +1,5 @@
+from urllib.parse import parse_qs, urlparse
+
 import numpy
 import pandas.testing
 import pytest
@@ -150,6 +152,7 @@ def test_http_fetch_columns(context, http_method, link):
     original_df = tree["wide"].read()
     columns = list(original_df.columns)[::2]  # Pick a subset of columns
     params = {
+        **parse_qs(urlparse(url_path).query),
         "partition": 0,  # Used by /table/partition; ignored by /table/full
         "column": columns,
     }
@@ -176,6 +179,7 @@ def test_deprecated_query_parameter(context):
     client = from_context(context)
     url_path = client["basic"].item["links"]["partition"]
     params = {
+        **parse_qs(urlparse(url_path).query),
         "partition": 0,
         "field": "x",
     }
@@ -189,6 +193,7 @@ def test_redundant_query_parameters(context):
     client = from_context(context)
     url_path = client["basic"].item["links"]["partition"]
     original_params = {
+        **parse_qs(urlparse(url_path).query),
         "partition": 0,
         "field": "x",
         "column": "y",
diff --git a/tiled/client/array.py b/tiled/client/array.py
index dd12cbe39..063e7efbd 100644
--- a/tiled/client/array.py
+++ b/tiled/client/array.py
@@ -1,5 +1,6 @@
 import itertools
 from typing import Union
+from urllib.parse import parse_qs, urlparse
 
 import dask
 import dask.array
@@ -92,11 +93,13 @@ def _get_block(self, block, dtype, shape, slice=None):
             expected_shape = ",".join(map(str, shape))
         else:
             expected_shape = "scalar"
+        url_path = self.item["links"]["block"]
         content = handle_error(
             self.context.http_client.get(
-                self.item["links"]["block"],
+                url_path,
                 headers={"Accept": media_type},
                 params={
+                    **parse_qs(urlparse(url_path).query),
                    "block": ",".join(map(str, block)),
                    "expected_shape": expected_shape,
                },
@@ -172,12 +175,17 @@ def write(self, array):
         )
 
     def write_block(self, array, block, slice=...):
+        url_path = self.item["links"]["block"].format(*block)
+        query_params = {
+            **parse_qs(urlparse(url_path).query),
+            **params_from_slice(slice),
+        }
         handle_error(
             self.context.http_client.put(
-                self.item["links"]["block"].format(*block),
+                url_path,
                 content=array.tobytes(),
                 headers={"Content-Type": "application/octet-stream"},
-                params=params_from_slice(slice),
+                params=query_params,
             )
         )
 
@@ -241,13 +249,15 @@ def patch(self, array: NDArray, offset: Union[int, tuple[int, ...]], extend=Fals
         array_ = numpy.ascontiguousarray(array)
         if isinstance(offset, int):
             offset = (offset,)
+        url_path = self.item["links"]["full"]
         params = {
+            **parse_qs(urlparse(url_path).query),
             "offset": ",".join(map(str, offset)),
             "shape": ",".join(map(str, array_.shape)),
             "extend": bool(extend),
         }
         response = self.context.http_client.patch(
-            self.item["links"]["full"],
+            url_path,
             content=array_.tobytes(),
             headers={"Content-Type": "application/octet-stream"},
             params=params,
diff --git a/tiled/client/base.py b/tiled/client/base.py
index d93511e90..bc47414f6 100644
--- a/tiled/client/base.py
+++ b/tiled/client/base.py
@@ -37,7 +37,11 @@ def __len__(self):
             self.context.http_client.get(
                 self._link,
                 headers={"Accept": MSGPACK_MIME_TYPE},
-                params={"page[offset]": 0, "page[limit]": 0},
+                params={
+                    **parse_qs(urlparse(self._link).query),
+                    "page[offset]": 0,
+                    "page[limit]": 0,
+                },
            )
         ).json()
         length = content["meta"]["count"]
@@ -92,7 +96,11 @@ def __getitem__(self, item_):
         return result["data"]
 
     def delete_revision(self, n):
-        handle_error(self.context.http_client.delete(self._link, params={"number": n}))
+        handle_error(
+            self.context.http_client.delete(
+                self._link, params={**parse_qs(urlparse(self._link).query), "number": n}
+            )
+        )
 
 
 class BaseClient:
@@ -184,7 +192,10 @@ def refresh(self):
             self.context.http_client.get(
                 self.uri,
                 headers={"Accept": MSGPACK_MIME_TYPE},
-                params={"include_data_sources": self._include_data_sources},
+                params={
+                    **parse_qs(urlparse(self.uri).query),
+                    "include_data_sources": self._include_data_sources,
+                },
             )
         ).json()
         self._item = content["data"]
@@ -303,7 +314,11 @@ def asset_manifest(self, data_sources):
             if asset.is_directory:
                 manifest = handle_error(
                     self.context.http_client.get(
-                        manifest_link, params={"id": asset.id}
+                        manifest_link,
+                        params={
+                            **parse_qs(urlparse(manifest_link).query),
+                            "id": asset.id,
+                        },
                     )
                 ).json()["manifest"]
             else:
@@ -364,6 +379,7 @@ def raw_export(self, destination_directory=None, max_workers=4):
                             URL(
                                 bytes_link,
                                 params={
+                                    **parse_qs(urlparse(bytes_link).query),
                                     "id": asset.id,
                                     "relative_path": relative_path,
                                 },
@@ -378,7 +394,15 @@ def raw_export(self, destination_directory=None, max_workers=4):
                         ]
                     )
                 else:
-                    urls.append(URL(bytes_link, params={"id": asset.id}))
+                    urls.append(
+                        URL(
+                            bytes_link,
+                            params={
+                                **parse_qs(urlparse(bytes_link).query),
+                                "id": asset.id,
+                            },
+                        )
+                    )
                     paths.append(Path(base_path, ATTACHMENT_FILENAME_PLACEHOLDER))
         return download(self.context.http_client, urls, paths, max_workers=max_workers)
diff --git a/tiled/client/constructors.py b/tiled/client/constructors.py
index 8b2bc0104..bcb38a253 100644
--- a/tiled/client/constructors.py
+++ b/tiled/client/constructors.py
@@ -1,5 +1,6 @@
 import collections
 import collections.abc
+from urllib.parse import parse_qs, urlparse
 
 import httpx
 
@@ -140,7 +141,10 @@ def from_context(
             context.http_client.get(
                 item_uri,
                 headers={"Accept": MSGPACK_MIME_TYPE},
-                params={"include_data_sources": include_data_sources},
+                params={
+                    **parse_qs(urlparse(item_uri).query),
+                    "include_data_sources": include_data_sources,
+                },
            )
        ).json()
    except ClientError as err:
@@ -150,7 +154,7 @@ def from_context(
             and (context.http_client.auth is None)
         ):
             context.authenticate()
-            params = {}
+            params = parse_qs(urlparse(item_uri).query)
             if include_data_sources:
                 params["include_data_sources"] = True
             content = handle_error(
diff --git a/tiled/client/context.py b/tiled/client/context.py
index 3dc54feda..1c336d464 100644
--- a/tiled/client/context.py
+++ b/tiled/client/context.py
@@ -8,6 +8,7 @@
 import warnings
 from pathlib import Path
 from typing import Callable, Optional, Union
+from urllib.parse import parse_qs, urlparse
 
 import httpx
 import platformdirs
@@ -447,11 +448,15 @@ def revoke_api_key(self, first_eight):
         Identify the API key to be deleted by passing its first 8 characters.
         (Any additional characters passed will be truncated.)
""" + url_path = self.server_info["authentication"]["links"]["apikey"] handle_error( self.http_client.delete( - self.server_info["authentication"]["links"]["apikey"], + url_path, headers={"x-csrf": self.http_client.cookies["tiled_csrf"]}, - params={"first_eight": first_eight[:8]}, + params={ + **parse_qs(urlparse(url_path).query), + "first_eight": first_eight[:8], + }, ) ) @@ -818,11 +823,14 @@ def __init__(self, context): def list_principals(self, offset=0, limit=100): "List Principals (users and services) in the authenticaiton database." - params = dict(offset=offset, limit=limit) + url_path = f"{self.base_url}/auth/principal" + params = { + **parse_qs(urlparse(url_path).query), + "offset": offset, + "limit": limit, + } return handle_error( - self.context.http_client.get( - f"{self.base_url}/auth/principal", params=params - ) + self.context.http_client.get(url_path, params=params) ).json() def show_principal(self, uuid): @@ -869,11 +877,12 @@ def create_service_principal( role : str Specify the role (e.g. user or admin) """ + url_path = f"{self.base_url}/auth/principal" return handle_error( self.context.http_client.post( - f"{self.base_url}/auth/principal", + url_path, headers={"Accept": MSGPACK_MIME_TYPE}, - params={"role": role}, + params={**parse_qs(urlparse(url_path).query), "role": role}, ) ).json() @@ -891,11 +900,15 @@ def revoke_api_key(self, uuid, first_eight=None): Identify the API key to be deleted by passing its first 8 characters. (Any additional characters passed will be truncated.) """ + url_path = f"{self.base_url}/auth/principal/{uuid}/apikey" return handle_error( self.context.http_client.delete( - f"{self.base_url}/auth/principal/{uuid}/apikey", + url_path, headers={"Accept": MSGPACK_MIME_TYPE}, - params={"first_eight": first_eight[:8]}, + params={ + **parse_qs(urlparse(url_path).query), + "first_eight": first_eight[:8], + }, ) ) diff --git a/tiled/client/dataframe.py b/tiled/client/dataframe.py index c3fc1bc76..607487be5 100644 --- a/tiled/client/dataframe.py +++ b/tiled/client/dataframe.py @@ -1,3 +1,5 @@ +from urllib.parse import parse_qs, urlparse + import dask import dask.dataframe.core import httpx @@ -42,7 +44,10 @@ def _repr_pretty_(self, p, cycle): self.context.http_client.get( self.uri, headers={"Accept": MSGPACK_MIME_TYPE}, - params={"fields": "structure"}, + params={ + **parse_qs(urlparse(self.uri).query), + "fields": "structure", + }, timeout=TIMEOUT, ) ).json() @@ -79,7 +84,10 @@ def _ipython_key_completions_(self): self.context.http_client.get( self.uri, headers={"Accept": MSGPACK_MIME_TYPE}, - params={"fields": "structure"}, + params={ + **parse_qs(urlparse(self.uri).query), + "fields": "structure", + }, ) ).json() columns = content["data"]["attributes"]["structure"]["columns"] @@ -98,8 +106,8 @@ def _get_partition(self, partition, columns): See read_partition for a public version of this. 
""" - params = {"partition": partition} URL_PATH = self.item["links"]["partition"] + params = {**parse_qs(urlparse(URL_PATH).query), "partition": partition} url_length_for_get_request = len(URL_PATH) + sum( _EXTRA_CHARS_PER_ITEM + len(column) for column in (columns or ()) ) diff --git a/tiled/client/sparse.py b/tiled/client/sparse.py index 6383db40a..aec42ead6 100644 --- a/tiled/client/sparse.py +++ b/tiled/client/sparse.py @@ -1,3 +1,5 @@ +from urllib.parse import parse_qs, urlparse + import numpy import sparse from ndindex import ndindex @@ -47,11 +49,12 @@ def read_block(self, block, slice=None): # Fetch the data as an Apache Arrow table # with columns named dim0, dim1, ..., dimN, data. structure = self.structure() - params = params_from_slice(slice) + url_path = self.item["links"]["block"] + params = {**parse_qs(urlparse(url_path).query), **params_from_slice(slice)} params["block"] = ",".join(map(str, block)) content = handle_error( self.context.http_client.get( - self.item["links"]["block"], + url_path, headers={"Accept": APACHE_ARROW_FILE_MIME_TYPE}, params=params, ) @@ -74,10 +77,11 @@ def read(self, slice=None): # Fetch the data as an Apache Arrow table # with columns named dim0, dim1, ..., dimN, data. structure = self.structure() - params = params_from_slice(slice) + url_path = self.item["links"]["full"] + params = {**parse_qs(urlparse(url_path).query), **params_from_slice(slice)} content = handle_error( self.context.http_client.get( - self.item["links"]["full"], + url_path, headers={"Accept": APACHE_ARROW_FILE_MIME_TYPE}, params=params, ) diff --git a/tiled/client/utils.py b/tiled/client/utils.py index 8e3ae08a1..183999816 100644 --- a/tiled/client/utils.py +++ b/tiled/client/utils.py @@ -3,6 +3,7 @@ from collections.abc import Hashable from pathlib import Path from threading import Lock +from urllib.parse import parse_qs, urlparse from weakref import WeakValueDictionary import httpx @@ -139,7 +140,12 @@ def export_util(file, format, get, link, params): format = ".".join( suffix[1:] for suffix in Path(file).suffixes ) # e.g. "csv" - content = handle_error(get(link, params={"format": format, **params})).read() + content = handle_error( + get( + link, + params={**parse_qs(urlparse(link).query), "format": format, **params}, + ) + ).read() with open(file, "wb") as buffer: buffer.write(content) else: @@ -147,7 +153,12 @@ def export_util(file, format, get, link, params): if format is None: # We have no filepath to infer to format from. 
             raise ValueError("format must be specified when file is writeable buffer")
-        content = handle_error(get(link, params={"format": format, **params})).read()
+        content = handle_error(
+            get(
+                link,
+                params={**parse_qs(urlparse(link).query), "format": format, **params},
+            )
+        ).read()
         file.write(content)
diff --git a/tiled/client/xarray.py b/tiled/client/xarray.py
index 399c2a5ac..866d766ee 100644
--- a/tiled/client/xarray.py
+++ b/tiled/client/xarray.py
@@ -1,4 +1,5 @@
 import threading
+from urllib.parse import parse_qs, urlparse
 
 import dask
 import dask.array
@@ -176,7 +177,11 @@ def _fetch_variables__get(self, variables):
         content = handle_error(
             self.http_client.get(
                 self.link,
-                params={"format": APACHE_ARROW_FILE_MIME_TYPE, "field": variables},
+                params={
+                    **parse_qs(urlparse(self.link).query),
+                    "format": APACHE_ARROW_FILE_MIME_TYPE,
+                    "field": variables,
+                },
             )
         ).read()
         return deserialize_arrow(content)
@@ -186,7 +191,10 @@ def _fetch_variables__post(self, variables):
             self.http_client.post(
                 self.link,
                 json=variables,
-                params={"format": APACHE_ARROW_FILE_MIME_TYPE},
+                params={
+                    **parse_qs(urlparse(self.link).query),
+                    "format": APACHE_ARROW_FILE_MIME_TYPE,
+                },
             )
         ).read()
         return deserialize_arrow(content)

From 798b96f73022d4c48d2465b6e8a3e216c0072f13 Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 9 Dec 2024 10:46:51 -0500
Subject: [PATCH 8/9] FIX: initialization of httpx.AsyncClient

---
 tiled/_tests/test_routes.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tiled/_tests/test_routes.py b/tiled/_tests/test_routes.py
index f8a0b78c5..45b7dc8f5 100644
--- a/tiled/_tests/test_routes.py
+++ b/tiled/_tests/test_routes.py
@@ -1,5 +1,5 @@
 import pytest
-from httpx import AsyncClient
+from httpx import ASGITransport, AsyncClient
 from starlette.status import HTTP_200_OK
 
 from ..server.app import build_app
@@ -8,7 +8,7 @@
 @pytest.mark.parametrize("path", ["/", "/docs", "/healthz"])
 @pytest.mark.asyncio
 async def test_meta_routes(path):
-    app = build_app({})
-    async with AsyncClient(app=app, base_url="http://test") as client:
+    transport = ASGITransport(app=build_app({}))
+    async with AsyncClient(transport=transport, base_url="http://test") as client:
         response = await client.get(path)
         assert response.status_code == HTTP_200_OK

From afe91401b05d6e370870068075c934185771b2f5 Mon Sep 17 00:00:00 2001
From: Eugene M
Date: Mon, 9 Dec 2024 10:53:33 -0500
Subject: [PATCH 9/9] MNT: rename variable for consistency

---
 tiled/client/array.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tiled/client/array.py b/tiled/client/array.py
index 063e7efbd..f5f403614 100644
--- a/tiled/client/array.py
+++ b/tiled/client/array.py
@@ -176,7 +176,7 @@ def write(self, array):
 
     def write_block(self, array, block, slice=...):
         url_path = self.item["links"]["block"].format(*block)
-        query_params = {
+        params = {
             **parse_qs(urlparse(url_path).query),
             **params_from_slice(slice),
         }
@@ -185,7 +185,7 @@ def write_block(self, array, block, slice=...):
                 url_path,
                 content=array.tobytes(),
                 headers={"Content-Type": "application/octet-stream"},
-                params=query_params,
+                params=params,
             )
         )
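
Note on the pattern this series applies throughout tiled/client: the server returns fully formed links (e.g. "links": {"next": ".../search/raw?page[offset]=10"}), while httpx expects query parameters through its params= keyword, so each call site splits a link's embedded query string out with urlparse/parse_qs and merges it into the params dict rather than letting the explicit kwarg clobber it. The sketch below shows that merge in isolation. It is illustrative only: the helper name split_link_params and the example URL do not appear in the patches.

from urllib.parse import parse_qs, urlparse


def split_link_params(link, extra_params=None):
    # Parse the query string already embedded in the link; parse_qs maps each
    # key to a list of values, e.g. {"page[offset]": ["10"]}.
    parsed = urlparse(link)
    params = parse_qs(parsed.query)
    # Explicit parameters win over values parsed from the link, mirroring the
    # {**parse_qs(...), **params} ordering used in the patches.
    params.update(extra_params or {})
    # Strip the query from the URL so it is not sent twice.
    return parsed._replace(query="").geturl(), params


base_url, params = split_link_params(
    "https://tiled.example.com/api/v1/search/raw?page[offset]=10",
    {"page[limit]": 100, "fields": ""},
)
assert base_url == "https://tiled.example.com/api/v1/search/raw"
assert params == {"page[offset]": ["10"], "page[limit]": 100, "fields": ""}
# httpx.Client().get(base_url, params=params) then carries both the
# server-supplied offset and the client's own parameters.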