Fix: Regression due to httpx updates (#823)
* FIX: pass pagination query parameters as kwargs

* MNT: update changelog

* MNT: clean up

* FIX: keep query parameters in all container endpoints

* MNT: lint

* FIX: revert changes to query params

* FIX: fix remaining query parameters passed to httpx

* FIX: initialization of httpx.AsyncClient

* MNT: rename variable for consistency
genematx authored Dec 9, 2024
1 parent d0e8ac1 · commit 035b130
Showing 12 changed files with 147 additions and 47 deletions.
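The regression traces to a behaviour change in newer httpx (0.28 was current when this landed): when a request URL already carries a query string and `params=` is also supplied, httpx replaces the existing query instead of merging into it. Links handed out by the Tiled server often embed query parameters, and those were being silently dropped. The recurring fix in the diffs below is to parse the link's own query with `urlparse`/`parse_qs` and merge it into the per-request `params` dict. A minimal standalone sketch of that pattern (the URL here is hypothetical, not one of Tiled's actual links):

```python
# Minimal sketch of the pattern applied throughout tiled.client (not code from
# this commit): keep whatever query string the link already carries and layer
# the request-specific parameters on top of it.
from urllib.parse import parse_qs, urlparse

import httpx

# Hypothetical link of the kind the server returns in item["links"].
url_path = "https://tiled.example/api/v1/table/partition/basic?format=parquet"

params = {
    **parse_qs(urlparse(url_path).query),  # {"format": ["parquet"]}, preserved from the link
    "partition": 0,                        # request-specific parameters
    "field": "x",
}

request = httpx.Request("GET", url_path, params=params)
print(request.url)  # .../basic?format=parquet&partition=0&field=x
```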
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -6,6 +6,7 @@ Write the date in place of the "Unreleased" in the case a new version is release
## Unreleased

- Fix curl and httpie installation in docker image.
- Fix the construction of urls by passing query parameters as kwargs.

### Added

5 changes: 5 additions & 0 deletions tiled/_tests/test_dataframe.py
@@ -1,3 +1,5 @@
from urllib.parse import parse_qs, urlparse

import numpy
import pandas.testing
import pytest
@@ -150,6 +152,7 @@ def test_http_fetch_columns(context, http_method, link):
original_df = tree["wide"].read()
columns = list(original_df.columns)[::2] # Pick a subset of columns
params = {
**parse_qs(urlparse(url_path).query),
"partition": 0, # Used by /table/partition; ignored by /table/full
"column": columns,
}
@@ -176,6 +179,7 @@ def test_deprecated_query_parameter(context):
client = from_context(context)
url_path = client["basic"].item["links"]["partition"]
params = {
**parse_qs(urlparse(url_path).query),
"partition": 0,
"field": "x",
}
@@ -189,6 +193,7 @@ def test_redundant_query_parameters(context):
client = from_context(context)
url_path = client["basic"].item["links"]["partition"]
original_params = {
**parse_qs(urlparse(url_path).query),
"partition": 0,
"field": "x",
"column": "y",
6 changes: 3 additions & 3 deletions tiled/_tests/test_routes.py
@@ -1,5 +1,5 @@
import pytest
from httpx import AsyncClient
from httpx import ASGITransport, AsyncClient
from starlette.status import HTTP_200_OK

from ..server.app import build_app
@@ -8,7 +8,7 @@
@pytest.mark.parametrize("path", ["/", "/docs", "/healthz"])
@pytest.mark.asyncio
async def test_meta_routes(path):
app = build_app({})
async with AsyncClient(app=app, base_url="http://test") as client:
transport = ASGITransport(app=build_app({}))
async with AsyncClient(transport=transport, base_url="http://test") as client:
response = await client.get(path)
assert response.status_code == HTTP_200_OK
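The test_routes.py change reflects a second httpx update: the `app=...` shortcut on `AsyncClient` was deprecated and then removed, so ASGI apps are now wired in through an explicit `ASGITransport`. A self-contained sketch of the new initialization pattern, using a trivial FastAPI app as a stand-in for Tiled's `build_app({})`:

```python
import asyncio

import httpx
from fastapi import FastAPI

# Stand-in ASGI app; the real test uses tiled.server.app.build_app({}).
app = FastAPI()


@app.get("/healthz")
async def healthz():
    return {"status": "ok"}


async def main():
    # Route requests into the ASGI app via an explicit transport instead of the
    # removed AsyncClient(app=...) shortcut.
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(transport=transport, base_url="http://test") as client:
        response = await client.get("/healthz")
        assert response.status_code == 200


asyncio.run(main())
```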
18 changes: 14 additions & 4 deletions tiled/client/array.py
@@ -1,5 +1,6 @@
import itertools
from typing import Union
from urllib.parse import parse_qs, urlparse

import dask
import dask.array
@@ -92,11 +93,13 @@ def _get_block(self, block, dtype, shape, slice=None):
expected_shape = ",".join(map(str, shape))
else:
expected_shape = "scalar"
url_path = self.item["links"]["block"]
content = handle_error(
self.context.http_client.get(
self.item["links"]["block"],
url_path,
headers={"Accept": media_type},
params={
**parse_qs(urlparse(url_path).query),
"block": ",".join(map(str, block)),
"expected_shape": expected_shape,
},
@@ -172,12 +175,17 @@ def write(self, array):
)

def write_block(self, array, block, slice=...):
url_path = self.item["links"]["block"].format(*block)
params = {
**parse_qs(urlparse(url_path).query),
**params_from_slice(slice),
}
handle_error(
self.context.http_client.put(
self.item["links"]["block"].format(*block),
url_path,
content=array.tobytes(),
headers={"Content-Type": "application/octet-stream"},
params=params_from_slice(slice),
params=params,
)
)

@@ -241,13 +249,15 @@ def patch(self, array: NDArray, offset: Union[int, tuple[int, ...]], extend=Fals
array_ = numpy.ascontiguousarray(array)
if isinstance(offset, int):
offset = (offset,)
url_path = self.item["links"]["full"]
params = {
**parse_qs(urlparse(url_path).query),
"offset": ",".join(map(str, offset)),
"shape": ",".join(map(str, array_.shape)),
"extend": bool(extend),
}
response = self.context.http_client.patch(
self.item["links"]["full"],
url_path,
content=array_.tobytes(),
headers={"Content-Type": "application/octet-stream"},
params=params,
50 changes: 39 additions & 11 deletions tiled/client/base.py
@@ -3,6 +3,7 @@
from copy import copy, deepcopy
from dataclasses import asdict
from pathlib import Path
from urllib.parse import parse_qs, urlparse

import json_merge_patch
import jsonpatch
@@ -36,7 +37,11 @@ def __len__(self):
self.context.http_client.get(
self._link,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"page[offset]": 0, "page[limit]": 0},
params={
**parse_qs(urlparse(self._link).query),
"page[offset]": 0,
"page[limit]": 0,
},
)
).json()
length = content["meta"]["count"]
@@ -54,7 +59,11 @@ def __getitem__(self, item_):
self.context.http_client.get(
self._link,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"page[offset]": offset, "page[limit]": limit},
params={
**parse_qs(urlparse(self._link).query),
"page[offset]": offset,
"page[limit]": limit,
},
)
).json()
(result,) = content["data"]
@@ -70,25 +79,28 @@ def __getitem__(self, item_):
limit = item_.stop - offset
params = f"?page[offset]={offset}&page[limit]={limit}"

next_page = self._link + params
next_page_url = self._link + params
result = []
while next_page is not None:
while next_page_url is not None:
content = handle_error(
self.context.http_client.get(
next_page,
headers={"Accept": MSGPACK_MIME_TYPE},
next_page_url, headers={"Accept": MSGPACK_MIME_TYPE}
)
).json()
if len(result) == 0:
result = content.copy()
else:
result["data"].append(content["data"])
next_page = content["links"]["next"]
next_page_url = content["links"]["next"]

return result["data"]

def delete_revision(self, n):
handle_error(self.context.http_client.delete(self._link, params={"number": n}))
handle_error(
self.context.http_client.delete(
self._link, params={**parse_qs(urlparse(self._link).query), "number": n}
)
)


class BaseClient:
@@ -180,7 +192,10 @@ def refresh(self):
self.context.http_client.get(
self.uri,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"include_data_sources": self._include_data_sources},
params={
**parse_qs(urlparse(self.uri).query),
"include_data_sources": self._include_data_sources,
},
)
).json()
self._item = content["data"]
@@ -299,7 +314,11 @@ def asset_manifest(self, data_sources):
if asset.is_directory:
manifest = handle_error(
self.context.http_client.get(
manifest_link, params={"id": asset.id}
manifest_link,
params={
**parse_qs(urlparse(manifest_link).query),
"id": asset.id,
},
)
).json()["manifest"]
else:
@@ -360,6 +379,7 @@ def raw_export(self, destination_directory=None, max_workers=4):
URL(
bytes_link,
params={
**parse_qs(urlparse(bytes_link).query),
"id": asset.id,
"relative_path": relative_path,
},
@@ -374,7 +394,15 @@
]
)
else:
urls.append(URL(bytes_link, params={"id": asset.id}))
urls.append(
URL(
bytes_link,
params={
**parse_qs(urlparse(bytes_link).query),
"id": asset.id,
},
)
)
paths.append(Path(base_path, ATTACHMENT_FILENAME_PLACEHOLDER))
return download(self.context.http_client, urls, paths, max_workers=max_workers)

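The base.py changes apply the same treatment to the revisions and asset endpoints: the pagination parameters `page[offset]` and `page[limit]` are merged with whatever query the link already carries, and the loop variable is renamed to `next_page_url`, which is requested as-is because the server-provided `links["next"]` URL already encodes the next cursor. The repeated idiom could be captured in a small helper; a hypothetical sketch (`merged_params` is not part of the commit, and the URL is made up):

```python
from urllib.parse import parse_qs, urlparse


def merged_params(link: str, extra: dict) -> dict:
    """Return the query parameters already embedded in ``link``, updated with ``extra``."""
    return {**parse_qs(urlparse(link).query), **extra}


# Usage mirroring the pagination requests in base.py:
link = "https://tiled.example/api/v1/metadata/x/revisions?sort=-time"
params = merged_params(link, {"page[offset]": 0, "page[limit]": 10})
# -> {"sort": ["-time"], "page[offset]": 0, "page[limit]": 10}
```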
8 changes: 6 additions & 2 deletions tiled/client/constructors.py
@@ -1,5 +1,6 @@
import collections
import collections.abc
from urllib.parse import parse_qs, urlparse

import httpx

@@ -140,7 +141,10 @@ def from_context(
context.http_client.get(
item_uri,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"include_data_sources": include_data_sources},
params={
**parse_qs(urlparse(item_uri).query),
"include_data_sources": include_data_sources,
},
)
).json()
except ClientError as err:
@@ -150,7 +154,7 @@
and (context.http_client.auth is None)
):
context.authenticate()
params = {}
params = (parse_qs(urlparse(item_uri).query),)
if include_data_sources:
params["include_data_sources"] = True
content = handle_error(
20 changes: 14 additions & 6 deletions tiled/client/container.py
@@ -6,6 +6,7 @@
import time
import warnings
from dataclasses import asdict
from urllib.parse import parse_qs, urlparse

import entrypoints
import httpx
@@ -172,11 +173,13 @@ def __len__(self):
if now < deadline:
# Used the cached value and do not make any request.
return length
link = self.item["links"]["search"]
content = handle_error(
self.context.http_client.get(
self.item["links"]["search"],
link,
headers={"Accept": MSGPACK_MIME_TYPE},
params={
**parse_qs(urlparse(link).query),
"fields": "",
**self._queries_as_params,
**self._sorting_params,
@@ -211,6 +214,7 @@ def __iter__(self, _ignore_inlined_contents=False):
next_page_url,
headers={"Accept": MSGPACK_MIME_TYPE},
params={
**parse_qs(urlparse(next_page_url).query),
"fields": "",
**self._queries_as_params,
**self._sorting_params,
@@ -258,11 +262,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
}
if self._include_data_sources:
params["include_data_sources"] = True
link = self.item["links"]["search"]
content = handle_error(
self.context.http_client.get(
self.item["links"]["search"],
link,
headers={"Accept": MSGPACK_MIME_TYPE},
params=params,
params={**parse_qs(urlparse(link).query), **params},
)
).json()
self._cached_len = (
@@ -312,11 +317,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
params = {}
if self._include_data_sources:
params["include_data_sources"] = True
link = self_link + "".join(f"/{key}" for key in keys[i:])
content = handle_error(
self.context.http_client.get(
self_link + "".join(f"/{key}" for key in keys[i:]),
link,
headers={"Accept": MSGPACK_MIME_TYPE},
params=params,
params={**parse_qs(urlparse(link).query), **params},
)
).json()
except ClientError as err:
@@ -372,6 +378,7 @@ def _keys_slice(self, start, stop, direction, _ignore_inlined_contents=False):
next_page_url,
headers={"Accept": MSGPACK_MIME_TYPE},
params={
**parse_qs(urlparse(next_page_url).query),
"fields": "",
**self._queries_as_params,
**sorting_params,
@@ -420,6 +427,7 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
item_counter = itertools.count(start)
while next_page_url is not None:
params = {
**parse_qs(urlparse(next_page_url).query),
**self._queries_as_params,
**sorting_params,
}
@@ -436,7 +444,6 @@
content["meta"]["count"],
time.monotonic() + LENGTH_CACHE_TTL,
)

for item in content["data"]:
if stop is not None and next(item_counter) == stop:
return
@@ -495,6 +502,7 @@ def distinct(
link,
headers={"Accept": MSGPACK_MIME_TYPE},
params={
**parse_qs(urlparse(link).query),
"metadata": metadata_keys,
"structure_families": structure_families,
"specs": specs,
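container.py follows the same shape for search and iteration: each page's `links["next"]` URL arrives with the pagination cursor in its query string, so that query is folded into `params` before the client's own filter and sort parameters are layered on top. A hedged sketch of the loop, with `fetch_json` standing in for Tiled's `handle_error(...).json()` plumbing:

```python
from urllib.parse import parse_qs, urlparse

import httpx


def fetch_json(client: httpx.Client, url: str, params: dict) -> dict:
    # Stand-in for Tiled's handle_error(...).json() error handling.
    response = client.get(url, params=params)
    response.raise_for_status()
    return response.json()


def iter_items(client: httpx.Client, search_link: str, extra_params: dict):
    next_page_url = search_link
    while next_page_url is not None:
        content = fetch_json(
            client,
            next_page_url,
            params={
                # Keep the cursor (page[offset], page[limit], ...) that the server
                # encoded into links["next"], then add the query/sort parameters.
                **parse_qs(urlparse(next_page_url).query),
                **extra_params,
            },
        )
        yield from content["data"]
        next_page_url = content["links"]["next"]
```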
[Diffs for the remaining 5 changed files are not shown here.]