diff --git a/.env-sample b/.env-sample index 84aba3084..7acbffd53 100644 --- a/.env-sample +++ b/.env-sample @@ -13,14 +13,4 @@ export POSTGRES_PORT= # CHORD-specific export CHORD_URL= -export CHORD_PERMISSIONS= export SERVICE_ID= - -# CanDIG-specific -export INSIDE_CANDIG=true -export CANDIG_AUTHORIZATION=OPA -export CANDIG_OPA_URL=http://0.0.0.0:8181 -export CACHE_TIME=0 -export ROOT_CA= -export CANDIG_OPA_VERSION= -export PERMISSIONS_SECRET= diff --git a/README.md b/README.md index 68c5c4f6e..86b8e8f61 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,6 @@ A Phenopackets-based clinical and phenotypic metadata service for the Bento plat - [Standalone PostGres db and AdMiner](#standalone-postgres-db-and-adminer) - [Authentication](#authentication) - [Note on Permissions](#note-on-permissions) - - [Authorization inside CanDIG](#authorization-inside-candig) - [Developing](#developing) - [Branching](#branching) - [Tests](#tests) @@ -153,16 +152,6 @@ POSTGRES_PORT=5432 # CHORD/Bento-specific variables: # - If set, used for setting an allowed host & other API-calling purposes CHORD_URL= -# - If true, will enforce permissions. Do not run with this not set to true in production! -# Defaults to (not DEBUG) -CHORD_PERMISSIONS= - -# CanDIG-specific variables: -CANDIG_AUTHORIZATION= -CANDIG_OPA_URL= -CANDIG_OPA_SECRET= -CANDIG_OPA_SITE_ADMIN_KEY= -INSIDE_CANDIG= ``` ## Standalone Postgres db and Adminer @@ -223,15 +212,6 @@ functions as follows: This can be turned off with the `CHORD_PERMISSIONS` environment variable and/or Django setting, or with the `AUTH_OVERRIDE` Django setting. -### Authorization inside CanDIG - -When ran inside the CanDIG context, to properly implement authorization you'll -have to do the following: - -1. Make sure the CHORD_PERMISSIONS is set to "false". -2. Set CANDIG_AUTHORIZATION to "OPA". -3. Configure CANDIG_OPA_URL and CANDIG_OPA_SECRET. - ## Developing diff --git a/chord_metadata_service/authz/middleware.py b/chord_metadata_service/authz/middleware.py index d4abda9a7..644b385c6 100644 --- a/chord_metadata_service/authz/middleware.py +++ b/chord_metadata_service/authz/middleware.py @@ -1,5 +1,3 @@ -import re - from bento_lib.auth.middleware.django import DjangoAuthMiddleware from django.conf import settings @@ -10,32 +8,10 @@ "AuthzMiddleware", ] -pattern_get = re.compile(r"^GET$") - -# --- List of patterns to apply authz middleware to -------------------------------------------------------------------- -# - Note: as we gradually roll out authz across Katus, this list will expand. Anything not covered here is assumed to -# be protected by the gateway. 
-include_pattern_public = ( - re.compile(r"^(GET|POST|PUT|DELETE)$"), - re.compile(r"^/api/(projects|datasets|public|public_overview|public_search_fields|public_rules)$"), -) -include_pattern_workflows = (pattern_get, re.compile(r"^(/workflows$|/workflows/)")) -include_pattern_si = (pattern_get, re.compile(r"^/service-info")) -include_pattern_schemas = (pattern_get, re.compile(r"^/schemas/.+$")) -include_pattern_schema_types = (pattern_get, re.compile(r"^/extra_properties_schema_types$")) -# ---------------------------------------------------------------------------------------------------------------------- - authz_middleware = DjangoAuthMiddleware( bento_authz_service_url=settings.BENTO_AUTHZ_SERVICE_URL, debug_mode=settings.DEBUG, enabled=settings.BENTO_AUTHZ_ENABLED, - include_request_patterns=( - include_pattern_public, - include_pattern_workflows, - include_pattern_si, - include_pattern_schemas, - include_pattern_schema_types, - ), logger=logger, ) diff --git a/chord_metadata_service/authz/permissions.py b/chord_metadata_service/authz/permissions.py index 25b46d2c1..9ab38cd73 100644 --- a/chord_metadata_service/authz/permissions.py +++ b/chord_metadata_service/authz/permissions.py @@ -1,5 +1,9 @@ -from django.conf import settings +from asgiref.sync import async_to_sync from rest_framework.permissions import BasePermission, SAFE_METHODS +from rest_framework.request import Request as DrfRequest + +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel + from .middleware import authz_middleware @@ -7,8 +11,7 @@ "BentoAllowAny", "BentoAllowAnyReadOnly", "BentoDeferToHandler", - "ReadOnly", - "OverrideOrSuperUserOnly", + "BentoDataTypePermission", ] @@ -36,13 +39,17 @@ def has_permission(self, _request, _view): return True # we return true, like AllowAny, but we don't mark authz as done - so we defer it to the handler -class ReadOnly(BasePermission): - def has_permission(self, request, view): - return request.method in SAFE_METHODS - - -class OverrideOrSuperUserOnly(BasePermission): - def has_permission(self, request, view): - # If in CHORD production, is_superuser will be set by remote user headers. 
-        # TODO: Configuration: Allow configurable read-only APIs or other external access
-        return settings.AUTH_OVERRIDE or request.user.is_superuser
+class BentoDataTypePermission(BasePermission):
+    @async_to_sync
+    async def has_permission(self, request: DrfRequest, view) -> bool:
+        # view: BentoAuthzScopedModelViewSet (cannot annotate due to circular import)
+        if view.data_type is None:
+            raise NotImplementedError("BentoAuthzScopedModelViewSet subclasses must set data_type")
+        return await view.request_has_data_type_permissions(request)
+
+    @async_to_sync
+    async def has_object_permission(self, request: DrfRequest, view, obj: BaseScopeableModel):
+        # view: BentoAuthzScopedModelViewSet (cannot annotate due to circular import)
+        # if this is called, has_permission has already been called and handled the overall action type
+        # TODO: eliminate duplicate scope check somehow without enabling permissions on objects outside of scope
+        return await view.obj_is_in_request_scope(request, obj)
diff --git a/chord_metadata_service/authz/tests/helpers.py b/chord_metadata_service/authz/tests/helpers.py
index 9ec625723..a0f2be332 100644
--- a/chord_metadata_service/authz/tests/helpers.py
+++ b/chord_metadata_service/authz/tests/helpers.py
@@ -1,32 +1,24 @@
+import json
+
 from aioresponses import aioresponses
 from bento_lib.auth.types import EvaluationResultMatrix
-from rest_framework.test import APITestCase
+from rest_framework.test import APITransactionTestCase
 from typing import Literal
 
 from ..types import DataPermissionsDict
 
 __all__ = [
-    "mock_authz_eval_one_result",
-    "mock_authz_eval_result",
     "DTAccessLevel",
     "AuthzAPITestCase",
     "PermissionsTestCaseMixin",
 ]
 
 
-def mock_authz_eval_one_result(m: aioresponses, result: bool):
-    m.post("http://authz.local/policy/evaluate", payload={"result": [[result]]})
-
-
-def mock_authz_eval_result(m: aioresponses, result: EvaluationResultMatrix | list[list[bool]]):
-    m.post("http://authz.local/policy/evaluate", payload={"result": result})
-
-
 DTAccessLevel = Literal["none", "bool", "counts", "full"]
 
 
-class AuthzAPITestCase(APITestCase):
+class AuthzAPITestCase(APITransactionTestCase):
     # data type permissions: bool, counts, data
     dt_none_eval_res = [[False, False, False]]
     dt_bool_eval_res = [[True, False, False]]
@@ -42,44 +34,65 @@ class AuthzAPITestCase(APITestCase):
 
     # ------------------------------------------------------------------------------------------------------------------
 
-    def _one_authz_post(self, authz_res: bool, url: str, *args, **kwargs):
+    @staticmethod
+    def mock_authz_eval_one_result(m: aioresponses, result: bool):
+        m.post("http://authz.local/policy/evaluate", payload={"result": [[result]]})
+
+    @staticmethod
+    def mock_authz_eval_result(m: aioresponses, result: EvaluationResultMatrix | list[list[bool]]):
+        m.post("http://authz.local/policy/evaluate", payload={"result": result})
+
+    # ------------------------------------------------------------------------------------------------------------------
+
+    def _one_authz_generic(
+        self, method: Literal["get", "post", "put", "patch", "delete"], authz_res: bool, url: str, *args, **kwargs
+    ):
+        if "json" in kwargs:
+            kwargs["data"] = json.dumps(kwargs["json"])
+            del kwargs["json"]
+
+        if method in ("post", "put", "patch") and "format" not in kwargs:
+            kwargs["content_type"] = "application/json"
+
         with aioresponses() as m:
-            mock_authz_eval_one_result(m, authz_res)
-            return self.client.post(url, *args, content_type="application/json", **kwargs)
+            self.mock_authz_eval_one_result(m, authz_res)
+            return 
getattr(self.client, method)(url, *args, **kwargs) + + def _one_authz_get(self, authz_res: bool, url: str, *args, **kwargs): + return self._one_authz_generic("get", authz_res, url, *args, **kwargs) + + def one_authz_get(self, url: str, *args, **kwargs): + """Mocks a single True response from the authorization service and executes a GET request.""" + return self._one_authz_get(True, url, *args, **kwargs) + + def one_no_authz_get(self, url: str, *args, **kwargs): + """Mocks a single False response from the authorization service and executes a GET request.""" + return self._one_authz_get(False, url, *args, **kwargs) + + def _one_authz_post(self, authz_res: bool, url: str, *args, **kwargs): + return self._one_authz_generic("post", authz_res, url, *args, **kwargs) def one_authz_post(self, url: str, *args, **kwargs): - """ - Mocks a single True response from the authorization service and executes a JSON POST request. - """ + """Mocks a single True response from the authorization service and executes a JSON POST request.""" return self._one_authz_post(True, url, *args, **kwargs) def one_no_authz_post(self, url: str, *args, **kwargs): - """ - Mocks a single False response from the authorization service and executes a JSON POST request. - """ + """Mocks a single False response from the authorization service and executes a JSON POST request.""" return self._one_authz_post(False, url, *args, **kwargs) def _one_authz_put(self, authz_res: bool, url: str, *args, **kwargs): - with aioresponses() as m: - mock_authz_eval_one_result(m, authz_res) - return self.client.put(url, *args, content_type="application/json", **kwargs) + return self._one_authz_generic("put", authz_res, url, *args, **kwargs) def one_authz_put(self, url: str, *args, **kwargs): - """ - Mocks a single True response from the authorization service and executes a JSON PUT request. - """ + """Mocks a single True response from the authorization service and executes a JSON PUT request.""" return self._one_authz_put(True, url, *args, **kwargs) def one_no_authz_put(self, url: str, *args, **kwargs): - """ - Mocks a single False response from the authorization service and executes a JSON PUT request. 
- """ + """Mocks a single False response from the authorization service and executes a JSON PUT request.""" return self._one_authz_put(False, url, *args, **kwargs) def _one_authz_patch(self, authz_res: bool, url: str, *args, **kwargs): - with aioresponses() as m: - mock_authz_eval_one_result(m, authz_res) - return self.client.patch(url, *args, content_type="application/json", **kwargs) + return self._one_authz_generic("patch", authz_res, url, *args, **kwargs) def one_authz_patch(self, url: str, *args, **kwargs): """ @@ -89,12 +102,12 @@ def one_authz_patch(self, url: str, *args, **kwargs): def _one_authz_delete(self, authz_res: bool, url: str, *args, **kwargs): with aioresponses() as m: - mock_authz_eval_one_result(m, authz_res) + self.mock_authz_eval_one_result(m, authz_res) return self.client.delete(url, *args, **kwargs) async def _async_one_authz_delete(self, authz_res: bool, url: str, *args, **kwargs): with aioresponses() as m: - mock_authz_eval_one_result(m, authz_res) + self.mock_authz_eval_one_result(m, authz_res) return await self.async_client.delete(url, *args, **kwargs) def one_authz_delete(self, url: str, *args, **kwargs): @@ -119,12 +132,12 @@ def one_no_authz_delete(self, url: str, *args, **kwargs): def dt_get(self, level: Literal["none", "bool", "counts", "full"], url: str, *args, **kwargs): with aioresponses() as m: - mock_authz_eval_result(m, self.dt_levels[level]) # data type permissions: bool, counts, data + self.mock_authz_eval_result(m, self.dt_levels[level]) # data type permissions: bool, counts, data return self.client.get(url, *args, **kwargs) def dt_post(self, level: Literal["none", "bool", "counts", "full"], url: str, *args, **kwargs): with aioresponses() as m: - mock_authz_eval_result(m, self.dt_levels[level]) # data type permissions: bool, counts, data + self.mock_authz_eval_result(m, self.dt_levels[level]) # data type permissions: bool, counts, data return self.client.post(url, *args, **kwargs) def dt_authz_none_get(self, url: str, *args, **kwargs): @@ -136,6 +149,9 @@ def dt_authz_bool_get(self, url: str, *args, **kwargs): def dt_authz_counts_get(self, url: str, *args, **kwargs): return self.dt_get("counts", url, *args, **kwargs) + def dt_authz_counts_post(self, url: str, *args, **kwargs): + return self.dt_post("counts", url, *args, **kwargs) + def dt_authz_full_get(self, url: str, *args, **kwargs): return self.dt_get("full", url, *args, **kwargs) diff --git a/chord_metadata_service/authz/tests/test_viewset.py b/chord_metadata_service/authz/tests/test_viewset.py new file mode 100644 index 000000000..898ac762d --- /dev/null +++ b/chord_metadata_service/authz/tests/test_viewset.py @@ -0,0 +1,88 @@ +import uuid + +from aioresponses import aioresponses +from django.http.request import HttpRequest +from rest_framework.request import Request as DrfRequest + +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase +from chord_metadata_service.authz.viewset import BentoAuthzScopedModelGenericListViewSet +from chord_metadata_service.chord.tests.helpers import ProjectTestCase +from chord_metadata_service.discovery.exceptions import DiscoveryScopeException +from chord_metadata_service.phenopackets import models as ph_m +from chord_metadata_service.phenopackets.tests import constants as ph_c + + +class TestNotImplViewSet(BentoAuthzScopedModelGenericListViewSet): + pass + + +class AuthzBaseViewsetTest(AuthzAPITestCase, ProjectTestCase): + + def setUp(self): + super().setUp() + self.individual = ph_m.Individual.objects.create(**ph_c.VALID_INDIVIDUAL_1) + + 
self.mock_project_req = HttpRequest() + self.mock_project_req.GET["project"] = str(self.project.identifier) + self.mock_project_drf_req = DrfRequest(self.mock_project_req) + + def test_get_queryset_not_impl(self): + with self.assertRaises(NotImplementedError): + TestNotImplViewSet().get_queryset() + + def test_permission_from_request_none(self): + vs = TestNotImplViewSet() + vs.action = "fubar" + mock_drf_req = DrfRequest(HttpRequest()) + self.assertIsNone(vs.permission_from_request(mock_drf_req)) + + async def test_obj_is_in_request_scope(self): + mock_req = HttpRequest() + mock_req.GET["project"] = "does-not-exist" + mock_drf_req = DrfRequest(mock_req) + + with self.assertRaises(DiscoveryScopeException): + await TestNotImplViewSet.obj_is_in_request_scope(mock_drf_req, self.individual) + + mock_req_2 = HttpRequest() + mock_req_2.GET["project"] = str(uuid.uuid4()) + mock_drf_req_2 = DrfRequest(mock_req_2) + + with self.assertRaises(DiscoveryScopeException): + await TestNotImplViewSet.obj_is_in_request_scope(mock_drf_req_2, self.individual) + + async def test_request_has_data_type_permissions(self): + vs = TestNotImplViewSet() + vs.action = "list" + with aioresponses() as m: + self.mock_authz_eval_one_result(m, True) + self.assertTrue(await vs.request_has_data_type_permissions(self.mock_project_drf_req, None)) + + async def test_request_has_data_type_permissions_false(self): + vs = TestNotImplViewSet() + vs.action = "list" + with aioresponses() as m: + self.mock_authz_eval_one_result(m, False) + self.assertFalse(await vs.request_has_data_type_permissions(self.mock_project_drf_req, None)) + + async def test_request_has_data_type_permissions_action_dne(self): + vs = TestNotImplViewSet() + vs.action = "does-not-exist" # no permissions implemented for this action + self.assertFalse(await vs.request_has_data_type_permissions(self.mock_project_drf_req, None)) + + async def test_request_has_data_type_permissions_scope_dne(self): + mock_req = HttpRequest() + mock_req.GET["project"] = "does-not-exist" + mock_drf_req = DrfRequest(mock_req) + + vs = TestNotImplViewSet() + + with self.assertRaises(DiscoveryScopeException): + await vs.request_has_data_type_permissions(mock_drf_req, None) + + mock_req_2 = HttpRequest() + mock_req_2.GET["project"] = str(uuid.uuid4()) + mock_drf_req_2 = DrfRequest(mock_req_2) + + with self.assertRaises(DiscoveryScopeException): + await vs.request_has_data_type_permissions(mock_drf_req_2, None) diff --git a/chord_metadata_service/authz/viewset.py b/chord_metadata_service/authz/viewset.py new file mode 100644 index 000000000..f104c59ed --- /dev/null +++ b/chord_metadata_service/authz/viewset.py @@ -0,0 +1,79 @@ +from bento_lib.auth.permissions import P_QUERY_DATA, Permission, P_INGEST_DATA, P_DELETE_DATA +from rest_framework import mixins, viewsets +from rest_framework.request import Request as DrfRequest + +from chord_metadata_service.discovery.scope import get_request_discovery_scope, ValidatedDiscoveryScope +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel +from chord_metadata_service.logger import logger + +from .middleware import authz_middleware +from .permissions import BentoDataTypePermission + +__all__ = [ + "BentoAuthzScopedModelGenericListViewSet", + "BentoAuthzScopedModelViewSet", +] + + +class BentoAuthzScopedModelGenericListViewSet(viewsets.GenericViewSet, mixins.ListModelMixin): + """ + An extension of the DRF generic viewset which adds utility functions for Bento Django permissions classes. 
These work together to properly implement scoped Bento permissions based on the request being made.
+
+    Security note: Subclasses MUST implement a get_queryset(...) which returns a model-scoped, request-based queryset!
+
+    """
+
+    data_type: str | None = None
+    permission_classes = (BentoDataTypePermission,)
+
+    def get_queryset(self):
+        raise NotImplementedError("Subclasses must implement scoped get_queryset")
+
+    @staticmethod
+    async def obj_is_in_request_scope(request: DrfRequest, obj: BaseScopeableModel) -> bool:
+        # DiscoveryScopeException - project/dataset does not exist, or non-UUID request for a project/dataset
+        #  - will be an API exception and handled by the katsu exception handler
+        return await obj.scope_contains_object(await get_request_discovery_scope(request))
+
+    def permission_from_request(self, request: DrfRequest) -> Permission | None:
+        if self.action in ("list", "retrieve"):
+            return P_QUERY_DATA
+        elif self.action in ("create", "update", "partial_update"):  # PATCH (partial_update) also edits data
+            return P_INGEST_DATA
+        elif self.action == "destroy":
+            return P_DELETE_DATA
+        else:
+            logger.error("viewset permission_from_request(...) is not implemented for action: %s", self.action)
+            return None
+
+    async def request_has_data_type_permissions(
+        self, request: DrfRequest, scope: ValidatedDiscoveryScope | None = None
+    ) -> bool:
+        # DiscoveryScopeException - project/dataset does not exist, or non-UUID request for a project/dataset
+        #  - will be an API exception and handled by the katsu exception handler
+        scope_: ValidatedDiscoveryScope = scope or await get_request_discovery_scope(request)
+
+        p: Permission | None = self.permission_from_request(request)
+        if p is None:
+            return False
+
+        return await authz_middleware.async_evaluate_one(
+            request, scope_.as_authz_resource(data_type=self.data_type), p, mark_authz_done=True
+        )
+
+
+class BentoAuthzScopedModelViewSet(
+    mixins.CreateModelMixin,
+    mixins.RetrieveModelMixin,
+    mixins.UpdateModelMixin,
+    mixins.DestroyModelMixin,
+    BentoAuthzScopedModelGenericListViewSet
+):
+    """
+    This class is equivalent to the DRF viewsets.ModelViewSet class, except with our
+    BentoAuthzScopedModelGenericListViewSet replacing the base viewsets.GenericViewSet. In this way, we get all the
+    scoping / permissions helper functions.
+
+    Security note: Subclasses MUST implement a get_queryset(...) which returns a model-scoped queryset!
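+
+    A minimal subclass sketch (illustrative only; the Individual model, its serializer, and the exact filter
+    below are stand-in assumptions, not names this module provides):
+
+        class IndividualViewSet(BentoAuthzScopedModelViewSet):
+            data_type = DATA_TYPE_PHENOPACKET  # e.g. from chord_metadata_service.chord.data_types
+            serializer_class = IndividualSerializer
+
+            def get_queryset(self):
+                # MUST narrow results to the project/dataset scope carried by the request
+                return Individual.objects.filter(phenopackets__dataset_id=...)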
+ """ + pass diff --git a/chord_metadata_service/chord/api_views.py b/chord_metadata_service/chord/api_views.py index 31e3dde86..247ec2ae0 100644 --- a/chord_metadata_service/chord/api_views.py +++ b/chord_metadata_service/chord/api_views.py @@ -36,7 +36,6 @@ ProjectSerializer, DatasetSerializer, ) -from .filters import AuthorizedDatasetFilter logger = logging.getLogger(__name__) @@ -80,12 +79,6 @@ class ProjectViewSet(CHORDPublicModelViewSet): queryset = Project.objects.all().order_by("identifier") serializer_class = ProjectSerializer - def list(self, request, *args, **kwargs): - # For now, we don't have a view:project type permission - we can always view - # TODO: check permissions for project viewing instead - authz.mark_authz_done(request) - return super().list(request, *args, **kwargs) - @async_to_sync async def create(self, request, *args, **kwargs): if not (await authz.async_evaluate_one(request, RESOURCE_EVERYTHING, P_CREATE_PROJECT)): @@ -135,7 +128,6 @@ class DatasetViewSet(CHORDPublicModelViewSet): """ filter_backends = [DjangoFilterBackend] - filterset_class = AuthorizedDatasetFilter lookup_url_kwarg = "dataset_id" serializer_class = DatasetSerializer @@ -275,3 +267,42 @@ class ProjectJsonSchemaViewSet(CHORDPublicModelViewSet): queryset = ProjectJsonSchema.objects.all().order_by("project_id") serializer_class = ProjectJsonSchemaSerializer + + @async_to_sync + async def create(self, request, *args, **kwargs): + project_id = request.data.get("project") + + if project_id is None: + return bad_request(request, "No project ID in request body") # side effect: sets authz done flag + + if not (await authz.async_evaluate_one(request, build_resource(project=project_id), P_EDIT_PROJECT)): + return forbidden(request) # side effect: sets authz done flag + + authz.mark_authz_done(request) + return await sync_to_async(super().create)(request, *args, **kwargs) + + @async_to_sync + async def update(self, request, *args, **kwargs): + try: + pjs = await self.get_obj_async() + except Http404: + return not_found(request) # side effect: sets authz done flag + + if not (await authz.async_evaluate_one(request, build_resource(project=str(pjs.project_id)), P_EDIT_PROJECT)): + return forbidden(request) # side effect: sets authz done flag + + authz.mark_authz_done(request) + return await sync_to_async(super().update)(request, *args, **kwargs) + + @async_to_sync + async def destroy(self, request, *args, **kwargs): + try: + pjs = await self.get_obj_async() + except Http404: + return not_found(request) # side effect: sets authz done flag + + if not (await authz.async_evaluate_one(request, build_resource(project=str(pjs.project_id)), P_EDIT_PROJECT)): + return forbidden(request) # side effect: sets authz done flag + + authz.mark_authz_done(request) + return await sync_to_async(super().destroy)(request, *args, **kwargs) diff --git a/chord_metadata_service/chord/export/cbioportal.py b/chord_metadata_service/chord/export/cbioportal.py index 22692514b..6662dfce9 100644 --- a/chord_metadata_service/chord/export/cbioportal.py +++ b/chord_metadata_service/chord/export/cbioportal.py @@ -72,12 +72,12 @@ REGEXP_INVALID_FOR_ID = re.compile(r"[^a-zA-Z0-9_\.\-]") -def study_export(get_path: Callable[[str], str], dataset_id: str): +async def study_export(get_path: Callable[[str], str], dataset_id: str): """Export a given Project as a cBioPortal study""" # TODO: a Dataset is a Study (associated with a publication), not a Project! 
try: - dataset = Dataset.objects.get(identifier=dataset_id) + dataset = await Dataset.objects.aget(identifier=dataset_id) except Dataset.DoesNotExist: raise ExportError(f"no dataset exists with ID {dataset_id}") @@ -90,16 +90,18 @@ def study_export(get_path: Callable[[str], str], dataset_id: str): # Export patients. with open(get_path(PATIENT_DATA_FILENAME), "w", newline="\n") as file_patient: # Note: plural in `phenopackets` is intentional (related_name property in model) - indiv = Individual.objects.filter(phenopackets__dataset_id=dataset.identifier) - individual_export(indiv, file_patient) + indiv = Individual.objects.filter(phenopackets__dataset_id=dataset.identifier).prefetch_related("phenopackets") + await individual_export(indiv, file_patient) with open(get_path(PATIENT_META_FILENAME), "w", newline="\n") as file_patient_meta: clinical_meta_export(cbio_study_id, PATIENT_DATATYPE, file_patient_meta) # Export samples with open(get_path(SAMPLE_DATA_FILENAME), "w", newline="\n") as file_sample: - sampl = pm.Biosample.objects.filter(phenopacket__dataset_id=dataset.identifier) - sample_export(sampl, file_sample) + biosamples = ( + pm.Biosample.objects.filter(phenopacket__dataset_id=dataset.identifier).prefetch_related("phenopacket_set") + ) + await sample_export(biosamples, file_sample) with open(get_path(SAMPLE_META_FILENAME), "w", newline="\n") as file_sample_meta: clinical_meta_export(cbio_study_id, SAMPLE_DATATYPE, file_sample_meta) @@ -109,12 +111,15 @@ def study_export(get_path: Callable[[str], str], dataset_id: str): open(get_path(CASE_LIST_SEQUENCED), "w", newline="\n") as file_case_list: exp_res = ( ExperimentResult.objects + .prefetch_related("experiment_set") .filter(experiment__dataset_id=dataset.identifier, file_format="MAF") .annotate(biosample_id=F("experiment__biosample")) ) - write_maf_list(exp_res, file_maf_list) - case_list_export(cbio_study_id, exp_res, file_case_list) + exp_res_list = [r async for r in exp_res] + + write_maf_list(exp_res_list, file_maf_list) + case_list_export(cbio_study_id, exp_res_list, file_case_list) with open(get_path(MUTATION_META_FILENAME), 'w', newline='\n') as file_mutation_meta: mutation_meta_export(cbio_study_id, file_mutation_meta) @@ -171,7 +176,7 @@ def clinical_meta_export(study_id: str, datatype: str, file_handle: TextIO): write_dict_in_cbioportal_format(lines, file_handle) -def individual_export(results, file_handle: TextIO): +async def individual_export(results, file_handle: TextIO): """ Renders Individuals as a clinical_patient text file suitable for importing by cBioPortal. @@ -192,7 +197,7 @@ def individual_export(results, file_handle: TextIO): individuals = [{ 'id': sanitize_id(individual.id), 'sex': individual.sex, - } for individual in results] + } async for individual in results] columns = list(individuals[0].keys()) headers = individual_to_patient_header(columns) @@ -202,7 +207,7 @@ def individual_export(results, file_handle: TextIO): dict_writer.writerows(individuals) -def sample_export(results, file_handle: TextIO): +async def sample_export(results, file_handle: TextIO): """ Renders Biosamples as a clinical_sample text file suitable for importing by cBioPortal. 
@@ -238,11 +243,11 @@
     """
     samples = []
 
-    for sample in results:
-        if sample.individual is None:
+    async for sample in results:
+        if sample.individual_id is None:
             continue
 
-        subject_id = sample.individual
+        subject_id = sample.individual_id
 
         sample_obj = {
             "individual_id": sanitize_id(subject_id),
diff --git a/chord_metadata_service/chord/export/views.py b/chord_metadata_service/chord/export/views.py
index fe66c0eb2..2386a7c35 100644
--- a/chord_metadata_service/chord/export/views.py
+++ b/chord_metadata_service/chord/export/views.py
@@ -2,32 +2,33 @@
 import logging
 import traceback
 
+from adrf.decorators import api_view as async_api_view
+from bento_lib.auth.permissions import P_EXPORT_DATA
+from bento_lib.auth.resources import RESOURCE_EVERYTHING
 from django.http import FileResponse
 from jsonschema import Draft7Validator
-from rest_framework.decorators import api_view, permission_classes
-from rest_framework.permissions import AllowAny
+from rest_framework import status
+from rest_framework.decorators import permission_classes
 from rest_framework.response import Response
-from rest_framework.request import Request
+from rest_framework.request import Request as DrfRequest
 
-
-from chord_metadata_service.chord.schemas import EXPORT_SCHEMA
 from bento_lib.responses import errors
 
+from chord_metadata_service.authz.middleware import authz_middleware
+from chord_metadata_service.authz.permissions import BentoDeferToHandler
+from chord_metadata_service.chord.schemas import EXPORT_SCHEMA
 from .metadata import EXPORT_FORMAT_FUNCTION_MAP, EXPORT_FORMAT_OBJECT_TYPE_MAP, EXPORT_FORMATS, EXPORT_OBJECT_TYPE
 from .utils import ExportError, ExportFileContext
 
-
 BENTO_EXPORT_SCHEMA_VALIDATOR = Draft7Validator(EXPORT_SCHEMA)
 
 logger = logging.getLogger(__name__)
 
 
-# Mounted on /private/, so will get protected anyway; this allows for access from WES
-# TODO: Ugly and misleading permissions
-@api_view(["POST"])
-@permission_classes([AllowAny])
-def export(request: Request):
+@async_api_view(["POST"])
+@permission_classes([BentoDeferToHandler])
+async def export(request: DrfRequest):
     """Export data from Katsu
 
     Exports the requested data object (e.g. a Dataset or a Project) in the given
@@ -41,6 +42,10 @@
     """
 
     # Private endpoints are protected by URL namespace, not by Django permissions.
+    res = await authz_middleware.async_evaluate_one(request, RESOURCE_EVERYTHING, P_EXPORT_DATA, mark_authz_done=True)
+    if not res:
+        return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN)
+
     # TODO: Schema for OpenAPI doc
 
     logger.info(f"Received export request: {json.dumps(request.data)}")
@@ -56,10 +61,10 @@
 
     object_type: str = request.data["object_type"]  # 'project', 'dataset',...
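+    # Look up the model class for the requested object type; existence is then checked via the async ORM
+    # (aexists()), since this view now runs in an async context.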
model = EXPORT_OBJECT_TYPE[object_type]["model"] - if not model.objects.filter(identifier=object_id).exists(): + if not await model.objects.filter(identifier=object_id).aexists(): return Response(errors.bad_request_error( f"{object_type.capitalize()} with ID {object_id} does not exist"), - status=400 + status=status.HTTP_400_BAD_REQUEST, ) fmt = request.data["format"].strip() @@ -68,13 +73,13 @@ def export(request: Request): if fmt not in EXPORT_FORMATS: # Check that the workflow exists return Response(errors.bad_request_error( f"Export in format {fmt} is not implemented"), - status=400 + status=status.HTTP_400_BAD_REQUEST, ) if object_type not in EXPORT_FORMAT_OBJECT_TYPE_MAP[fmt]: return Response(errors.bad_request_error( f"Exporting entities of type {object_type} in format {fmt} is not implemented"), - status=400 + status=status.HTTP_400_BAD_REQUEST, ) # TODO: secure the output_path value @@ -82,7 +87,7 @@ def export(request: Request): try: with ExportFileContext(output_path, object_id) as file_export: # Pass a callable to generate the proper file paths within the export context. - EXPORT_FORMAT_FUNCTION_MAP[fmt](file_export.get_path, object_id) + await EXPORT_FORMAT_FUNCTION_MAP[fmt](file_export.get_path, object_id) # If no output path parameter has been provided, the generated export # is returned as an attachment to the Response and everything will @@ -95,14 +100,14 @@ def export(request: Request): return FileResponse(open(tarfile, "rb"), as_attachment=True) except ExportError as e: - return Response(errors.bad_request_error(f"Encountered export error: {e}"), status=400) + return Response(errors.bad_request_error(f"Encountered export error: {e}"), status=status.HTTP_400_BAD_REQUEST) except Exception as e: # Encountered some other error from the export attempt, return a somewhat detailed message logger.error(f"Encountered an exception while processing an export attempt:\n{traceback.format_exc()}") return Response(errors.internal_server_error( f"Encountered an exception while processing an export attempt (error: {repr(e)}"), - status=500 + status=status.HTTP_500_INTERNAL_SERVER_ERROR ) - return Response(status=204) + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/chord_metadata_service/chord/filters.py b/chord_metadata_service/chord/filters.py deleted file mode 100644 index 0524d8faa..000000000 --- a/chord_metadata_service/chord/filters.py +++ /dev/null @@ -1,54 +0,0 @@ -import django_filters -import logging - -logger = logging.getLogger(__name__) - -# HELPERS - - -def filter_datasets(qs, name, value): - """ - Filters by datasets. - If value is None, returns all objects regardless of datasets. - Otherwise, return objects that are in the specified datasets. - """ - if value: - lookup = "__".join([name, "in"]) - return qs.filter(**{lookup: value.split(",")}).distinct() - else: - return qs - - -# TODO authorize_datasets(): remove the code == GRU filter, urgently. -def authorize_datasets(qs, name, value): - """ - Filter by authorized datasets. - If value is 'NO_DATASETS_AUTHORIZED', returns no objects. - Otherwise, returns objects that are in the specified datasets. - """ - logger.warn(f"value is {value}") - if value == "NO_DATASETS_AUTHORIZED": - lookup = "__".join([name, "in"]) - return qs.filter(**{lookup: []}) - else: - lookup = "__".join([name, "in"]) - - # TODO THE FILTER BELOW IS JANKY; NEEDS TO BE REMOVED. - # It is only here for the ClinDIG 4.3 demo. 
- temp = qs.filter(**{lookup: value.split(",")}).distinct()\ - .filter(data_use__consent_code__primary_category__code='GRU') - for t in temp: - logger.warn(str(t.data_use)) - - return temp - - -class AuthorizedDatasetFilter(django_filters.rest_framework.FilterSet): - datasets = django_filters.CharFilter( - method=filter_datasets, field_name="dataset__title", - label="Datasets" - ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, field_name="dataset__title", - label="Authorized datasets" - ) diff --git a/chord_metadata_service/chord/ingest/views.py b/chord_metadata_service/chord/ingest/views.py index 06939517e..200f4a4ca 100644 --- a/chord_metadata_service/chord/ingest/views.py +++ b/chord_metadata_service/chord/ingest/views.py @@ -3,21 +3,32 @@ import traceback import uuid +from adrf.decorators import api_view +from asgiref.sync import sync_to_async +from bento_lib.auth.permissions import P_INGEST_DATA +from bento_lib.auth.resources import build_resource from django.core.exceptions import ValidationError from django.db import transaction -from rest_framework.decorators import api_view, permission_classes -from rest_framework.permissions import AllowAny +from rest_framework import status +from rest_framework.decorators import permission_classes from rest_framework.request import Request as DrfRequest from rest_framework.response import Response from typing import Any, Callable from bento_lib.responses import errors -from chord_metadata_service.logger import logger +from chord_metadata_service.authz.middleware import authz_middleware +from chord_metadata_service.authz.permissions import BentoDeferToHandler from chord_metadata_service.chord.models import Dataset +from chord_metadata_service.logger import logger from . import experiments from . 
import WORKFLOW_INGEST_FUNCTION_MAP from .exceptions import IngestError +from ..data_types import DATA_TYPE_EXPERIMENT +from ..workflows.metadata import workflow_set + + +DATASET_DNE = "Dataset does not exist" def call_ingest_function_and_handle(fn: Callable[[Any, str], Any], data, dataset_id: str) -> Response: @@ -29,7 +40,7 @@ def call_ingest_function_and_handle(fn: Callable[[Any, str], Any], data, dataset except IngestError as e: err = f"Encountered ingest error: {e}\n{traceback.format_exc()}" logger.error(err) - return Response(errors.bad_request_error(err), status=400) + return Response(errors.bad_request_error(err), status=status.HTTP_400_BAD_REQUEST) except ValidationError as e: validation_errors = tuple(e.error_list if hasattr(e, "error_list") else e.error_dict.items()) @@ -44,31 +55,65 @@ def call_ingest_function_and_handle(fn: Callable[[Any, str], Any], data, dataset logger.error(f"Encountered an exception while processing an ingest attempt:\n{traceback.format_exc()}") return Response(errors.internal_server_error(f"Encountered an exception while processing an ingest attempt " f"(error: {repr(e)}"), status=500) - return Response(status=204) + return Response(status=status.HTTP_204_NO_CONTENT) @api_view(["POST"]) -@permission_classes([AllowAny]) -def ingest_derived_experiment_results(request: DrfRequest, dataset_id: str): - return call_ingest_function_and_handle(experiments.ingest_derived_experiment_results, request.data, dataset_id) +@permission_classes([BentoDeferToHandler]) +async def ingest_derived_experiment_results(request: DrfRequest, dataset_id: str): + dataset = await Dataset.objects.filter(identifier=dataset_id).afirst() + + if not dataset: + logger.error(f"Error encountered while ingesting derived experiment results: {DATASET_DNE}") + authz_middleware.mark_authz_done(request) + return Response(errors.bad_request_error(DATASET_DNE), status=status.HTTP_400_BAD_REQUEST) + + if not await authz_middleware.async_evaluate_one( + request, + build_resource(str(dataset.project_id), str(dataset.identifier), DATA_TYPE_EXPERIMENT), + P_INGEST_DATA, + mark_authz_done=True, + ): + return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN) + + return await sync_to_async(call_ingest_function_and_handle)( + experiments.ingest_derived_experiment_results, request.data, dataset_id + ) @api_view(["POST"]) -@permission_classes([AllowAny]) -def ingest_into_dataset(request: DrfRequest, dataset_id: str, workflow_id: str): +@permission_classes([BentoDeferToHandler]) +async def ingest_into_dataset(request: DrfRequest, dataset_id: str, workflow_id: str): logger.info(f"Received a {workflow_id} ingest request for dataset {dataset_id}.") # Check that the workflow exists if workflow_id not in WORKFLOW_INGEST_FUNCTION_MAP: - err = f"Ingestion workflow ID {workflow_id} does not exist" - logger.error(f"Error encountered while ingesting into dataset {dataset_id}: {err}") - return Response(errors.bad_request_error(err), status=400) + err = "Ingestion workflow ID does not exist" + logger.error(f"Error encountered while ingesting into dataset: {err}") + authz_middleware.mark_authz_done(request) + return Response(errors.bad_request_error(err), status=status.HTTP_400_BAD_REQUEST) + + dataset = await Dataset.objects.filter(identifier=dataset_id).afirst() - if not Dataset.objects.filter(identifier=dataset_id).exists(): - err = f"Dataset with ID {dataset_id} does not exist" + if not dataset: logger.error( - f"Error encountered while ingesting into dataset {dataset_id} with workflow 
{workflow_id}: {err}") - return Response(errors.bad_request_error(err), status=400) - dataset_id = str(uuid.UUID(dataset_id)) # Normalize dataset ID to UUID's str format. + f"Error encountered while ingesting into dataset with workflow {workflow_id}: {DATASET_DNE}") + authz_middleware.mark_authz_done(request) + return Response(errors.bad_request_error(DATASET_DNE), status=status.HTTP_400_BAD_REQUEST) - return call_ingest_function_and_handle(WORKFLOW_INGEST_FUNCTION_MAP[workflow_id], request.data, dataset_id) + workflow = workflow_set.get_workflow(workflow_id) + + dataset_id = str(uuid.UUID(dataset_id)) # Normalize dataset ID to UUID's str format. + if not ( + await authz_middleware.async_evaluate_one( + request, + build_resource(str(dataset.project_id), dataset_id, workflow.data_type), + P_INGEST_DATA, + mark_authz_done=True, + ) + ): + return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN) + + return await sync_to_async(call_ingest_function_and_handle)( + WORKFLOW_INGEST_FUNCTION_MAP[workflow_id], request.data, dataset_id + ) diff --git a/chord_metadata_service/chord/models.py b/chord_metadata_service/chord/models.py index f09add395..9096640ea 100644 --- a/chord_metadata_service/chord/models.py +++ b/chord_metadata_service/chord/models.py @@ -3,12 +3,12 @@ from django.core.exceptions import ValidationError from django.db import models from django.utils import timezone +from chord_metadata_service.discovery.schemas import DISCOVERY_SCHEMA from chord_metadata_service.patients.models import Individual from chord_metadata_service.phenopackets.models import Biosample, Phenopacket from chord_metadata_service.resources.models import Resource from chord_metadata_service.restapi.validators import JsonSchemaValidator from chord_metadata_service.restapi.models import SchemaType -from chord_metadata_service.discovery.schemas import DISCOVERY_SCHEMA __all__ = ["Project", "Dataset", "ProjectJsonSchema"] diff --git a/chord_metadata_service/chord/tests/constants.py b/chord_metadata_service/chord/tests/constants.py index 809d8fb98..6df1c3808 100644 --- a/chord_metadata_service/chord/tests/constants.py +++ b/chord_metadata_service/chord/tests/constants.py @@ -3,9 +3,12 @@ __all__ = [ "VALID_DATA_USE_1", "VALID_PROJECT_1", + "VALID_PROJECT_2", "VALID_DATS_CREATORS", "INVALID_DATS_CREATORS", "valid_dataset_1", + "valid_dataset_2", + "PROJECT_JSON_SCHEMA_MISSING_PROJECT", "valid_project_json_schema", "valid_phenotypic_feature", "dats_dataset", @@ -41,6 +44,11 @@ "description": "Some description", } +VALID_PROJECT_2 = { + "title": "Project 2", + "description": "Some description too", +} + VALID_DATS_CREATORS = [ { "name": "1000 Genomes Project" @@ -85,10 +93,26 @@ def valid_dataset_1(project_id): "title": "Dataset 1", "description": "Test Dataset", "data_use": VALID_DATA_USE_1, - "project": project_id + "project": project_id, + } + + +def valid_dataset_2(project_id): + return { + "title": "Dataset 2", + "description": "Test Dataset Too", + "data_use": VALID_DATA_USE_1, + "project": project_id, } +PROJECT_JSON_SCHEMA_MISSING_PROJECT = { + "required": False, + "schema_type": SchemaType.PHENOPACKET, + "json_schema": DEFAULT_PROJECT_JSON_SCHEMA, +} + + def valid_project_json_schema(project_id: str, schema_type=SchemaType.PHENOPACKET, required: bool = False, diff --git a/chord_metadata_service/chord/tests/helpers.py b/chord_metadata_service/chord/tests/helpers.py index b5fadb478..87b07841f 100644 --- a/chord_metadata_service/chord/tests/helpers.py +++ 
b/chord_metadata_service/chord/tests/helpers.py
@@ -1,5 +1,3 @@
-import json
-
 from django.db.models import Model
 from django.test import TestCase
 from django.urls import reverse
@@ -7,7 +5,7 @@
 from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase
 from chord_metadata_service.chord.models import Dataset, Project
 from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1, VALID_PROJECT_1
-from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope
+from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope
 from chord_metadata_service.restapi.utils import remove_computed_properties
 
 
@@ -44,7 +42,7 @@ class ModelFieldsTestMixin(TestCase):
     """
     def assert_model_fields_list_equal(self, db_list: list[Model], ground_truths: list[dict],
-                                       ignore_fields: list[str], field_maps={}):
+                                       ignore_fields: list[str], field_maps: dict | None = None):
         """
         List wrapper for assert_model_fields_equal.
         """
@@ -59,18 +57,18 @@ def assert_model_fields_list_equal(self, db_list: list[Model], ground_truths: li
         )
 
     def assert_model_fields_equal(self, db_obj: Model, ground_truth: dict,
-                                  ignore_fields: list[str], field_maps={}):
+                                  ignore_fields: list[str], field_maps: dict | None = None):
         """
         Compares the fields of db_obj (excluding ignore_fields, if any) with the values of ground_truth.
         """
-        MODEL_FIELDS = [f.name for f in db_obj._meta.get_fields() if f.name not in ignore_fields]
-        for field in MODEL_FIELDS:
+        model_fields = [f.name for f in db_obj._meta.get_fields() if f.name not in ignore_fields]
+        for field in model_fields:
             gt_value = ground_truth.get(field)
             if gt_value and field == "extra_properties":
                 # remove non-ingested computed properties from gt to compare
                 gt_value = remove_computed_properties(gt_value)
             # Apply field mapping, if any
-            model_field = field_maps.get(field, field)
+            model_field = (field_maps or {}).get(field, field)
             if gt_value:
                 # we expect the db_obj to contain this ground truth value
                 self.assertEqual(getattr(db_obj, model_field), gt_value)
@@ -79,5 +77,5 @@ def assert_model_fields_equal(self, db_obj: Model, ground_truth: dict,
 
 class AuthzAPITestCaseWithProjectJSON(AuthzAPITestCase):
     def setUp(self) -> None:
         super().setUp()
-        r = self.one_authz_post(reverse("project-list"), data=json.dumps(VALID_PROJECT_1))
+        r = self.one_authz_post(reverse("project-list"), json=VALID_PROJECT_1)
         self.project = r.json()
diff --git a/chord_metadata_service/chord/tests/test_api.py b/chord_metadata_service/chord/tests/test_api.py
index e7c5676d7..5b989e0dc 100644
--- a/chord_metadata_service/chord/tests/test_api.py
+++ b/chord_metadata_service/chord/tests/test_api.py
@@ -1,4 +1,3 @@
-import json
 import uuid
 
 from django.urls import reverse
@@ -9,6 +8,7 @@
     dats_dataset,
     VALID_DATS_CREATORS,
     INVALID_DATS_CREATORS,
+    PROJECT_JSON_SCHEMA_MISSING_PROJECT,
     valid_project_json_schema,
 )
 from .helpers import ProjectTestCase, AuthzAPITestCaseWithProjectJSON
@@ -40,7 +40,7 @@ def setUp(self) -> None:
 
     def test_create_project(self):
         for i, p in enumerate(self.valid_payloads, 1):
-            r = self.one_authz_post(reverse("project-list"), data=json.dumps(p))
+            r = self.one_authz_post(reverse("project-list"), json=p)
             self.assertEqual(r.status_code, status.HTTP_201_CREATED)
             self.assertEqual(Project.objects.count(), i)
             self.assertEqual(Project.objects.get(title=p["title"]).description, p["description"])
@@ -49,14 +49,23 @@
 
     def test_create_project_invalid(self):
         for p in self.invalid_payloads:
-            r = self.one_authz_post(reverse("project-list"), 
data=json.dumps(p)) + r = self.one_authz_post(reverse("project-list"), json=p) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_create_project_forbidden(self): - r = self.one_no_authz_post(reverse("project-list"), data=json.dumps(self.valid_payloads[0])) + r = self.one_no_authz_post(reverse("project-list"), json=self.valid_payloads[0]) self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) +class ListProjectAPITest(AuthzAPITestCaseWithProjectJSON): + + def test_list_projects(self): + r = self.client.get("/api/projects") + self.assertEqual(r.status_code, status.HTTP_200_OK) + res = r.json() + self.assertEqual(len(res["results"]), 1) + + class UpdateProjectTest(AuthzAPITestCaseWithProjectJSON): def setUp(self) -> None: super().setUp() @@ -67,16 +76,16 @@ def without_times(d: dict) -> dict: return {k: v for k, v in d.items() if k not in ("updated", "created")} def test_project_update(self): - r = self.one_authz_put(f"/api/projects/{self.project['identifier']}", data=json.dumps(self.update_body)) + r = self.one_authz_put(f"/api/projects/{self.project['identifier']}", json=self.update_body) self.assertEqual(r.status_code, status.HTTP_200_OK) self.assertDictEqual(self.without_times(r.json()), self.without_times(self.update_body)) def test_project_update_not_found(self): - r = self.one_authz_put("/api/projects/not-found", data=json.dumps(self.update_body)) + r = self.one_authz_put("/api/projects/not-found", json=self.update_body) self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND) def test_project_update_forbidden(self): - r = self.one_no_authz_put(f"/api/projects/{self.project['identifier']}", data=json.dumps(self.update_body)) + r = self.one_no_authz_put(f"/api/projects/{self.project['identifier']}", json=self.update_body) self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) @@ -86,7 +95,7 @@ def test_delete_project(self): self.assertEqual(r.status_code, status.HTTP_204_NO_CONTENT) def test_delete_project_not_found(self): - r = self.client.delete("/api/projects/not-found") + r = self.one_authz_delete("/api/projects/not-found") self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND) def test_delete_project_forbidden(self): @@ -140,7 +149,7 @@ def setUp(self) -> None: def test_create_dataset(self): for i, d in enumerate(self.valid_payloads, 1): - r = self.one_authz_post("/api/datasets", data=json.dumps(d)) + r = self.one_authz_post("/api/datasets", json=d) self.assertEqual(r.status_code, status.HTTP_201_CREATED) self.assertEqual(Dataset.objects.count(), i) @@ -151,20 +160,20 @@ def test_create_dataset(self): def test_create_dataset_invalid(self): for d in self.invalid_payloads: - r = self.one_authz_post("/api/datasets", data=json.dumps(d)) + r = self.one_authz_post("/api/datasets", json=d) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_create_dataset_forbidden(self): - r = self.one_no_authz_post("/api/datasets", data=json.dumps(self.valid_payloads[0])) + r = self.one_no_authz_post("/api/datasets", json=self.valid_payloads[0]) self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) def test_dats(self): payload = {**self.dats_valid_payload, 'dats_file': {}} - r = self.one_authz_post('/api/datasets', data=json.dumps(payload)) + r = self.one_authz_post('/api/datasets', json=payload) self.assertEqual(r.status_code, status.HTTP_201_CREATED) - r_invalid = self.one_authz_post("/api/datasets", data=json.dumps(self.dats_invalid_payload)) + r_invalid = self.one_authz_post("/api/datasets", json=self.dats_invalid_payload) 
+        r_invalid = self.one_authz_post("/api/datasets", json=self.dats_invalid_payload)
self.assertEqual(r_invalid.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(Dataset.objects.count(), 1) @@ -187,7 +196,7 @@ def test_dats(self): def test_dats_as_attachment(self): payload = {**self.dats_valid_payload, 'dats_file': {}} - r = self.one_authz_post('/api/datasets', data=json.dumps(payload)) + r = self.one_authz_post('/api/datasets', json=payload) self.assertEqual(r.status_code, status.HTTP_201_CREATED) dataset_id = Dataset.objects.first().identifier @@ -222,15 +231,15 @@ def test_resources(self): "iri_prefix": "http://purl.obolibrary.org/obo/NCBITaxon_", } - r = self.client.post("/api/resources", data=json.dumps(resource), content_type="application/json") + r = self.one_authz_post("/api/resources", json=resource) self.assertEqual(r.status_code, status.HTTP_201_CREATED) r = self.one_authz_post( "/api/datasets", - data=json.dumps({ + json={ **valid_dataset_1(self.project["identifier"]), "additional_resources": [resource["id"]], - }), + }, ) self.assertEqual(r.status_code, status.HTTP_201_CREATED) @@ -263,14 +272,14 @@ def setUp(self): } def test_update_dataset(self): - r = self.one_authz_put(f"/api/datasets/{self.dataset.identifier}", data=json.dumps(self.valid_update)) + r = self.one_authz_put(f"/api/datasets/{self.dataset.identifier}", json=self.valid_update) self.assertEqual(r.status_code, status.HTTP_200_OK) self.dataset.refresh_from_db() self.assertEqual(self.dataset.title, self.valid_update["title"]) def test_update_dataset_partial(self): r = self.one_authz_patch( - f"/api/datasets/{self.dataset.identifier}", data=json.dumps({"title": self.valid_update["title"]}) + f"/api/datasets/{self.dataset.identifier}", json={"title": self.valid_update["title"]} ) self.assertEqual(r.status_code, status.HTTP_200_OK) self.dataset.refresh_from_db() @@ -279,10 +288,10 @@ def test_update_dataset_partial(self): def test_update_dataset_changed_project(self): r = self.one_authz_put( f"/api/datasets/{self.dataset.identifier}", - data=json.dumps({ + json={ **self.valid_update, "project": str(self.project_2.identifier), - }) + } ) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) res = r.json() @@ -292,7 +301,7 @@ def test_update_dataset_changed_project(self): def test_update_dataset_bad_dats_json(self): r = self.one_authz_put( f"/api/datasets/{self.dataset.identifier}", - data=json.dumps({**self.valid_update, "dats_file": "asdf"}), # asdf is not JSON + json={**self.valid_update, "dats_file": "asdf"}, # asdf is not JSON ) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) res = r.json() @@ -306,11 +315,11 @@ def test_update_dataset_bad_dats_json(self): ) def test_update_dataset_forbidden(self): - r = self.one_no_authz_put(f"/api/datasets/{self.dataset.identifier}", data=json.dumps(self.valid_update)) + r = self.one_no_authz_put(f"/api/datasets/{self.dataset.identifier}", json=self.valid_update) self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) def test_update_dataset_not_found(self): - r = self.one_authz_put(f"/api/datasets/{uuid.uuid4()}", data=json.dumps(self.valid_update)) + r = self.one_authz_put(f"/api/datasets/{uuid.uuid4()}", json=self.valid_update) self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND) @@ -344,24 +353,75 @@ def setUp(self) -> None: self.project_json_schema_invalid_payload = valid_project_json_schema(project_id="an-id-that-does-not-exist") def test_create_project_json_schema(self): - r = self.client.post('/api/project_json_schemas', - data=json.dumps(self.project_json_schema_valid_payload), - content_type="application/json") - 
r_invalid = self.client.post('/api/project_json_schemas',
-                                      data=json.dumps(self.project_json_schema_invalid_payload),
-                                      content_type="application/json")
+        r = self.one_authz_post("/api/project_json_schemas", json=self.project_json_schema_valid_payload)
+        r_invalid = self.one_authz_post("/api/project_json_schemas", json=self.project_json_schema_invalid_payload)
         self.assertEqual(r.status_code, status.HTTP_201_CREATED)
         self.assertEqual(r_invalid.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(ProjectJsonSchema.objects.count(), 1)
 
+    def test_create_project_json_schema_missing_project(self):
+        r = self.one_authz_post("/api/project_json_schemas", json=PROJECT_JSON_SCHEMA_MISSING_PROJECT)
+        self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST)
+
+    def test_create_project_json_schema_forbidden(self):
+        r = self.one_no_authz_post("/api/project_json_schemas", json=self.project_json_schema_valid_payload)
+        self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN)
+
     def test_create_constraint(self):
-        r = self.client.post('/api/project_json_schemas',
-                             data=json.dumps(self.project_json_schema_valid_payload),
-                             content_type="application/json")
+        r = self.one_authz_post("/api/project_json_schemas", json=self.project_json_schema_valid_payload)
         self.assertEqual(r.status_code, status.HTTP_201_CREATED)
-        r_duplicate = self.client.post('/api/project_json_schemas',
-                                       data=json.dumps(self.project_json_schema_valid_payload),
-                                       content_type="application/json")
+        r_duplicate = self.one_authz_post("/api/project_json_schemas", json=self.project_json_schema_valid_payload)
         # used to be an IntegrityError raised; upgrade to DRF 3.15 made this a 400:
         self.assertEqual(r_duplicate.status_code, status.HTTP_400_BAD_REQUEST)
+
+
+class UpdateProjectJsonSchema(AuthzAPITestCaseWithProjectJSON):
+
+    def setUp(self) -> None:
+        super().setUp()
+
+        self.pjs = self.one_authz_post(
+            "/api/project_json_schemas", json=valid_project_json_schema(project_id=self.project["identifier"])
+        ).json()
+
+        upd = valid_project_json_schema(project_id=self.project["identifier"])
+        upd["required"] = True
+        self.upd = upd
+
+    def test_update_project_json_schema(self):
+        self.assertEqual(ProjectJsonSchema.objects.get(id=self.pjs['id']).required, False)
+        r = self.one_authz_put(f"/api/project_json_schemas/{self.pjs['id']}", json=self.upd)
+        self.assertEqual(r.status_code, status.HTTP_200_OK)
+        self.assertEqual(ProjectJsonSchema.objects.get(id=self.pjs['id']).required, True)
+
+    def test_update_project_json_schema_not_found(self):
+        # no authz mock or request body needed: the 404 is returned before the permissions check,
+        # and the plain test client would silently drop a json= kwarg anyway
+        r = self.client.put("/api/project_json_schemas/does-not-exist")
+        self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_update_project_json_schema_forbidden(self):
+        r = self.one_no_authz_put(f"/api/project_json_schemas/{self.pjs['id']}", json=self.upd)
+        self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN)
+
+
+class DeleteProjectJsonSchema(AuthzAPITestCaseWithProjectJSON):
+
+    def setUp(self) -> None:
+        super().setUp()
+
+        self.pjs = self.one_authz_post(
+            "/api/project_json_schemas", json=valid_project_json_schema(project_id=self.project["identifier"])
+        ).json()
+
+    def test_delete_project_json_schema(self):
+        r = self.one_authz_delete(f"/api/project_json_schemas/{self.pjs['id']}")
+        self.assertEqual(r.status_code, status.HTTP_204_NO_CONTENT)
+
+    def test_delete_project_json_schema_not_found(self):
+        r = self.one_authz_delete("/api/project_json_schemas/does-not-exist")
+        self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND)
+
+    def 
test_delete_project_json_schema_forbidden(self): + r = self.one_no_authz_delete(f"/api/project_json_schemas/{self.pjs['id']}") + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) diff --git a/chord_metadata_service/chord/tests/test_api_bento_datasets.py b/chord_metadata_service/chord/tests/test_api_bento_datasets.py index 85318ecb0..2f9329d08 100644 --- a/chord_metadata_service/chord/tests/test_api_bento_datasets.py +++ b/chord_metadata_service/chord/tests/test_api_bento_datasets.py @@ -1,4 +1,3 @@ -import json import uuid import re @@ -246,7 +245,7 @@ def test_dataset_update(self): "title": "Updated title" } - r = self.one_authz_put(url, data=json.dumps(payload)) + r = self.one_authz_put(url, json=payload) self.assertEqual(r.status_code, status.HTTP_200_OK) # Check the updated dats file diff --git a/chord_metadata_service/chord/tests/test_api_data_types.py b/chord_metadata_service/chord/tests/test_api_data_types.py index 31799c669..7ed4e8aeb 100644 --- a/chord_metadata_service/chord/tests/test_api_data_types.py +++ b/chord_metadata_service/chord/tests/test_api_data_types.py @@ -5,8 +5,8 @@ from rest_framework import status from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase, PermissionsTestCaseMixin +from chord_metadata_service.discovery.scope import get_discovery_scope from chord_metadata_service.discovery.tests.constants import DISCOVERY_CONFIG_TEST -from chord_metadata_service.discovery.utils import get_discovery_scope from chord_metadata_service.phenopackets.tests.helpers import PhenoTestCase from ..data_types import DATA_TYPE_EXPERIMENT, DATA_TYPE_PHENOPACKET, DATA_TYPES diff --git a/chord_metadata_service/chord/tests/test_api_export.py b/chord_metadata_service/chord/tests/test_api_export.py index ab8815933..a66ada50e 100644 --- a/chord_metadata_service/chord/tests/test_api_export.py +++ b/chord_metadata_service/chord/tests/test_api_export.py @@ -1,14 +1,13 @@ -import json import os import shutil import tempfile from django.urls import reverse -from chord_metadata_service.chord.export.cbioportal import CBIO_FILES_SET -from chord_metadata_service.chord.export.utils import EXPORT_DIR from rest_framework import status -from rest_framework.test import APITestCase +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase +from chord_metadata_service.chord.export.cbioportal import CBIO_FILES_SET +from chord_metadata_service.chord.export.utils import EXPORT_DIR from chord_metadata_service.chord.models import Project, Dataset from chord_metadata_service.chord.ingest import WORKFLOW_INGEST_FUNCTION_MAP from chord_metadata_service.chord.workflows.metadata import WORKFLOW_PHENOPACKETS_JSON @@ -17,7 +16,7 @@ from .example_ingest import EXAMPLE_INGEST_PHENOPACKET -class ExportTest(APITestCase): +class ExportTest(AuthzAPITestCase): def setUp(self) -> None: # Creates a test database and populate with a phenopacket test file @@ -28,28 +27,28 @@ def setUp(self) -> None: self.p = WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_PHENOPACKETS_JSON](EXAMPLE_INGEST_PHENOPACKET, self.d.identifier) - def test_export_cbio(self): + self.base_export_payload = { + "format": "cbioportal", + "object_type": "dataset", + "object_id": self.study_id, + } + + def test_export_cbio_no_body(self): # Test with no export body - r = self.client.post(reverse("export"), content_type="application/json") + r = self.one_authz_post(reverse("export"), content_type="application/json") self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) - try: - tmp_dir = tempfile.mkdtemp() - - 
export_payload = { - "format": "cbioportal", - "object_type": "dataset", - "object_id": self.study_id, - } - - # Test with no output_path: expect a tar archive to be returned - r = self.client.post(reverse("export"), data=json.dumps(export_payload), content_type="application/json") - self.assertEqual(r.get('Content-Disposition'), f"attachment; filename=\"{self.study_id}.tar.gz\"") + def test_export_cbio_no_path(self): + # Test with no output_path: expect a tar archive to be returned + r = self.one_authz_post(reverse("export"), json=self.base_export_payload) + self.assertEqual(r.get('Content-Disposition'), f"attachment; filename=\"{self.study_id}.tar.gz\"") + # TODO: More + def test_export_cbio_with_path(self): + tmp_dir = tempfile.mkdtemp() + try: # Test with output_path provided: expect files created in this directory - export_payload["output_path"] = tmp_dir - - r = self.client.post(reverse("export"), data=json.dumps(export_payload), content_type="application/json") + r = self.one_authz_post(reverse("export"), json={**self.base_export_payload, "output_path": tmp_dir}) self.assertEqual(r.status_code, status.HTTP_204_NO_CONTENT) # TODO: just write within the directory that has been provided export_path = os.path.join(tmp_dir, EXPORT_DIR, self.study_id) @@ -61,3 +60,7 @@ def test_export_cbio(self): shutil.rmtree(tmp_dir) # TODO: More + + def test_export_cbio_forbidden(self): + r = self.one_no_authz_post(reverse("export"), json=self.base_export_payload) + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) diff --git a/chord_metadata_service/chord/tests/test_api_ingest.py b/chord_metadata_service/chord/tests/test_api_ingest.py index 3e9ca3c24..5e3a9fe93 100644 --- a/chord_metadata_service/chord/tests/test_api_ingest.py +++ b/chord_metadata_service/chord/tests/test_api_ingest.py @@ -1,5 +1,3 @@ -import json - from django.urls import reverse from rest_framework import status from rest_framework.test import APITestCase @@ -61,29 +59,27 @@ def test_workflow_404(self): class APITestCaseWithDataset(AuthzAPITestCaseWithProjectJSON): def setUp(self) -> None: super().setUp() - r = self.one_authz_post("/api/datasets", data=json.dumps(valid_dataset_1(self.project["identifier"]))) + r = self.one_authz_post("/api/datasets", json=valid_dataset_1(self.project["identifier"])) self.dataset = r.json() self.dataset_id = self.dataset["identifier"] class IngestTest(APITestCaseWithDataset): - def test_phenopackets_ingest(self): + def test_phenopackets_ingest_400s(self): # Invalid workflow ID - r = self.client.post( + r = self.one_authz_post( reverse("ingest-into-dataset", args=(self.dataset_id, "phenopackets_json_invalid")), - content_type="application/json", ) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) # No ingestion body - r = self.client.post( + r = self.one_authz_post( reverse("ingest-into-dataset", args=(self.dataset_id, WORKFLOW_PHENOPACKETS_JSON)), - content_type="application/json", ) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) # Bad ingestion body JSON - JSON parse error 400 - r = self.client.post( + r = self.one_authz_post( reverse("ingest-into-dataset", args=(self.dataset_id, WORKFLOW_PHENOPACKETS_JSON)), content_type="application/json", data="{}}", # noqa: W605 @@ -92,31 +88,45 @@ def test_phenopackets_ingest(self): # Invalid phenopacket JSON validation invalid_phenopacket = load_local_json("example_invalid_phenopacket.json") - r = self.client.post( + r = self.one_authz_post( reverse("ingest-into-dataset", args=(self.dataset_id, WORKFLOW_PHENOPACKETS_JSON)), - 
content_type="application/json", - data=json.dumps(invalid_phenopacket), + json=invalid_phenopacket, ) self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + def test_phenopackets_ingest_valid(self): # Success - valid_phenopacket = load_local_json("example_phenopacket_v2.json") - r = self.client.post( + r = self.one_authz_post( reverse("ingest-into-dataset", args=(self.dataset_id, WORKFLOW_PHENOPACKETS_JSON)), - content_type="application/json", - data=json.dumps(valid_phenopacket), + json=load_local_json("example_phenopacket_v2.json"), ) self.assertEqual(r.status_code, status.HTTP_204_NO_CONTENT) + def test_phenopackets_ingest_forbidden(self): + # Forbidden + r = self.one_no_authz_post( + reverse("ingest-into-dataset", args=(self.dataset_id, WORKFLOW_PHENOPACKETS_JSON)), + json=load_local_json("example_phenopacket_v2.json"), + ) + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + class IngestDerivedExperimentResultsTest(APITestCaseWithDataset): def test_ingest_derived_experiment_results(self): # ingest list of experiments WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_PHENOPACKETS_JSON](EXAMPLE_INGEST_PHENOPACKET, self.dataset_id) WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_EXPERIMENTS_JSON](EXAMPLE_INGEST_EXPERIMENT, self.dataset_id) + # ingest list of experiment results - self.client.post( + r = self.one_authz_post( reverse("ingest-derived-experiment-results", args=(self.dataset_id,)), - content_type="application/json", - data=json.dumps(EXAMPLE_INGEST_EXPERIMENT_RESULT), + json=EXAMPLE_INGEST_EXPERIMENT_RESULT, + ) + self.assertEqual(r.status_code, status.HTTP_204_NO_CONTENT) + + # forbidden + r = self.one_no_authz_post( + reverse("ingest-derived-experiment-results", args=(self.dataset_id,)), + json=EXAMPLE_INGEST_EXPERIMENT_RESULT, ) + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) diff --git a/chord_metadata_service/chord/tests/test_api_search.py b/chord_metadata_service/chord/tests/test_api_search.py index 7a612baba..0c6de47da 100644 --- a/chord_metadata_service/chord/tests/test_api_search.py +++ b/chord_metadata_service/chord/tests/test_api_search.py @@ -1,9 +1,10 @@ import json +import uuid from django.urls import reverse from rest_framework import status -from rest_framework.test import APITestCase +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase from chord_metadata_service.patients.models import Individual from chord_metadata_service.phenopackets.models import Biosample, MetaData, Phenopacket, PhenotypicFeature from chord_metadata_service.experiments.models import Experiment, ExperimentResult, Instrument @@ -40,8 +41,19 @@ POST_GET = ("POST", "GET") +SQ1_DATA = { + "data_type": DATA_TYPE_PHENOPACKET, + "query": TEST_SEARCH_QUERY_1 +} -class SearchTest(APITestCase): +# Valid query to search for phenotypic feature type +SQ3_DATA = { + "query": TEST_SEARCH_QUERY_3, + "data_type": DATA_TYPE_PHENOPACKET, +} + + +class SearchTest(AuthzAPITestCase): def setUp(self) -> None: self.project = Project.objects.create(**VALID_PROJECT_1) self.dataset = Dataset.objects.create(**valid_dataset_1(self.project)) @@ -82,7 +94,7 @@ def setUp(self) -> None: biosample=self.biosample_1, instrument=self.instrument, dataset=self.dataset)) self.experiment.experiment_results.set([self.experiment_result]) - def _search_call(self, endpoint, args=None, data=None, method="GET"): + def _search_call(self, endpoint, args=None, data=None, method="GET", authz: bool = True): args = args or [] if method == "POST": @@ -93,135 +105,124 @@ def _search_call(self, endpoint, args=None, 
data=None, method="GET"): "query": json.dumps(data["query"]), } - return (self.client.post if method == "POST" else self.client.get)( - reverse(endpoint, args=args), - data=data, - **({"content_type": "application/json"} if method == "POST" else {})) + if authz: + fn = (self.one_authz_post if method == "POST" else self.one_authz_get) + else: + fn = (self.one_no_authz_post if method == "POST" else self.one_no_authz_get) + + return fn(reverse(endpoint, args=args), data=data) def test_common_search_1(self): # No body for method in POST_GET: - r = self._search_call("private-search", method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + with self.subTest(params=(method,)): + r = self._search_call("private-search", method=method) + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_common_search_2(self): # No data type for method in POST_GET: - r = self._search_call("private-search", data={"query": TEST_SEARCH_QUERY_1}, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + with self.subTest(params=(method,)): + r = self._search_call("private-search", data={"query": TEST_SEARCH_QUERY_1}, method=method) + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_common_search_3(self): # No query for method in POST_GET: - r = self._search_call("private-search", data={"data_type": DATA_TYPE_PHENOPACKET}, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + with self.subTest(params=(method,)): + r = self._search_call("private-search", data={"data_type": DATA_TYPE_PHENOPACKET}, method=method) + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_common_search_4(self): # Bad data type for method in POST_GET: - r = self._search_call("private-search", data={ - "data_type": "bad_data_type", - "query": TEST_SEARCH_QUERY_1, - }, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + with self.subTest(params=(method,)): + r = self._search_call("private-search", data={ + "data_type": "bad_data_type", + "query": TEST_SEARCH_QUERY_1, + }, method=method) + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_common_search_5(self): # Bad syntax for query for method in POST_GET: - r = self._search_call("private-search", data={ - "data_type": DATA_TYPE_PHENOPACKET, - "query": ["hello", "world"] - }, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + with self.subTest(params=(method,)): + r = self._search_call("private-search", data={ + "data_type": DATA_TYPE_PHENOPACKET, + "query": ["hello", "world"] + }, method=method) + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_search_without_result(self): # Valid search without result for method in POST_GET: - r = self._search_call("private-search", data={ - "data_type": DATA_TYPE_PHENOPACKET, - "query": TEST_SEARCH_QUERY_2 - }, method=method) - self.assertEqual(r.status_code, status.HTTP_200_OK) - c = r.json() - self.assertEqual(len(c["results"]), 0) + with self.subTest(params=(method,)): + r = self._search_call("private-search", data={ + "data_type": DATA_TYPE_PHENOPACKET, + "query": TEST_SEARCH_QUERY_2 + }, method=method) + self.assertEqual(r.status_code, status.HTTP_200_OK) + c = r.json() + self.assertEqual(len(c["results"]), 0) def test_private_search(self): # Valid search with result for method in POST_GET: - r = self._search_call("private-search", data={ - "data_type": DATA_TYPE_PHENOPACKET, - "query": TEST_SEARCH_QUERY_1 - }, method=method) - 
self.assertEqual(r.status_code, status.HTTP_200_OK) - c = r.json() + with self.subTest(params=(method,)): + r = self._search_call("private-search", data=SQ1_DATA, method=method) + self.assertEqual(r.status_code, status.HTTP_200_OK) + c = r.json() - self.assertIn(str(self.dataset.identifier), c["results"]) - self.assertEqual(c["results"][str(self.dataset.identifier)]["data_type"], DATA_TYPE_PHENOPACKET) - self.assertEqual(self.phenopacket.id, c["results"][str(self.dataset.identifier)]["matches"][0]["id"]) + self.assertIn(str(self.dataset.identifier), c["results"]) + self.assertEqual(c["results"][str(self.dataset.identifier)]["data_type"], DATA_TYPE_PHENOPACKET) + self.assertEqual(self.phenopacket.id, c["results"][str(self.dataset.identifier)]["matches"][0]["id"]) # TODO: Check schema? - def test_dataset_search_1(self): - # No body - for method in POST_GET: - r = self._search_call("public-dataset-search", args=[str(self.dataset.identifier)], method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) - - def test_private_dataset_search_2(self): - # No query - for method in POST_GET: - r = self._search_call("public-dataset-search", args=[str(self.dataset.identifier)], data={}, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) - - def test_private_dataset_search_3(self): - # Bad syntax for query - d = {"query": ["hello", "world"]} + def test_private_search_forbidden(self): for method in POST_GET: - r = self._search_call("public-dataset-search", args=[str(self.dataset.identifier)], data=d, method=method) - self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) - - def test_private_dataset_search_4(self): - # Valid query with one result - - d = { - "data_type": DATA_TYPE_PHENOPACKET, - "query": TEST_SEARCH_QUERY_1, - } - + with self.subTest(params=(method,)): + r = self._search_call("private-search", data=SQ1_DATA, method=method, authz=False) + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + + def test_private_dataset_search_basic(self): + param_set = [ + ({}, status.HTTP_400_BAD_REQUEST), # No query + ({"query": ["hello", "world"]}, status.HTTP_400_BAD_REQUEST), # Bad syntax for query + (SQ1_DATA, status.HTTP_200_OK), # Valid query with one result + ({"query": True, "data_type": DATA_TYPE_PHENOPACKET}, status.HTTP_200_OK), # Valid query with one result + ] + args = [str(self.dataset.identifier)] + + for params in param_set: + for method in POST_GET: + with self.subTest(params=(*params, method)): + r = self._search_call("private-dataset-search", args=args, data=params[0], method=method) + self.assertEqual(r.status_code, params[1]) + if params[1] == status.HTTP_200_OK: + c = r.json() + self.assertEqual(len(c["results"]), 1) + self.assertEqual(self.phenopacket.id, c["results"][0]["id"]) + + r_forbidden = self._search_call( + "private-dataset-search", args=args, data=params[0], method=method, authz=False) + self.assertEqual(r_forbidden.status_code, status.HTTP_403_FORBIDDEN) + + def test_private_dataset_search_not_found(self): for method in POST_GET: - r = self._search_call("public-dataset-search", args=[str(self.dataset.identifier)], data=d, method=method) - self.assertEqual(r.status_code, status.HTTP_200_OK) - c = r.json() - self.assertEqual(c, True) + with self.subTest(params=(method,)): + r = self._search_call("private-dataset-search", args=["does-not-exist"], data=SQ3_DATA, method=method) + self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND) - r = self._search_call("private-dataset-search", 
args=[str(self.dataset.identifier)], data=d, method=method)
-            self.assertEqual(r.status_code, status.HTTP_200_OK)
-            c = r.json()
-            self.assertEqual(len(c["results"]), 1)
-            self.assertEqual(self.phenopacket.id, c["results"][0]["id"])
-
-    def test_private_search_5(self):
-        d = {
-            "query": True,
-            "data_type": DATA_TYPE_PHENOPACKET
-        }
-        for method in POST_GET:
-            r = self._search_call("private-dataset-search", args=[str(self.dataset.identifier)], data=d, method=method)
-            self.assertEqual(r.status_code, status.HTTP_200_OK)
-            c = r.json()
-            self.assertEqual(len(c["results"]), 1)
-            self.assertEqual(self.phenopacket.id, c["results"][0]["id"])
+                r = self._search_call("private-dataset-search", args=[str(uuid.uuid4())], data=SQ3_DATA, method=method)
+                self.assertEqual(r.status_code, status.HTTP_404_NOT_FOUND)
     def test_private_dataset_search_6(self):
         # Valid query to search for phenotypic feature type
-        d = {
-            "query": TEST_SEARCH_QUERY_3,
-            "data_type": DATA_TYPE_PHENOPACKET,
-        }
-
         for method in POST_GET:
-            r = self._search_call("private-dataset-search", args=[str(self.dataset.identifier)], data=d, method=method)
+            r = self._search_call(
+                "private-dataset-search", args=[str(self.dataset.identifier)], data=SQ3_DATA, method=method)
             self.assertEqual(r.status_code, status.HTTP_200_OK)
             c = r.json()
             self.assertEqual(len(c["results"]), 1)
diff --git a/chord_metadata_service/chord/tests/test_export_cbio.py b/chord_metadata_service/chord/tests/test_export_cbio.py
index e4f01f0e3..cbcccc62a 100644
--- a/chord_metadata_service/chord/tests/test_export_cbio.py
+++ b/chord_metadata_service/chord/tests/test_export_cbio.py
@@ -2,6 +2,7 @@
 from typing import TextIO
 from os import walk, path
+from asgiref.sync import async_to_sync
 from django.db.models import F
 from django.test import TestCase
@@ -74,7 +75,7 @@ def test_file_creation(self):
     """
         with ExportFileContext(None, self.study_id) as file_export:
-            exp.study_export(file_export.get_path, self.study_id)
+            async_to_sync(exp.study_export)(file_export.get_path, self.study_id)
         export_dir = file_export.get_path()
         self.assertTrue(path.exists(export_dir))
@@ -118,7 +119,7 @@ def test_export_cbio_patient_meta(self):
     def test_export_cbio_patient_data(self):
         indiv = Individual.objects.filter(phenopackets=self.p)
         with io.StringIO() as output:
-            exp.individual_export(indiv, output)
+            async_to_sync(exp.individual_export)(indiv, output)
             # Check header
             output.seek(0)
             field_count = None
@@ -156,7 +157,7 @@ def test_export_cbio_sample_data(self):
         samples = pm.Biosample.objects.filter(phenopacket=self.p)
         with io.StringIO() as output:
-            exp.sample_export(samples, output)
+            async_to_sync(exp.sample_export)(samples, output)
             # Check header
             output.seek(0)
             field_count = None
diff --git a/chord_metadata_service/chord/urls.py b/chord_metadata_service/chord/urls.py
index 3e3d4e17a..bfcb7d7df 100644
--- a/chord_metadata_service/chord/urls.py
+++ b/chord_metadata_service/chord/urls.py
@@ -36,7 +36,6 @@
     path('datasets/<str:dataset_id>/data-types', views_data_types.dataset_data_type_summary,
          name="chord-dataset-data-type-summary"),
-    path('datasets/<str:dataset_id>/search', views_search.public_dataset_search, name="public-dataset-search"),
     path('private/datasets/<str:dataset_id>/search', views_search.private_dataset_search, name="private-dataset-search"),
 ]
diff --git a/chord_metadata_service/chord/views_data_types.py b/chord_metadata_service/chord/views_data_types.py
index 7a43bb6f8..57fc9a22c 100644
--- a/chord_metadata_service/chord/views_data_types.py
+++ b/chord_metadata_service/chord/views_data_types.py
@@ -20,11 +20,8 @@
 from
chord_metadata_service.cleanup import run_all_cleanup from chord_metadata_service.discovery.censorship import thresholded_count from chord_metadata_service.discovery.exceptions import DiscoveryScopeException -from chord_metadata_service.discovery.utils import ( - get_discovery_data_type_permissions, - ValidatedDiscoveryScope, - get_request_discovery_scope, -) +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope, get_request_discovery_scope +from chord_metadata_service.discovery.utils import get_discovery_data_type_permissions from chord_metadata_service.experiments.models import Experiment from chord_metadata_service.logger import logger from chord_metadata_service.phenopackets.models import Phenopacket diff --git a/chord_metadata_service/chord/views_search.py b/chord_metadata_service/chord/views_search.py index 1bdc083ec..d3f89b815 100644 --- a/chord_metadata_service/chord/views_search.py +++ b/chord_metadata_service/chord/views_search.py @@ -4,18 +4,18 @@ import logging from adrf.decorators import api_view as async_api_view +from asgiref.sync import sync_to_async +from bento_lib.auth.permissions import P_QUERY_DATA from bento_lib.responses import errors from bento_lib.search import build_search_response, postgres - from datetime import datetime from django.db import connection -from django.db.models import Count, F, Q +from django.db.models import Count, F, Q, QuerySet from django.db.models.functions import Coalesce from django.contrib.postgres.aggregates import ArrayAgg from django.core.exceptions import ValidationError from psycopg2 import sql -from rest_framework.decorators import api_view, permission_classes -from rest_framework.permissions import AllowAny +from rest_framework.decorators import permission_classes from rest_framework.request import Request as DrfRequest from rest_framework.response import Response from rest_framework import status @@ -23,9 +23,10 @@ from typing import Callable from chord_metadata_service.authz.helpers import get_data_type_query_permissions -from chord_metadata_service.authz.permissions import BentoAllowAny, OverrideOrSuperUserOnly, ReadOnly +from chord_metadata_service.authz.middleware import authz_middleware +from chord_metadata_service.authz.permissions import BentoAllowAny, BentoDeferToHandler -from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope, get_request_discovery_scope from chord_metadata_service.experiments.api_views import EXPERIMENT_SELECT_REL, EXPERIMENT_PREFETCH from chord_metadata_service.experiments.models import Experiment @@ -64,7 +65,7 @@ def get_field_lookup(field: list[str]) -> str: return "__".join(f for f in field if f != "[item]") -def get_values_list(queryset, options): +def get_values_list(queryset: QuerySet, options): field_lookup = get_field_lookup(options.get("field", [])) # Filter out null values because these values will be used to make joins, @@ -89,23 +90,22 @@ def data_type_results(query: sql.SQL, params, key="id"): return set(dict(zip([col[0] for col in cursor.description], row))[key] for row in cursor.fetchall()) -def experiment_query_results(query, params, options=None): +async def experiment_query_results(scope: ValidatedDiscoveryScope, query, params, options=None): # TODO: possibly a quite inefficient way of doing things... # TODO: Prefetch related biosample or no? 
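A hedged sketch of the async, scope-narrowed pattern that the rewritten query helpers below implement (data_type_results, Experiment, and get_model_scoped_queryset come from this changeset; the standalone wrapper function is illustrative only):

from asgiref.sync import sync_to_async

async def _scoped_query_results_sketch(scope, query, params):
    # The raw-SQL ID-matching step is blocking, so it is pushed to a worker thread:
    matching_ids = await sync_to_async(data_type_results)(query, params, "id")
    # The ORM queryset is then pre-narrowed to the scope's project/dataset before
    # any output formatting happens:
    return Experiment.get_model_scoped_queryset(scope).filter(id__in=matching_ids)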
- queryset = Experiment.objects\ - .filter(id__in=data_type_results(query, params, "id")) + queryset = Experiment.get_model_scoped_queryset(scope).filter( + id__in=await sync_to_async(data_type_results)(query, params, "id")) output_format = options.get("output") if options else None if output_format == OUTPUT_FORMAT_VALUES_LIST: return get_values_list(queryset, options) - return queryset.select_related(*EXPERIMENT_SELECT_REL) \ - .prefetch_related(*EXPERIMENT_PREFETCH) + return queryset.select_related(*EXPERIMENT_SELECT_REL).prefetch_related(*EXPERIMENT_PREFETCH) -def phenopacket_query_results(query, params, options=None): - queryset = Phenopacket.objects \ - .filter(id__in=data_type_results(query, params, "id")) +async def phenopacket_query_results(scope: ValidatedDiscoveryScope, query, params, options=None): + queryset = Phenopacket.get_model_scoped_queryset(scope).filter( + id__in=await sync_to_async(data_type_results)(query, params, "id")) output_format = options.get("output") if options else None if output_format == OUTPUT_FORMAT_VALUES_LIST: @@ -125,20 +125,19 @@ def phenopacket_query_results(query, params, options=None): ) # Get the biosamples with experiments data - phenopacket_ids = [result['subject_id'] for result in results] - biosamples_experiments_details = get_biosamples_with_experiment_details(phenopacket_ids) + subject_ids = [result['subject_id'] async for result in results] + biosamples_experiments_details = get_biosamples_with_experiment_details(subject_ids) # Group the experiments with biosamples by subject_id - experiments_with_biosamples = build_experiments_by_subject(biosamples_experiments_details) + experiments_with_biosamples = await sync_to_async(build_experiments_by_subject)(biosamples_experiments_details) # Add the experiments_with_biosamples data to the results - for result in results: - result["experiments_with_biosamples"] = experiments_with_biosamples[result['subject_id']] + async for result in results: + result["experiments_with_biosamples"] = experiments_with_biosamples[result["subject_id"]] return results else: - return queryset.select_related(*PHENOPACKET_SELECT_REL) \ - .prefetch_related(*PHENOPACKET_PREFETCH) + return queryset.select_related(*PHENOPACKET_SELECT_REL).prefetch_related(*PHENOPACKET_PREFETCH) QUERY_RESULTS_FN: dict[str, Callable] = { @@ -152,7 +151,30 @@ def phenopacket_query_results(query, params, options=None): } -def search(request): +def _search_response(data_type, serializer_class, queryset: QuerySet, start): + return Response( + build_search_response({ + dataset_id: { + "data_type": data_type, + "matches": list(serializer_class(p).data for p in dataset_objects) + } for dataset_id, dataset_objects in itertools.groupby( + queryset if queryset is not None else [], + key=lambda o: str(o.dataset_id) # object here + ) + }, start) + ) + + +async def _async_group_by_dataset_id(queryset: QuerySet) -> itertools.groupby: + # Queryset is in an async context, so it becomes an async iterator. We need to convert it to a "normal" + # iterable object for itertools.groupby. + return itertools.groupby( + [r async for r in queryset], + key=lambda d: str(d["dataset_id"]) + ) + + +async def search(request: DrfRequest): """ Generic function that takes a request object containing the following parameters: - query: a Bento specific string representation of a query. e.g. @@ -161,8 +183,12 @@ def search(request): This function returns matches grouped by their "owning" datasets. The request can be made using POST or GET methods. 
""" + + scope = await get_request_discovery_scope(request) + search_params, err = get_chord_search_parameters(request) if err: + authz_middleware.mark_authz_done(request) return bad_request_response(err) if (search_params["output"] == OUTPUT_FORMAT_VALUES_LIST @@ -174,23 +200,26 @@ def search(request): compiled_query = search_params["compiled_query"] query_params = search_params["params"] + res = await authz_middleware.async_evaluate_one( + request, scope.as_authz_resource(data_type), P_QUERY_DATA, mark_authz_done=True + ) + if not res: + return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN) + serializer_class = QUERY_RESULT_SERIALIZERS[data_type] query_function = QUERY_RESULTS_FN[data_type] - queryset = query_function(compiled_query, query_params, search_params) + queryset = await query_function(scope, compiled_query, query_params, search_params) if search_params["output"] == OUTPUT_FORMAT_VALUES_LIST: result = { dataset_id: { "data_type": data_type, "matches": [p["value"] for p in dataset_dicts] - } for dataset_id, dataset_dicts in itertools.groupby( - queryset, - key=lambda d: str(d["dataset_id"]) # dict here - ) + } for dataset_id, dataset_dicts in await _async_group_by_dataset_id(queryset) } return Response(build_search_response(result, start)) - if search_params["output"] == OUTPUT_FORMAT_BENTO_SEARCH_RESULT: + elif search_params["output"] == OUTPUT_FORMAT_BENTO_SEARCH_RESULT: # The queryset for the bento_search_result output is based on the # usage of Django ORM `values()` to restrict its content to specific fields. # This result in a slight change of the queryset iterable where @@ -202,29 +231,16 @@ def search(request): {key: value for key, value in p.items() if key != "dataset_id"} for p in dataset_dicts ] - } for dataset_id, dataset_dicts in itertools.groupby( - queryset, - key=lambda d: str(d["dataset_id"]) # dict here - ) + } for dataset_id, dataset_dicts in await _async_group_by_dataset_id(queryset) } return Response(build_search_response(result, start)) - return Response(build_search_response({ - dataset_id: { - "data_type": data_type, - "matches": list(serializer_class(p).data for p in dataset_objects) - } for dataset_id, dataset_objects in itertools.groupby( - queryset if queryset is not None else [], - key=lambda o: str(o.dataset_id) # object here - ) - }, start)) + return await sync_to_async(_search_response)(data_type, serializer_class, queryset, start) -# Mounted on /private/, so will get protected anyway; this allows for access from federation service -# TODO: Ugly and misleading permissions -@api_view(["GET", "POST"]) -@permission_classes([AllowAny]) -def chord_private_search(request): +@async_api_view(["GET", "POST"]) +@permission_classes([BentoDeferToHandler]) +async def chord_private_search(request: DrfRequest): """ Free-form search using Bento specific syntax. Results are grouped by table of origin. @@ -250,31 +266,7 @@ def chord_private_search(request): response. """ # Private search endpoints are protected by URL namespace, not by Django permissions. 
-    return search(request)
-
-
-def phenopacket_filter_results(subject_ids, disease_ids, biosample_ids,
-                               phenotypicfeature_ids, phenopacket_ids):
-    query = Phenopacket.objects.get_queryset()
-
-    if subject_ids:
-        query = query.filter(subject__id__in=subject_ids)
-
-    if disease_ids:
-        query = query.filter(diseases__id__in=disease_ids)
-
-    if biosample_ids:
-        query = query.filter(biosamples__id__in=biosample_ids)
-
-    if phenotypicfeature_ids:
-        query = query.filter(phenotypic_features__id__in=phenotypicfeature_ids)
-
-    if phenopacket_ids:
-        query = query.filter(id__in=phenopacket_ids)
-
-    res = query.prefetch_related(*PHENOPACKET_PREFETCH)
-
-    return res
+    return await search(request)
 def get_chord_search_parameters(request, data_type=None):
@@ -299,6 +291,7 @@ def get_chord_search_parameters(request, data_type=None):
     - field: optional parameter, set when output is "values_list"
     }
     """
+
     query_params = request.query_params if request.method == "GET" else (request.data or {})
     data_type = query_params.get("data_type") or data_type
@@ -313,7 +306,6 @@
         return None, "Missing query in request body"
     if request.method == "GET":
         # Query passed as a JSON in the URL: must be decoded.
-        # print(request.query_params)
         try:
             query = json.loads(query)
         except json.decoder.JSONDecodeError:
@@ -342,10 +334,13 @@
     }, None
-def chord_dataset_search(
-        search_params,
-        dataset_id, start,
-        internal=False) -> tuple[bool | list | None, str | None]:
+def _serialize_many(serializer_class, queryset):
+    return serializer_class(queryset, many=True).data
+
+
+async def chord_dataset_search(
+    scope: ValidatedDiscoveryScope, search_params, start
+) -> tuple[bool | list | None, str | None]:
     """
     Performs a search based on a psycopg2 query object and parameters, restricted to a given table.
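As orientation for the parameter handling above, a hedged sketch of how a caller could exercise both transports that get_chord_search_parameters() accepts. The base URL, mount path, and example query are illustrative; what the code prescribes is only that a GET query arrives JSON-encoded in the URL (it is json.loads()-decoded server-side), while a POST query is plain JSON in the body.

import json
import requests  # any HTTP client works; assumed here for illustration

BASE = "http://katsu.local"  # hypothetical deployment URL
query = ["#eq", ["#resolve", "subject", "sex"], "FEMALE"]  # example Bento-style query

# GET: the query travels as a JSON-encoded string in the URL
requests.get(f"{BASE}/private/search", params={"data_type": "phenopacket", "query": json.dumps(query)})

# POST: the query is sent as-is in a JSON body
requests.post(f"{BASE}/private/search", json={"data_type": "phenopacket", "query": query})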
@@ -354,50 +349,59 @@ def chord_dataset_search( serializer_class = QUERY_RESULT_SERIALIZERS[data_type] query_function = QUERY_RESULTS_FN[data_type] - queryset = query_function( + queryset = await query_function( + scope, query=sql.SQL("{} AND dataset_id = {}").format(search_params["compiled_query"], sql.Placeholder()), - params=search_params["params"] + (dataset_id,), + params=search_params["params"] + (scope.dataset_id,), options=search_params ) - if not internal: - return queryset.exists(), None # True if at least one match if search_params["output"] == OUTPUT_FORMAT_VALUES_LIST: - return list(queryset), None + return [v async for v in queryset], None if search_params["output"] == OUTPUT_FORMAT_BENTO_SEARCH_RESULT: - return list(queryset), None + return [v async for v in queryset], None debug_log(f"Started fetching from queryset and serializing data at {datetime.now() - start}") - serialized_data = serializer_class(queryset, many=True).data + serialized_data = await sync_to_async(_serialize_many)(serializer_class, queryset) debug_log(f"Finished running query and serializing in {datetime.now() - start}") return serialized_data, None -def dataset_search(request: DrfRequest, dataset_id: str, internal=False): +@async_api_view(["GET", "POST"]) +@permission_classes([BentoDeferToHandler]) +async def private_dataset_search(request: DrfRequest, dataset_id: str): + try: + dataset = await Dataset.objects.aget(identifier=dataset_id) + except (Dataset.DoesNotExist, ValidationError) as e: + authz_middleware.mark_authz_done(request) + return Response(errors.not_found_error(str(e)), status=status.HTTP_404_NOT_FOUND) + + project = await Project.objects.aget(identifier=dataset.project_id) + + # don't use request scope - the project/dataset are validated by the aget calls above and fixed + scope = ValidatedDiscoveryScope(project, dataset) + + # TODO: narrow based on queried data types + if not await authz_middleware.async_evaluate_one( + request, scope.as_authz_resource(), P_QUERY_DATA, mark_authz_done=True + ): + authz_middleware.mark_authz_done(request) + return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN) + + # perform search: -------------------------------------------------------------------------------------------------- + start = datetime.now() search_params, err = get_chord_search_parameters(request=request) if err: return bad_request_response(err) - data, err = chord_dataset_search(search_params, dataset_id, start, internal) + data, err = await chord_dataset_search(scope, search_params, start) if err: return bad_request_response(err) - return Response(build_search_response(data, start) if internal else data) - - -@api_view(["GET", "POST"]) -@permission_classes([OverrideOrSuperUserOnly | ReadOnly]) -def public_dataset_search(request: DrfRequest, dataset_id: str): - return dataset_search(request=request, dataset_id=dataset_id) - - -@api_view(["GET", "POST"]) -@permission_classes([OverrideOrSuperUserOnly | ReadOnly]) -def private_dataset_search(request: DrfRequest, dataset_id: str): - return dataset_search(request=request, dataset_id=dataset_id, internal=True) + return Response(build_search_response(data, start)) DATASET_DATA_TYPE_SUMMARY_FUNCTIONS = { diff --git a/chord_metadata_service/discovery/api_views.py b/chord_metadata_service/discovery/api_views.py index e06d635ec..7f8dc6501 100644 --- a/chord_metadata_service/discovery/api_views.py +++ b/chord_metadata_service/discovery/api_views.py @@ -9,18 +9,20 @@ from rest_framework.decorators import permission_classes 
from rest_framework.request import Request as DrfRequest from rest_framework.response import Response +from typing import Type from chord_metadata_service.authz.permissions import BentoAllowAny from chord_metadata_service.chord import data_types as dts -from chord_metadata_service.discovery.exceptions import DiscoveryScopeException -from chord_metadata_service.discovery.utils import get_request_discovery_scope, get_public_model_scoped_queryset from chord_metadata_service.logger import logger -from .fields import get_field_options, get_range_stats, get_categorical_stats, get_date_stats -from .model_lookups import PUBLIC_MODEL_NAMES_TO_DATA_TYPE, PUBLIC_MODEL_NAMES_TO_MODEL, PublicModelName from . import responses as dres from .censorship import get_rules +from .exceptions import DiscoveryScopeException +from .fields import get_field_options, get_range_stats, get_categorical_stats, get_date_stats +from .model_lookups import PUBLIC_MODEL_NAMES_TO_DATA_TYPE, PUBLIC_MODEL_NAMES_TO_MODEL, PublicModelName from .schemas import DISCOVERY_SCHEMA +from .scope import get_request_discovery_scope +from .scopeable_model import BaseScopeableModel from .types import BinWithValue from .utils import get_discovery_data_type_permissions, get_discovery_field_set_permissions @@ -131,11 +133,14 @@ async def public_overview(request: DrfRequest): if not any(d["counts"] for d in dt_permissions.values()): return Response(dres.INSUFFICIENT_PRIVILEGES, status=status.HTTP_403_FORBIDDEN) - async def _counts_for_scoped_model_name(mn: PublicModelName) -> tuple[PublicModelName, int]: - return mn, await get_public_model_scoped_queryset(discovery_scope, mn).acount() + async def _counts_for_scoped_model_name( + m: tuple[PublicModelName, Type[BaseScopeableModel]] + ) -> tuple[PublicModelName, int]: + mn, model = m + return mn, await model.get_model_scoped_queryset(discovery_scope).acount() # Predefined counts - counts = dict(await asyncio.gather(*map(_counts_for_scoped_model_name, PUBLIC_MODEL_NAMES_TO_MODEL))) + counts = dict(await asyncio.gather(*map(_counts_for_scoped_model_name, PUBLIC_MODEL_NAMES_TO_MODEL.items()))) # Set counts to 0 if they're under the count threshold and the threshold is positive. 
for public_model_name in counts: diff --git a/chord_metadata_service/discovery/exceptions.py b/chord_metadata_service/discovery/exceptions.py index c5c75bc3d..a60a74bac 100644 --- a/chord_metadata_service/discovery/exceptions.py +++ b/chord_metadata_service/discovery/exceptions.py @@ -2,8 +2,14 @@ "DiscoveryScopeException", ] +from rest_framework import status +from rest_framework.exceptions import APIException -class DiscoveryScopeException(Exception): + +class DiscoveryScopeException(APIException): + status_code = status.HTTP_400_BAD_REQUEST + default_detail = "Error validating discovery scope (does not exist)" + default_code = "bad_request" def __init__(self, dataset_id: str | None = None, project_id: str | None = None, *args) -> None: self.dataset_id = dataset_id @@ -18,4 +24,4 @@ def __init__(self, dataset_id: str | None = None, project_id: str | None = None, message = message.format("project", project_id) self.message = {"message": message} - super().__init__(*args) + super().__init__(*args, detail=message) diff --git a/chord_metadata_service/discovery/fields.py b/chord_metadata_service/discovery/fields.py index 660cf5eee..416e635b7 100644 --- a/chord_metadata_service/discovery/fields.py +++ b/chord_metadata_service/discovery/fields.py @@ -6,13 +6,13 @@ from django.db.models.functions import Cast from typing import Any, Mapping -from .utils import ValidatedDiscoveryScope -from ..authz.types import DataPermissionsDict -from ..logger import logger +from chord_metadata_service.authz.types import DataPermissionsDict +from chord_metadata_service.logger import logger from . import fields_utils as f_utils from .censorship import get_threshold, thresholded_count -from .stats import stats_for_field, get_scoped_queryset +from .scope import ValidatedDiscoveryScope +from .stats import stats_for_field from .types import BinWithValue, DiscoveryConfig, DiscoveryFieldProps LENGTH_Y_M = 4 + 1 + 2 # dates stored as yyyy-mm-dd @@ -210,7 +210,7 @@ async def get_range_stats( ] query_set = ( - get_scoped_queryset(model, scope) + model.get_model_scoped_queryset(scope) .values(label=Case(*whens, default=Value("missing"), output_field=CharField())) .annotate(total=Count("label")) ) @@ -306,7 +306,7 @@ async def get_date_stats( # Note: lexical sort works on ISO dates query_set = ( - get_scoped_queryset(model, scope) + model.get_model_scoped_queryset(scope) .values(field_name) .order_by(field_name) .annotate(total=Count(field_name)) diff --git a/chord_metadata_service/discovery/fields_utils.py b/chord_metadata_service/discovery/fields_utils.py index b80278117..fd166c6a7 100644 --- a/chord_metadata_service/discovery/fields_utils.py +++ b/chord_metadata_service/discovery/fields_utils.py @@ -2,6 +2,7 @@ from django.db.models import Q, Func, BooleanField, F, Value, Model, JSONField from chord_metadata_service.discovery.model_lookups import PUBLIC_MODEL_NAMES_TO_MODEL, PublicModelName +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel MAPPING_SEPARATOR = "/" JSON_PATH_ACCESSOR = "." 
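The two constants above encode the discovery field-ID convention that the parsing helpers in the next hunk operate on. A minimal sketch of the decomposition, using the example ID from the docstring below (the real lookup goes through get_model_and_field()):

field_id = "individual/extra_properties/date_of_consent"

# MAPPING_SEPARATOR ("/") splits the public model name from the field path...
model_name, *field_path = field_id.split("/")
assert model_name == "individual"
assert field_path == ["extra_properties", "date_of_consent"]

# ...and the path crumbs can then be joined into a Django ORM lookup:
orm_lookup = "__".join(field_path)  # "extra_properties__date_of_consent"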
@@ -28,7 +29,7 @@ def get_public_model_name_and_field_path(field_id: str) -> tuple[str, tuple[str, return model_name, tuple(field_path) -def get_model_and_field(field_id: str) -> tuple[Type[Model], str]: +def get_model_and_field(field_id: str) -> tuple[Type[BaseScopeableModel], str]: """ Parses a path-like string representing an ORM such as "individual/extra_properties/date_of_consent" where the first crumb represents the object in the DB model, and the next ones @@ -39,7 +40,7 @@ def get_model_and_field(field_id: str) -> tuple[Type[Model], str]: model_name, field_path = get_public_model_name_and_field_path(field_id) - model: Type[Model] | None = PUBLIC_MODEL_NAMES_TO_MODEL.get(model_name) + model: Type[BaseScopeableModel] | None = PUBLIC_MODEL_NAMES_TO_MODEL.get(model_name) if model is None: msg = f"Accessing field on model {model_name} not implemented" raise NotImplementedError(msg) diff --git a/chord_metadata_service/discovery/model_lookups.py b/chord_metadata_service/discovery/model_lookups.py index edbfe8437..4dd13156a 100644 --- a/chord_metadata_service/discovery/model_lookups.py +++ b/chord_metadata_service/discovery/model_lookups.py @@ -1,23 +1,21 @@ -from django.db.models import Model -from typing import Literal, Type, TypedDict +from typing import Literal, Type from chord_metadata_service.chord.data_types import DATA_TYPE_PHENOPACKET, DATA_TYPE_EXPERIMENT from chord_metadata_service.experiments import models as exp_models from chord_metadata_service.patients import models as patient_models from chord_metadata_service.phenopackets import models as pheno_models +from .scopeable_model import BaseScopeableModel + __all__ = [ "PUBLIC_MODEL_NAMES_TO_MODEL", "PUBLIC_MODEL_NAMES_TO_DATA_TYPE", - "PUBLIC_MODEL_NAMES_TO_SCOPE_FILTERS", "PublicModelName", - "PublicScopeFilterKeys", ] PublicModelName = Literal["individual", "biosample", "experiment"] -PublicScopeFilterKeys = Literal["project", "dataset"] -PUBLIC_MODEL_NAMES_TO_MODEL: dict[PublicModelName, Type[Model]] = { +PUBLIC_MODEL_NAMES_TO_MODEL: dict[PublicModelName, Type[BaseScopeableModel]] = { "individual": patient_models.Individual, "biosample": pheno_models.Biosample, "experiment": exp_models.Experiment, @@ -28,48 +26,3 @@ "biosample": DATA_TYPE_PHENOPACKET, "experiment": DATA_TYPE_EXPERIMENT, } - - -class ScopeFilter(TypedDict, total=False): - filter: str - prefetch_related: tuple[str, ...] - select_related: tuple[str, ...] 
- - -class ProjectDatasetScopeFilters(TypedDict): - project: ScopeFilter - dataset: ScopeFilter - - -PUBLIC_MODEL_NAMES_TO_SCOPE_FILTERS: dict[PublicModelName, ProjectDatasetScopeFilters] = { - "individual": { - "project": { - "filter": "phenopackets__dataset__project__identifier", - "prefetch_related": ("phenopackets__dataset__project",) - }, - "dataset": { - "filter": "phenopackets__dataset__identifier", - "prefetch_related": ("phenopackets__dataset",) - }, - }, - "biosample": { - "project": { - "filter": "phenopacket__dataset__project__identifier", - "prefetch_related": ("phenopacket__dataset__project",), - }, - "dataset": { - "filter": "phenopacket__dataset__identifier", - "prefetch_related": ("phenopacket__dataset",), - }, - }, - "experiment": { - "project": { - "filter": "dataset__project__identifier", - "prefetch_related": ("dataset__project",), - }, - "dataset": { - "filter": "dataset__identifier", - "prefetch_related": ("dataset",), - }, - }, -} diff --git a/chord_metadata_service/discovery/scope.py b/chord_metadata_service/discovery/scope.py new file mode 100644 index 000000000..441c69824 --- /dev/null +++ b/chord_metadata_service/discovery/scope.py @@ -0,0 +1,164 @@ +import uuid + +from bento_lib.auth.resources import build_resource +from django.conf import settings +from django.core.exceptions import ObjectDoesNotExist +from rest_framework.request import Request as DrfRequest + +from chord_metadata_service.chord import models as cm + +from .exceptions import DiscoveryScopeException +from .types import DiscoveryOrEmptyConfig, OptionalDiscoveryOrEmptyConfig + +__all__ = [ + "ValidatedDiscoveryScope", + "get_discovery_scope", + "get_request_discovery_scope", + "INSTANCE_SCOPE", +] + + +class ValidatedDiscoveryScope: + """ + Contains discovery scope information (i.e., project and dataset), as well as helper methods for accessing the + scope's discovery configuration, Bento authorization resource representation, and IDs. + + Projects and datasets are passed into the constructor rather than IDs to allow discovery calculations *and* ensure + the project/dataset actually exist before scope object creation, thus the name - the project and dataset's + existences are pre-validated. Of course, a project/dataset could be deleted asynchronously elsewhere, which could + result in this becoming invalid. + """ + + def __init__(self, project: cm.Project | None, dataset: cm.Dataset | None): + """ + Constructor for an already-validated discovery scope - i.e., since we are getting fed project/dataset instances + rather than just string IDs, we know these objects exist at the time of construction. + """ + + self._project = project + self._dataset = dataset + + # Additional validation + if self._dataset: + if not self._project: + # - make sure we have project set if dataset is set + raise DiscoveryScopeException(dataset_id=str(self._dataset.identifier)) + elif (project_id := self._project.identifier) != self._dataset.project_id: + # - make sure the specified project ID matches the dataset's project ID + raise DiscoveryScopeException(dataset_id=str(self._dataset.identifier), project_id=str(project_id)) + + # We can cache the discovery property after the first call to the getter defined below, since instances of this + # class MUST NOT be mutated. + self._discovery: OptionalDiscoveryOrEmptyConfig = None + + @property + def project_id(self) -> str | None: + """ + String representation of the scope project's ID, if set. 
+        """
+        return str(self._project.identifier) if self._project else None
+
+    @property
+    def dataset_id(self) -> str | None:
+        """
+        String representation of the scope dataset's ID, if set.
+        """
+        return str(self._dataset.identifier) if self._dataset else None
+
+    def __repr__(self):
+        return f"<ValidatedDiscoveryScope project={self.project_id} dataset={self.dataset_id}>"
+
+    def _get_project_discovery_or_fallback(self) -> DiscoveryOrEmptyConfig:
+        if self._project and (d := self._project.discovery):
+            return d
+        else:
+            # fallback on global discovery config if project is not set or has None as discovery
+            return settings.CONFIG_PUBLIC
+
+    def _get_dataset_discovery_or_fallback(self) -> DiscoveryOrEmptyConfig:
+        """
+        Gets the dataset discovery configuration dictionary, or falls back to the project (and eventually instance) one.
+        """
+        if self._dataset and (d := self._dataset.discovery):
+            return d
+        else:
+            return self._get_project_discovery_or_fallback()
+
+    @property
+    def discovery(self) -> DiscoveryOrEmptyConfig:
+        """
+        Get the discovery configuration dictionary for this scope, properly handling falling back
+        (dataset -> project -> instance) as required.
+        """
+        if self._discovery is not None:
+            return self._discovery
+        else:
+            d = self._get_dataset_discovery_or_fallback()
+            self._discovery = d
+            return d
+
+    def as_authz_resource(self, data_type: str | None = None) -> dict:
+        """
+        Build a Bento authorization system-compatible resource dictionary from this discovery scope.
+        Optionally, a data type can be passed to narrow the resource to a specific data type.
+        """
+        return build_resource(self.project_id, self.dataset_id, data_type=data_type)
+
+
+def _get_project_id_and_dataset_id_from_request(request: DrfRequest) -> tuple[str | None, str | None]:
+    return request.query_params.get("project") or None, request.query_params.get("dataset") or None
+
+
+async def _get_project_by_id(project_id: str) -> cm.Project:
+    return await cm.Project.objects.filter(identifier=project_id).aget()
+
+
+async def get_discovery_scope(project_id: str | None, dataset_id: str | None) -> ValidatedDiscoveryScope:
+    project: cm.Project | None = None
+    dataset: cm.Dataset | None = None
+
+    try:
+        if project_id:
+            uuid.UUID(project_id)
+        if dataset_id:
+            uuid.UUID(dataset_id)
+    except ValueError:
+        # We don't want to facilitate log injection, so replace the true values with placeholders
+        raise DiscoveryScopeException("", "")
+
+    try:
+        if dataset_id:
+            qs = cm.Dataset.objects.filter(identifier=dataset_id)
+            if project_id:
+                # check if the dataset exists and belongs to the specified project if project ID is specified;
+                # otherwise, infer the project from the dataset.
+ qs = qs.filter(project_id=project_id) + + dataset = await qs.aget() + project = await _get_project_by_id(dataset.project_id) + + elif project_id: + project = await _get_project_by_id(project_id) + + except ObjectDoesNotExist: + # We've already checked these are UUIDs, so they're fine to log + raise DiscoveryScopeException(dataset_id, project_id) + + return ValidatedDiscoveryScope(project=project, dataset=dataset) + + +async def get_request_discovery_scope(request: DrfRequest) -> ValidatedDiscoveryScope: + if (existing_scope := getattr(request, "discovery_scope", None)) is not None: + return existing_scope # already cached by a previous call to this function + + project_id, dataset_id = _get_project_id_and_dataset_id_from_request(request) + scope = await get_discovery_scope(project_id, dataset_id) + + # hack: cache discovery scope for this request on the object itself as an arbitrary property for future calls to + # this function, to avoid database request spam. + request.discovery_scope = scope + + return scope + + +INSTANCE_SCOPE = ValidatedDiscoveryScope(None, None) # re-usable singleton for instance-wide scope diff --git a/chord_metadata_service/discovery/scopeable_model.py b/chord_metadata_service/discovery/scopeable_model.py new file mode 100644 index 000000000..a84816d14 --- /dev/null +++ b/chord_metadata_service/discovery/scopeable_model.py @@ -0,0 +1,85 @@ +from __future__ import annotations # need to use string-based annotations to make the below type-checking imports work +from abc import abstractmethod +from django.db.models import Model, Q, QuerySet +from typing import Literal, TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + # gross hack to make type-checking possible without causing circular import issues. + # see: https://stackoverflow.com/a/39757388 + from .scope import ValidatedDiscoveryScope + from .types import ModelScopeFilters + +__all__ = ["BaseScopeableModel", "TOP_LEVEL_MODEL_SCOPE_FILTERS"] + +PublicScopeFilterKeys = Literal["project", "dataset"] + + +class BaseScopeableModel(Model): + + class Meta: + abstract = True + + @staticmethod + @abstractmethod + def get_scope_filters() -> ModelScopeFilters: # pragma: no cover + """ + Abstract static method (essentially a property) which returns a dictionary matching the ModelScopeFilters + format, which defines which lookups are used to filter a queryset of objects of this model to just those which + fall under a given scope. + """ + pass + + async def scope_contains_object(self, scope: ValidatedDiscoveryScope) -> bool: + """ + Returns whether the scoped queryset for the model and the passed scope contains this particular object. + Useful for checking permissions. + """ + return await self.get_model_scoped_queryset(scope).filter(pk=self.pk).aexists() + + @classmethod + def get_model_scoped_queryset(cls, scope: ValidatedDiscoveryScope) -> QuerySet: + """ + Returns a queryset (and subset) of objects of this model which belong to the passed scope. This method uses the + defined get_scope_filters() function to narrow the queryset. 
+ """ + + filter_scope: PublicScopeFilterKeys + if scope.dataset_id: + filter_scope = "dataset" + value = scope.dataset_id + elif scope.project_id and not scope.dataset_id: + filter_scope = "project" + value = scope.project_id + else: + return cls.objects.distinct() + + scope_filter_spec = cls.get_scope_filters()[filter_scope] + + prefetch = scope_filter_spec["prefetch_related"] + + filter_query = scope_filter_spec["filter"] + if isinstance(filter_query, tuple): + # If filter is a tuple, the field contains multiple filters that are ORed together. This is useful for, + # e.g., the Resource model, where there are multiple possible paths one can take from the object to the + # parent dataset(s). + obj_q = Q(**{filter_query[0]: value}) + for fq in filter_query[1:]: + obj_q = obj_q | Q(**{fq: value}) + else: + # Just one filter to get the scoped queryset + obj_q = Q(**{filter_query: value}) + + return cls.objects.distinct().prefetch_related(*prefetch).filter(obj_q) + + +# Common model scope filters for phenopacket + experiment, which share a top-level dataset property. +TOP_LEVEL_MODEL_SCOPE_FILTERS: ModelScopeFilters = { + "project": { + "filter": "dataset__project_id", + "prefetch_related": ("dataset",), + }, + "dataset": { + "filter": "dataset_id", + "prefetch_related": (), + }, +} diff --git a/chord_metadata_service/discovery/stats.py b/chord_metadata_service/discovery/stats.py index a86bfa1c0..8657a5aa8 100644 --- a/chord_metadata_service/discovery/stats.py +++ b/chord_metadata_service/discovery/stats.py @@ -1,12 +1,12 @@ -from django.db.models import Count, F, Model, QuerySet - +from django.db.models import Count, F, QuerySet from typing import Mapping, Type -from .utils import ValidatedDiscoveryScope, get_public_model_scoped_queryset -from ..authz.types import DataPermissionsDict +from chord_metadata_service.authz.types import DataPermissionsDict from .censorship import thresholded_count -from .fields_utils import get_jsonb_path_query, get_public_model_name +from .fields_utils import get_jsonb_path_query +from .scope import ValidatedDiscoveryScope +from .scopeable_model import BaseScopeableModel from .types import BinWithValue, OptionalDiscoveryOrEmptyConfig __all__ = [ @@ -15,7 +15,6 @@ "bento_public_format_count_and_stats_list", "stats_for_field", "queryset_stats_for_field", - "get_scoped_queryset", ] @@ -84,12 +83,8 @@ async def bento_public_format_count_and_stats_list( return thresholded_count(total, discovery, field_permissions), stats_list -def get_scoped_queryset(model: Type[Model], discovery_scope: ValidatedDiscoveryScope) -> QuerySet: - return get_public_model_scoped_queryset(discovery_scope, get_public_model_name(model)) - - async def stats_for_field( - model: Type[Model], + model: Type[BaseScopeableModel], scope: ValidatedDiscoveryScope, field: str, field_permissions: DataPermissionsDict, @@ -100,7 +95,7 @@ async def stats_for_field( Computes counts of distinct values for a given field. 
Mainly applicable to char fields representing categories """ - qs = get_scoped_queryset(model, scope) + qs = model.get_model_scoped_queryset(scope) return await queryset_stats_for_field( qs, field, scope.discovery, field_permissions, add_missing=add_missing, group_by=group_by) diff --git a/chord_metadata_service/discovery/tests/test_discovery_utils.py b/chord_metadata_service/discovery/tests/test_scope.py similarity index 74% rename from chord_metadata_service/discovery/tests/test_discovery_utils.py rename to chord_metadata_service/discovery/tests/test_scope.py index 1131ed5b2..afcf787cf 100644 --- a/chord_metadata_service/discovery/tests/test_discovery_utils.py +++ b/chord_metadata_service/discovery/tests/test_scope.py @@ -1,13 +1,14 @@ from chord_metadata_service.chord import models as cm +from chord_metadata_service.chord.data_types import DATA_TYPE_PHENOPACKET from chord_metadata_service.chord.tests.helpers import ProjectTestCase from chord_metadata_service.discovery.exceptions import DiscoveryScopeException -from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope, INSTANCE_SCOPE class DiscoveryScopeBuildingTestCase(ProjectTestCase): def setUp(self): - self.instance_scope = ValidatedDiscoveryScope(None, None) + self.instance_scope = INSTANCE_SCOPE self.project_scope = ValidatedDiscoveryScope(self.project, None) self.project_dataset_scope = ValidatedDiscoveryScope(self.project, self.dataset) @@ -53,14 +54,29 @@ def test_scope_repr(self): def test_scope_authz_repr(self): subtest_params = [ - (self.instance_scope, {"everything": True}), - (self.project_scope, {"project": str(self.project.identifier)}), + (self.instance_scope, {"everything": True}, None), + (self.project_scope, {"project": str(self.project.identifier)}, None), + ( + self.project_scope, + {"project": str(self.project.identifier), "data_type": DATA_TYPE_PHENOPACKET}, + DATA_TYPE_PHENOPACKET, + ), ( self.project_dataset_scope, {"project": str(self.project.identifier), "dataset": str(self.dataset.identifier)}, + None, + ), + ( + self.project_dataset_scope, + { + "project": str(self.project.identifier), + "dataset": str(self.dataset.identifier), + "data_type": DATA_TYPE_PHENOPACKET, + }, + DATA_TYPE_PHENOPACKET, ), ] for params in subtest_params: with self.subTest(params=params): - self.assertDictEqual(params[0].as_authz_resource(), params[1]) + self.assertDictEqual(params[0].as_authz_resource(params[2]), params[1]) diff --git a/chord_metadata_service/discovery/types.py b/chord_metadata_service/discovery/types.py index 9c8551080..20e0818e7 100644 --- a/chord_metadata_service/discovery/types.py +++ b/chord_metadata_service/discovery/types.py @@ -10,6 +10,7 @@ "EmptyConfig", "DiscoveryOrEmptyConfig", "OptionalDiscoveryOrEmptyConfig", + "ModelScopeFilters", ] @@ -61,3 +62,15 @@ class EmptyConfig(TypedDict): # TODO: py3.12: type keyword DiscoveryOrEmptyConfig = DiscoveryConfig | EmptyConfig OptionalDiscoveryOrEmptyConfig = DiscoveryOrEmptyConfig | None + + +class ScopeLevelFilters(TypedDict): + # If filter is a tuple, the field contains multiple filters that are ORed together. This is useful for, e.g., the + # Resource model, where there are multiple possible paths one can take from the object to the parent dataset(s). + filter: str | tuple[str, ...] + prefetch_related: tuple[str, ...] 
+ + +class ModelScopeFilters(TypedDict): + project: ScopeLevelFilters + dataset: ScopeLevelFilters diff --git a/chord_metadata_service/discovery/utils.py b/chord_metadata_service/discovery/utils.py index aef006f94..0537a3718 100644 --- a/chord_metadata_service/discovery/utils.py +++ b/chord_metadata_service/discovery/utils.py @@ -1,9 +1,4 @@ -import uuid - -from bento_lib.auth.resources import build_resource -from django.conf import settings -from django.core.exceptions import ObjectDoesNotExist, ValidationError -from django.db.models import QuerySet +from django.core.exceptions import ValidationError from rest_framework.request import Request as DrfRequest from typing import Iterable @@ -11,163 +6,19 @@ from chord_metadata_service.authz.types import ( DataPermissionsDict, DataTypeDiscoveryPermissions, FieldDiscoveryPermissions ) -from chord_metadata_service.chord import models as cm -from .exceptions import DiscoveryScopeException from .fields_utils import get_public_model_name_and_field_path -from .model_lookups import ( - PublicModelName, - PublicScopeFilterKeys, - PUBLIC_MODEL_NAMES_TO_DATA_TYPE, - PUBLIC_MODEL_NAMES_TO_MODEL, - PUBLIC_MODEL_NAMES_TO_SCOPE_FILTERS, -) -from .types import DiscoveryFieldProps, DiscoveryOrEmptyConfig, OptionalDiscoveryOrEmptyConfig +from .model_lookups import PUBLIC_MODEL_NAMES_TO_DATA_TYPE +from .scope import ValidatedDiscoveryScope +from .types import DiscoveryFieldProps, DiscoveryOrEmptyConfig __all__ = [ - "ValidatedDiscoveryScope", - "get_discovery_scope", - "get_request_discovery_scope", "get_discovery_queryable_fields", "get_discovery_data_type_permissions", "get_discovery_field_set_permissions", - "get_public_model_scoped_queryset", ] -class ValidatedDiscoveryScope: - """ - Contains discovery scope information (i.e., project and dataset), as well as helper methods for accessing the - scope's discovery configuration, Bento authorization resource representation, and IDs. - - Projects and datasets are passed into the constructor rather than IDs to allow discovery calculations *and* ensure - the project/dataset actually exist before scope object creation, thus the name - the project and dataset's - existences are pre-validated. Of course, a project/dataset could be deleted asynchronously elsewhere, which could - result in this becoming invalid. - """ - - def __init__(self, project: cm.Project | None, dataset: cm.Dataset | None): - """ - Constructor for an already-validated discovery scope - i.e., since we are getting fed project/dataset instances - rather than just string IDs, we know these objects exist at the time of construction. - """ - - self._project = project - self._dataset = dataset - - # Additional validation - if self._dataset: - if not self._project: - # - make sure we have project set if dataset is set - raise DiscoveryScopeException(dataset_id=str(self._dataset.identifier)) - elif (project_id := self._project.identifier) != self._dataset.project_id: - # - make sure the specified project ID matches the dataset's project ID - raise DiscoveryScopeException(dataset_id=str(self._dataset.identifier), project_id=str(project_id)) - - # We can cache the discovery property after the first call to the getter defined below, since instances of this - # class MUST NOT be mutated. - self._discovery: OptionalDiscoveryOrEmptyConfig = None - - @property - def project_id(self) -> str | None: - """ - String representation of the scope project's ID, if set. 
- """ - return str(self._project.identifier) if self._project else None - - @property - def dataset_id(self) -> str | None: - """ - String representation of the scope dataset's ID, if set. - """ - return str(self._dataset.identifier) if self._dataset else None - - def __repr__(self): - return f"" - - def _get_project_discovery_or_fallback(self) -> DiscoveryOrEmptyConfig: - if self._project and (d := self._project.discovery): - return d - else: - # fallback on global discovery config if project is not set or has None as discovery - return settings.CONFIG_PUBLIC - - def _get_dataset_discovery_or_fallback(self) -> DiscoveryOrEmptyConfig: - """ - Gets the dataset discovery configuration dictionary, or falls back to the project (and eventually instance) one. - """ - if self._dataset and (d := self._dataset.discovery): - return d - else: - return self._get_project_discovery_or_fallback() - - @property - def discovery(self) -> DiscoveryOrEmptyConfig: - """ - Get the discovery configuration dictionary for this scope, properly handling falling back - (dataset -> project -> instance) as required. - """ - if self._discovery is not None: - return self._discovery - else: - d = self._get_dataset_discovery_or_fallback() - self._discovery = d - return d - - def as_authz_resource(self) -> dict: - """ - Build a Bento authorization system-compatible resource dictionary from this discovery scope. - """ - return build_resource(self.project_id, self.dataset_id) - - -def _get_project_id_and_dataset_id_from_request(request: DrfRequest) -> tuple[str | None, str | None]: - return request.query_params.get("project") or None, request.query_params.get("dataset") or None - - -async def _get_project_by_id(project_id: str) -> cm.Project: - return await cm.Project.objects.filter(identifier=project_id).aget() - - -async def get_discovery_scope(project_id: str | None, dataset_id: str | None) -> ValidatedDiscoveryScope: - project: cm.Project | None = None - dataset: cm.Dataset | None = None - - try: - if project_id: - uuid.UUID(project_id) - if dataset_id: - uuid.UUID(dataset_id) - except ValueError: - # We don't want to facilitate log injection, so replace the true values with placeholders - raise DiscoveryScopeException("", "") - - try: - if dataset_id: - qs = cm.Dataset.objects.filter(identifier=dataset_id) - if project_id: - # check if the dataset exists and belongs to the specified project if project ID is specified; - # otherwise, infer the project from the dataset. 
- qs = qs.filter(project_id=project_id) - - dataset = await qs.aget() - project = await _get_project_by_id(dataset.project_id) - - elif project_id: - project = await _get_project_by_id(project_id) - - except ObjectDoesNotExist: - # We've already checked these are UUIDs, so they're fine to log - raise DiscoveryScopeException(dataset_id, project_id) - - return ValidatedDiscoveryScope(project=project, dataset=dataset) - - -async def get_request_discovery_scope(request: DrfRequest) -> ValidatedDiscoveryScope: - project_id, dataset_id = _get_project_id_and_dataset_id_from_request(request) - return await get_discovery_scope(project_id, dataset_id) - - def get_discovery_queryable_fields(discovery: DiscoveryOrEmptyConfig) -> dict[str, DiscoveryFieldProps]: if not discovery: return {} @@ -231,38 +82,3 @@ def get_discovery_field_set_permissions( "counts": all(dt_permissions[dt]["counts"] for dt in dts_accessed), "data": all(dt_permissions[dt]["data"] for dt in dts_accessed), }, field_permissions - - -def get_public_model_scoped_queryset(scope: ValidatedDiscoveryScope, mn: PublicModelName) -> QuerySet: - """ - Discovery models can be scoped to either a project or dataset; for downstream filtering, we need to pre-scope the - model queryset to the project/dataset being queried. - - Since downstream filtering may be applied to (possibly deeply)nested fields/models (e.g., biosamples, experiments), - we use `.distinct()` instead of `.all()`. Otherwise, there may be multiple instances of the same top-level object - (individual especially, which has this deep nesting) for each nested instance, for example, in the case of multiple - experiments for a biosample for an individual. - - :param scope: The scope to filter the queryset to. - :param mn: The discovery/"public" model name for - """ - - filter_scope: PublicScopeFilterKeys - if scope.dataset_id: - filter_scope = "dataset" - value = scope.dataset_id - elif scope.project_id and not scope.dataset_id: - filter_scope = "project" - value = scope.project_id - else: - return PUBLIC_MODEL_NAMES_TO_MODEL[mn].objects.distinct() - - filter_query = PUBLIC_MODEL_NAMES_TO_SCOPE_FILTERS[mn][filter_scope]["filter"] - prefetch = PUBLIC_MODEL_NAMES_TO_SCOPE_FILTERS[mn][filter_scope]["prefetch_related"] - - return ( - PUBLIC_MODEL_NAMES_TO_MODEL[mn].objects - .distinct() - .prefetch_related(*prefetch) - .filter(**{filter_query: value}) - ) diff --git a/chord_metadata_service/experiments/api_views.py b/chord_metadata_service/experiments/api_views.py index 58e4efa54..fdc377f4b 100644 --- a/chord_metadata_service/experiments/api_views.py +++ b/chord_metadata_service/experiments/api_views.py @@ -1,25 +1,29 @@ +from asgiref.sync import async_to_sync +from bento_lib.auth.permissions import P_QUERY_DATA from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.utils import extend_schema, inline_serializer -from rest_framework import mixins, serializers, status, viewsets +from rest_framework import serializers, status from rest_framework.settings import api_settings from rest_framework.decorators import api_view, permission_classes +from rest_framework.request import Request as DrfRequest from rest_framework.response import Response -from .serializers import ExperimentSerializer, ExperimentResultSerializer -from .models import Experiment, ExperimentResult -from .schemas import EXPERIMENT_SCHEMA, experiment_resolver, experiment_base_uri -from .filters import ExperimentFilter, ExperimentResultFilter from chord_metadata_service.authz.permissions import 
BentoAllowAny -from chord_metadata_service.restapi.constants import MODEL_ID_PATTERN -from chord_metadata_service.restapi.pagination import LargeResultsSetPagination, BatchResultsSetPagination - - +from chord_metadata_service.authz.viewset import BentoAuthzScopedModelViewSet, BentoAuthzScopedModelGenericListViewSet +from chord_metadata_service.chord.data_types import DATA_TYPE_EXPERIMENT +from chord_metadata_service.discovery.scope import get_request_discovery_scope from chord_metadata_service.restapi.api_renderers import ( PhenopacketsRenderer, ExperimentCSVRenderer, ) - +from chord_metadata_service.restapi.constants import MODEL_ID_PATTERN from chord_metadata_service.restapi.negociation import FormatInPostContentNegotiation +from chord_metadata_service.restapi.pagination import LargeResultsSetPagination, BatchResultsSetPagination + +from .serializers import ExperimentSerializer, ExperimentResultSerializer +from .models import Experiment, ExperimentResult +from .schemas import EXPERIMENT_SCHEMA, experiment_resolver, experiment_base_uri +from .filters import ExperimentFilter, ExperimentResultFilter __all__ = [ "EXPERIMENT_SELECT_REL", @@ -28,6 +32,7 @@ "get_experiment_schema", ] + EXPERIMENT_SELECT_REL = ( "instrument", ) @@ -38,7 +43,7 @@ ) -class ExperimentViewSet(viewsets.ModelViewSet): +class ExperimentViewSet(BentoAuthzScopedModelViewSet): """ get: Return a list of all existing experiments @@ -47,10 +52,8 @@ class ExperimentViewSet(viewsets.ModelViewSet): Create a new experiment """ - queryset = Experiment.objects.all() \ - .select_related(*EXPERIMENT_SELECT_REL) \ - .prefetch_related(*EXPERIMENT_PREFETCH) \ - .order_by("id") + data_type = DATA_TYPE_EXPERIMENT + serializer_class = ExperimentSerializer pagination_class = LargeResultsSetPagination renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) @@ -58,19 +61,18 @@ class ExperimentViewSet(viewsets.ModelViewSet): filterset_class = ExperimentFilter lookup_value_regex = MODEL_ID_PATTERN - def dispatch(self, *args, **kwargs): - return super(ExperimentViewSet, self).dispatch(*args, **kwargs) - - -class BatchViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): - """ - A viewset that only implements the 'list' action. - To be used with the BatchListRouter which maps the POST method to .list() - """ - pass + @async_to_sync + async def get_queryset(self): + return ( + Experiment + .get_model_scoped_queryset(await get_request_discovery_scope(self.request)) + .select_related(*EXPERIMENT_SELECT_REL) + .prefetch_related(*EXPERIMENT_PREFETCH) + .order_by("id") + ) -class ExperimentBatchViewSet(BatchViewSet): +class ExperimentBatchViewSet(BentoAuthzScopedModelGenericListViewSet): """ get: Return a list of all existing experiments @@ -84,28 +86,42 @@ class ExperimentBatchViewSet(BatchViewSet): renderer_classes = (*api_settings.DEFAULT_RENDERER_CLASSES, PhenopacketsRenderer, ExperimentCSVRenderer) content_negotiation_class = FormatInPostContentNegotiation - def get_queryset(self): - experiment_ids = self.request.data.get("id", None) - filter_by_id = {"id__in": experiment_ids} if experiment_ids else {} + data_type = DATA_TYPE_EXPERIMENT - return ( - Experiment.objects - .filter(**filter_by_id) - .select_related(*EXPERIMENT_SELECT_REL) - .prefetch_related(*EXPERIMENT_PREFETCH) - .order_by("id") - ) + @async_to_sync + async def _get_filtered_queryset(self, ids_list: list[str] | None = None): + # We pre-filter experiments to the scope. 
This way, if they specify an ID outside the scope, it's just ignored + # - the requester won't even know if it exists. + queryset = Experiment.get_model_scoped_queryset(await get_request_discovery_scope(self.request)) - def create(self, request, *_args, **_kwargs): - ids_list = request.data.get('id', []) - request.data["id"] = ids_list - queryset = self.get_queryset() + if ids_list: + queryset = queryset.filter(id__in=ids_list) + + return queryset.select_related(*EXPERIMENT_SELECT_REL).prefetch_related(*EXPERIMENT_PREFETCH).order_by("id") + + @async_to_sync + async def get_queryset(self): + return self._get_filtered_queryset(self.request.data.get("id", None)) + + def permission_from_request(self, request: DrfRequest): + if self.action in ("list", "create"): + # Here, "create" maps to the data query permission because we use create(..) (i.e., POST) as a way to run a + # query with a large body. + # TODO: distant future: replace with HTTP QUERY verb. + return P_QUERY_DATA + return None # viewset not implemented for any other action + def create(self, request, *_args, **_kwargs): + """ + Despite the name, this is a POST request for returning a list of experiments. Since query parameters have a + maximum size, POST requests can be used for large batches. + """ + queryset = self._get_filtered_queryset(request.data.get("id", [])) serializer = ExperimentSerializer(queryset, many=True) return Response(serializer.data, status=status.HTTP_200_OK) -class ExperimentResultViewSet(viewsets.ModelViewSet): +class ExperimentResultViewSet(BentoAuthzScopedModelViewSet): """ get: Return a list of all existing experiment results @@ -114,16 +130,21 @@ class ExperimentResultViewSet(viewsets.ModelViewSet): Create a new experiment result """ - queryset = ExperimentResult.objects.all().order_by("id") + data_type = DATA_TYPE_EXPERIMENT + serializer_class = ExperimentResultSerializer pagination_class = LargeResultsSetPagination renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) filter_backends = [DjangoFilterBackend] filterset_class = ExperimentResultFilter - # Cache page for the requested url for 2 hours - def dispatch(self, *args, **kwargs): - return super(ExperimentResultViewSet, self).dispatch(*args, **kwargs) + @async_to_sync + async def get_queryset(self): + return ( + ExperimentResult + .get_model_scoped_queryset(await get_request_discovery_scope(self.request)) + .order_by("id") + ) @extend_schema( diff --git a/chord_metadata_service/experiments/models.py b/chord_metadata_service/experiments/models.py index a3898fecc..62bd71db0 100644 --- a/chord_metadata_service/experiments/models.py +++ b/chord_metadata_service/experiments/models.py @@ -1,6 +1,8 @@ from django.db import models from django.db.models import CharField, JSONField from django.contrib.postgres.fields import ArrayField +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel, TOP_LEVEL_MODEL_SCOPE_FILTERS +from chord_metadata_service.discovery.types import ModelScopeFilters from chord_metadata_service.restapi.models import IndexableMixin from chord_metadata_service.restapi.description_utils import rec_help from chord_metadata_service.restapi.validators import ontology_list_validator, key_value_validator @@ -17,16 +19,20 @@ # model for the desired purposes. -class Experiment(models.Model, IndexableMixin): +class Experiment(BaseScopeableModel, IndexableMixin): """ Class to store Experiment information. This model is primarily designed for genomic experiments; it is thus - linked to a specific bisample. 
+ linked to a specific biosample.
Experiments can be linked via a many-to-many relationship to ExperimentResults; many-to-many because a result may
be derived from multiple experiments. Consider, for example, the results of a pairwise analysis derived from two
Experiments, each of which was performed on a different Biosample.
"""
+ @staticmethod
+ def get_scope_filters() -> ModelScopeFilters:
+ return TOP_LEVEL_MODEL_SCOPE_FILTERS
+
id = CharField(primary_key=True, max_length=200, help_text=rec_help(d.EXPERIMENT, "id"))
# STUDY TYPE
# ["Whole Genome Sequencing","Metagenomics","Transcriptome Analysis","Resequencing","Epigenetics",
@@ -78,7 +84,20 @@ def __str__(self):
return str(self.id)
-class ExperimentResult(models.Model, IndexableMixin):
+class ExperimentResult(BaseScopeableModel, IndexableMixin):
""" Class to represent information about analysis of sequencing data in a file format. """
+
+ @staticmethod
+ def get_scope_filters() -> ModelScopeFilters:
+ return {
+ "project": {
+ "filter": "experiment__dataset__project_id",
+ "prefetch_related": ("experiment_set__dataset",),
+ },
+ "dataset": {
+ "filter": "experiment__dataset_id",
+ "prefetch_related": ("experiment_set",),
+ },
+ }
+
# TODO identifier assigned by lab (?)
identifier = CharField(max_length=200, blank=True, null=True,
diff --git a/chord_metadata_service/experiments/summaries.py b/chord_metadata_service/experiments/summaries.py
index 2ab1b6173..f99376620 100644
--- a/chord_metadata_service/experiments/summaries.py
+++ b/chord_metadata_service/experiments/summaries.py
@@ -4,9 +4,9 @@
from chord_metadata_service.authz.types import DataPermissionsDict
from chord_metadata_service.discovery.censorship import thresholded_count
+from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope
from chord_metadata_service.discovery.stats import queryset_stats_for_field
from chord_metadata_service.discovery.types import DiscoveryConfig
-from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope
from .
import models __all__ = [ diff --git a/chord_metadata_service/experiments/tests/test_api.py b/chord_metadata_service/experiments/tests/test_api.py index 2687d82e6..4845a98ea 100644 --- a/chord_metadata_service/experiments/tests/test_api.py +++ b/chord_metadata_service/experiments/tests/test_api.py @@ -1,18 +1,20 @@ -from django.urls import reverse -from jsonschema.validators import Draft7Validator - -from django.test import TestCase -from chord_metadata_service.experiments.schemas import EXPERIMENT_SCHEMA -from chord_metadata_service.restapi.api_renderers import ExperimentCSVRenderer import csv import io +import uuid +from django.test import TestCase +from django.urls import reverse +from jsonschema.validators import Draft7Validator from rest_framework import status from rest_framework.test import APITestCase + +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase from chord_metadata_service.chord.models import Project, Dataset -from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1 +from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1, VALID_PROJECT_2, VALID_PROJECT_1 from chord_metadata_service.chord.ingest import WORKFLOW_INGEST_FUNCTION_MAP from chord_metadata_service.chord.workflows.metadata import WORKFLOW_PHENOPACKETS_JSON, WORKFLOW_EXPERIMENTS_JSON +from chord_metadata_service.experiments.schemas import EXPERIMENT_SCHEMA +from chord_metadata_service.restapi.api_renderers import ExperimentCSVRenderer from chord_metadata_service.restapi.tests.utils import load_local_json @@ -20,7 +22,7 @@ EXAMPLE_INGEST_OUTPUTS_PHENOPACKETS_JSON = load_local_json("example_phenopackets.json") -class GetExperimentsAppApisTest(APITestCase): +class GetExperimentsAppApisTest(AuthzAPITestCase): """ Test Experiments app APIs. 
""" @@ -29,16 +31,18 @@ def setUp(self) -> None: """ Create two datasets but ingest phenopackets and experiments in just one dataset """ - p = Project.objects.create(title="Test Project", description="Test") + self.p = Project.objects.create(**VALID_PROJECT_1) self.d1 = Dataset.objects.create(title="dataset_1", description="Some dataset 1", data_use=VALID_DATA_USE_1, - project=p) + project=self.p) self.d1_id = self.d1.identifier self.d2 = Dataset.objects.create(title="dataset_2", description="Some dataset 2", data_use=VALID_DATA_USE_1, - project=p) + project=self.p) self.d2_id = self.d2.identifier WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_PHENOPACKETS_JSON](EXAMPLE_INGEST_OUTPUTS_PHENOPACKETS_JSON, self.d1_id) WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_EXPERIMENTS_JSON](EXAMPLE_INGEST_OUTPUTS_EXPERIMENTS_JSON, self.d1_id) + self.p2 = Project.objects.create(**VALID_PROJECT_2) + def assert_response_200_and_length(self, response, assert_len: int): self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() @@ -46,81 +50,135 @@ def assert_response_200_and_length(self, response, assert_len: int): self.assertEqual(len(response_data["results"]), assert_len) def test_get_experiments(self): - response = self.client.get('/api/experiments') + response = self.one_authz_get('/api/experiments') self.assert_response_200_and_length(response, 2) + def test_get_experiments_forbidden(self): + response = self.one_no_authz_get('/api/experiments') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_get_experiments_scoped(self): + r = self.one_authz_get(f"/api/experiments?project={self.p.identifier}") + self.assert_response_200_and_length(r, 2) + + r = self.one_authz_get(f"/api/experiments?project={self.p.identifier}&dataset={self.d1_id}") + self.assert_response_200_and_length(r, 2) + + # nothing ingested under this dataset + r = self.one_authz_get(f"/api/experiments?project={self.p.identifier}&dataset={self.d2_id}") + self.assert_response_200_and_length(r, 0) + + # nothing ingested under this project + r = self.one_authz_get(f"/api/experiments?project={self.p2.identifier}") + self.assert_response_200_and_length(r, 0) + + def test_get_experiments_scoped_forbidden(self): + r = self.one_no_authz_get(f"/api/experiments?project={self.p.identifier}") + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + + def test_get_experiments_scope_not_found(self): + # not found, yields scope bad request + r = self.one_authz_get(f"/api/experiments?project={uuid.uuid4()}") + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) + def test_get_experiment_one(self): - response = self.client.get('/api/experiments/katsu.experiment:1') + response = self.one_authz_get('/api/experiments/katsu.experiment:1') self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual(response_data['id'], 'katsu.experiment:1') + def test_get_experiment_one_forbidden(self): + response = self.one_no_authz_get('/api/experiments/katsu.experiment:1') + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + def test_get_experiment_schema(self): - response = self.client.get('/api/schemas/experiment') + # endpoint is open to everyone + response = self.one_authz_get('/api/schemas/experiment') self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() Draft7Validator.check_schema(response_data) def test_filter_experiments(self): - response = self.client.get('/api/experiments?study_type=epigenetics') - 
self.assert_response_200_and_length(response, 0) + filter_params = [ + ("study_type=epigenetics", 0), + (f"datasets={self.d1_id}", 2), + (f"datasets={self.d2_id}", 0), + (f"datasets={self.d2_id},{self.d1_id}", 2), + ] + + for params in filter_params: + with self.subTest(params=params): + response = self.one_authz_get(f"/api/experiments?{params[0]}") + self.assert_response_200_and_length(response, params[1]) - def test_filter_experiments_by_dataset_1(self): - response = self.client.get(f'/api/experiments?datasets={self.d1_id}') - self.assert_response_200_and_length(response, 2) + def test_get_experiment_results(self): + response = self.one_authz_get('/api/experimentresults') + self.assert_response_200_and_length(response, 4) - def test_filter_experiments_by_dataset_2(self): - response = self.client.get(f'/api/experiments?datasets={self.d2_id}') - self.assert_response_200_and_length(response, 0) + def test_get_experiment_results_scoped(self): + r = self.one_authz_get(f"/api/experimentresults?project={self.p.identifier}") + self.assert_response_200_and_length(r, 4) - def test_filter_experiments_by_datasets_list(self): - response = self.client.get(f'/api/experiments?datasets={self.d2_id},{self.d1_id}') - self.assert_response_200_and_length(response, 2) + r = self.one_authz_get(f"/api/experimentresults?project={self.p.identifier}&dataset={self.d1_id}") + self.assert_response_200_and_length(r, 4) - def test_get_experiment_results(self): - response = self.client.get('/api/experimentresults') - self.assert_response_200_and_length(response, 4) + # nothing ingested under this dataset + r = self.one_authz_get(f"/api/experimentresults?project={self.p.identifier}&dataset={self.d2_id}") + self.assert_response_200_and_length(r, 0) + + # nothing ingested under this project + r = self.one_authz_get(f"/api/experimentresults?project={self.p2.identifier}") + self.assert_response_200_and_length(r, 0) + + def test_get_experiment_results_scoped_forbidden(self): + r = self.one_no_authz_get(f"/api/experimentresults?project={self.p.identifier}") + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + + def test_get_experiment_results_scope_not_found(self): + # not found (bad request for scope) + r = self.one_authz_get(f"/api/experimentresults?project={uuid.uuid4()}") + self.assertEqual(r.status_code, status.HTTP_400_BAD_REQUEST) def test_filter_experiment_results(self): - response = self.client.get('/api/experimentresults?file_format=vcf') + response = self.one_authz_get('/api/experimentresults?file_format=vcf') self.assert_response_200_and_length(response, 2) def test_filter_experiment_results_url(self): - response = self.client.get('/api/experimentresults?url=example.org') + response = self.one_authz_get('/api/experimentresults?url=example.org') self.assert_response_200_and_length(response, 1) def test_filter_experiment_results_indices(self): - response = self.client.get('/api/experimentresults?indices=tabix') + response = self.one_authz_get('/api/experimentresults?indices=tabix') self.assert_response_200_and_length(response, 1) def test_filter_experiment_results_by_dataset_1(self): - response = self.client.get(f'/api/experimentresults?datasets={self.d1_id}') + response = self.one_authz_get(f'/api/experimentresults?datasets={self.d1_id}') self.assert_response_200_and_length(response, 4) def test_filter_experiment_results_by_dataset_2(self): - response = self.client.get(f'/api/experimentresults?datasets={self.d2_id}') + response = self.one_authz_get(f'/api/experimentresults?datasets={self.d2_id}') 
self.assert_response_200_and_length(response, 0) def test_filter_experiment_results_by_datasets_list(self): - response = self.client.get(f'/api/experimentresults?datasets={self.d2_id},{self.d1_id}') + response = self.one_authz_get(f'/api/experimentresults?datasets={self.d2_id},{self.d1_id}') self.assert_response_200_and_length(response, 4) def test_combine_filters_experiment_results(self): - response = self.client.get(f'/api/experimentresults?datasets={self.d2_id},{self.d1_id}&file_format=cram') + response = self.one_authz_get(f'/api/experimentresults?datasets={self.d2_id},{self.d1_id}&file_format=cram') self.assert_response_200_and_length(response, 2) def test_combine_filters_experiment_results_2(self): # there are no experiments in dataset_2 - response = self.client.get(f'/api/experimentresults?datasets={self.d2_id}&file_format=vcf') + response = self.one_authz_get(f'/api/experimentresults?datasets={self.d2_id}&file_format=vcf') self.assert_response_200_and_length(response, 0) def test_post_experiment_batch_no_data(self): - response = self.client.post('/api/batch/experiments', format='json') + response = self.one_authz_post('/api/batch/experiments', format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.json()), 2) def test_post_experiment_batch_with_ids(self): - response = self.client.post('/api/batch/experiments', {'id': ['katsu.experiment:1']}, format='json') + response = self.one_authz_post('/api/batch/experiments', {'id': ['katsu.experiment:1']}, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual(len(response_data), 1) diff --git a/chord_metadata_service/experiments/tests/test_summaries.py b/chord_metadata_service/experiments/tests/test_summaries.py index 22850e8c9..7d5f7f01e 100644 --- a/chord_metadata_service/experiments/tests/test_summaries.py +++ b/chord_metadata_service/experiments/tests/test_summaries.py @@ -1,7 +1,7 @@ from chord_metadata_service.authz.tests.helpers import PermissionsTestCaseMixin from chord_metadata_service.chord import models as cm from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1 -from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope, INSTANCE_SCOPE from ..summaries import dt_experiment_summary from .helpers import ExperimentTestCase @@ -42,13 +42,13 @@ async def test_summary_1_exp_no_perms_whole_instance(self): for params in subtest_params: with self.subTest(params=params): - r = await dt_experiment_summary(ValidatedDiscoveryScope(None, None), params[0]) + r = await dt_experiment_summary(INSTANCE_SCOPE, params[0]) self.assertDictEqual(r, params[1]) async def test_summary_1_exp_full_perms_whole_instance(self): self.maxDiff = None - r = await dt_experiment_summary(ValidatedDiscoveryScope(None, None), self.permissions_full) + r = await dt_experiment_summary(INSTANCE_SCOPE, self.permissions_full) self.assertDictEqual(r, { "count": 1, "data_type_specific": { diff --git a/chord_metadata_service/metadata/settings.py b/chord_metadata_service/metadata/settings.py index d97f6eb79..67a735682 100644 --- a/chord_metadata_service/metadata/settings.py +++ b/chord_metadata_service/metadata/settings.py @@ -69,10 +69,6 @@ else: SCHEMAS_BASE_URL = "/chord_metadata_service/schemas" -# SECURITY WARNING: Don't run with CHORD_PERMISSIONS turned off in production, -# unless an alternative permissions system is in place. 
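For context on the flag removed just below: a minimal standalone sketch of how its env parsing behaved, assuming a production-style configuration (the `DEBUG` value here is an assumption for illustration, not part of this diff).

```python
import os

DEBUG = False  # assumption: production-style configuration

# Mirrors the removed setting's parsing: with DEBUG off and no env override,
# str(not DEBUG) == "True", so the flag defaulted to enforcing permissions.
os.environ.pop("CHORD_PERMISSIONS", None)  # ensure no override for the demo
CHORD_PERMISSIONS = os.environ.get("CHORD_PERMISSIONS", str(not DEBUG)).lower() == "true"
assert CHORD_PERMISSIONS is True
```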
-CHORD_PERMISSIONS = os.environ.get("CHORD_PERMISSIONS", str(not DEBUG)).lower() == "true" - CHORD_SERVICE_ARTIFACT = "metadata" # NOTE: LEAVE CHORD UNLESS YOU WANT A BUNCH OF BROKEN TABLES... vvv CHORD_SERVICE_TYPE_NO_VER = f"ca.c3g.chord:{CHORD_SERVICE_ARTIFACT}" @@ -85,9 +81,6 @@ CHORD_SERVICE_ID = os.environ.get("SERVICE_ID", CHORD_SERVICE_TYPE_NO_VER) BENTO_SERVICE_KIND = "metadata" -# SECURITY WARNING: don't run with AUTH_OVERRIDE turned on in production! -AUTH_OVERRIDE = not CHORD_PERMISSIONS - # When Katsu is hosted on a subpath (e.g. http://myportal.com/api/katsu), this # parameter is used by Django to compute correct URLs in templates (for example # in DRF API discovery pages, or swagger UI) @@ -171,7 +164,6 @@ CORS_ALLOWED_ORIGINS = [orig.strip() for orig in os.environ.get("CORS_ORIGINS", "").split(";") if orig.strip()] CORS_ALLOW_CREDENTIALS = True - CORS_PREFLIGHT_MAX_AGE = 0 ROOT_URLCONF = 'chord_metadata_service.metadata.urls' @@ -276,9 +268,10 @@ def get_secret(path): 'djangorestframework_camel_case.parser.CamelCaseFormParser', 'djangorestframework_camel_case.parser.CamelCaseMultiPartParser', ), - 'DEFAULT_PERMISSION_CLASSES': ['chord_metadata_service.authz.permissions.OverrideOrSuperUserOnly'], + 'DEFAULT_PERMISSION_CLASSES': ['chord_metadata_service.authz.permissions.BentoDeferToHandler'], 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema', 'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'], + 'EXCEPTION_HANDLER': 'chord_metadata_service.restapi.exception_handler.katsu_exception_handler', 'JSON_UNDERSCOREIZE': { 'no_underscore_before_number': True } diff --git a/chord_metadata_service/patients/api_views.py b/chord_metadata_service/patients/api_views.py index 3f0b9fbdf..f90e2ddc8 100644 --- a/chord_metadata_service/patients/api_views.py +++ b/chord_metadata_service/patients/api_views.py @@ -1,6 +1,8 @@ import asyncio from adrf.views import APIView +from asgiref.sync import async_to_sync +from bento_lib.auth.permissions import Permission, P_QUERY_DATA from bento_lib.responses import errors from bento_lib.search import build_search_response from copy import deepcopy @@ -12,27 +14,26 @@ from django.http.request import QueryDict from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.utils import extend_schema, inline_serializer -from rest_framework import viewsets, filters, mixins, serializers, status +from rest_framework import filters, serializers, status from rest_framework.decorators import action from rest_framework.request import Request as DrfRequest from rest_framework.response import Response from rest_framework.settings import api_settings from chord_metadata_service.authz.middleware import authz_middleware +from chord_metadata_service.authz.viewset import BentoAuthzScopedModelViewSet, BentoAuthzScopedModelGenericListViewSet from chord_metadata_service.authz.types import DataTypeDiscoveryPermissions from chord_metadata_service.chord import data_types as dts from chord_metadata_service.discovery import responses as dres from chord_metadata_service.discovery.censorship import get_max_query_parameters, get_threshold, thresholded_count from chord_metadata_service.discovery.exceptions import DiscoveryScopeException from chord_metadata_service.discovery.fields import get_field_options, filter_queryset_field_value +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope, get_request_discovery_scope from chord_metadata_service.discovery.stats import individual_biosample_tissue_stats, 
individual_experiment_type_stats from chord_metadata_service.discovery.utils import ( get_discovery_queryable_fields, get_discovery_data_type_permissions, get_discovery_field_set_permissions, - get_request_discovery_scope, - get_public_model_scoped_queryset, - ValidatedDiscoveryScope, ) from chord_metadata_service.logger import logger from chord_metadata_service.phenopackets.api_views import BIOSAMPLE_PREFETCH, PHENOPACKET_PREFETCH @@ -59,7 +60,7 @@ OUTPUT_FORMAT_BENTO_SEARCH_RESULT = "bento_search_result" -class IndividualViewSet(viewsets.ModelViewSet): +class IndividualViewSet(BentoAuthzScopedModelViewSet): """ get: Return a list of all existing individuals @@ -68,6 +69,7 @@ class IndividualViewSet(viewsets.ModelViewSet): Create a new individual """ + serializer_class = IndividualSerializer pagination_class = LargeResultsSetPagination renderer_classes = ( @@ -80,14 +82,32 @@ class IndividualViewSet(viewsets.ModelViewSet): filterset_class = IndividualFilter ordering_fields = ["id"] search_fields = ["sex"] - queryset = Individual.objects.all().prefetch_related( - *(f"biosamples__{p}" for p in BIOSAMPLE_PREFETCH), - *(f"phenopackets__{p}" for p in PHENOPACKET_PREFETCH if p != "subject"), - ).order_by("id") lookup_value_regex = MODEL_ID_PATTERN + data_type = dts.DATA_TYPE_PHENOPACKET + + def permission_from_request(self, request: DrfRequest) -> Permission | None: + if self.action == "phenopackets": + # GET or POST; either way, we're querying data for this action + return P_QUERY_DATA + return super().permission_from_request(request) + + @async_to_sync + async def get_queryset(self): + scope = await get_request_discovery_scope(self.request) + return ( + Individual.get_model_scoped_queryset(scope) + .prefetch_related( + *(f"biosamples__{p}" for p in BIOSAMPLE_PREFETCH), + *(f"phenopackets__{p}" for p in PHENOPACKET_PREFETCH if p != "subject"), + ) + .order_by("id") + ) + def list(self, request, *args, **kwargs): if request.query_params.get("format") == OUTPUT_FORMAT_BENTO_SEARCH_RESULT: + scope = async_to_sync(get_request_discovery_scope)(self.request) + start = datetime.now() # filterset applies filtering from the GET parameters filterset = self.filterset_class(request.query_params, queryset=self.queryset) @@ -99,14 +119,20 @@ def list(self, request, *args, **kwargs): individual_ids = filterset.qs.values_list("id", flat=True) # TODO: code duplicated from chord/view_search.py biosamples_experiments_details = get_biosamples_with_experiment_details(individual_ids) - qs = Phenopacket.objects.filter(subject__id__in=individual_ids).values( - "subject_id", - alternate_ids=Coalesce(F("subject__alternate_ids"), []) - ).annotate( - num_experiments=Count("biosamples__experiment"), - biosamples=Coalesce( - ArrayAgg("biosamples__id", distinct=True, filter=Q(biosamples__id__isnull=False)), - [] + qs = ( + Phenopacket + .get_model_scoped_queryset(scope) + .filter(subject__id__in=individual_ids) + .values( + "subject_id", + alternate_ids=Coalesce(F("subject__alternate_ids"), []) + ) + .annotate( + num_experiments=Count("biosamples__experiment"), + biosamples=Coalesce( + ArrayAgg("biosamples__id", distinct=True, filter=Q(biosamples__id__isnull=False)), + [] + ) ) ) experiments_with_biosamples = build_experiments_by_subject(biosamples_experiments_details) @@ -123,10 +149,12 @@ def list(self, request, *args, **kwargs): @action(detail=True, methods=["GET", "POST"]) def phenopackets(self, request: DrfRequest, *_args, **_kwargs): + scope = async_to_sync(get_request_discovery_scope)(request) + individual = 
self.get_object() phenopackets = ( - Phenopacket.objects + Phenopacket.get_model_scoped_queryset(scope) .filter(subject=individual) .prefetch_related(*PHENOPACKET_PREFETCH) .order_by("id") @@ -139,15 +167,7 @@ def phenopackets(self, request: DrfRequest, *_args, **_kwargs): ) -class BatchViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): - """ - A viewset that only implements the 'list' action. - To be used with the BatchListRouter which maps the POST method to .list() - """ - pass - - -class IndividualBatchViewSet(BatchViewSet): +class IndividualBatchViewSet(BentoAuthzScopedModelGenericListViewSet): serializer_class = IndividualSerializer pagination_class = BatchResultsSetPagination @@ -160,11 +180,17 @@ class IndividualBatchViewSet(BatchViewSet): # Override to infer the renderer based on a `format` argument from the POST request body content_negotiation_class = FormatInPostContentNegotiation - def get_queryset(self): + data_type = dts.DATA_TYPE_PHENOPACKET + + @async_to_sync + async def get_queryset(self): + scope = await get_request_discovery_scope(self.request) + individual_ids = self.request.data.get("id", None) filter_by_id = {"id__in": individual_ids} if individual_ids else {} queryset = ( - Individual.objects + Individual + .get_model_scoped_queryset(scope) .filter(**filter_by_id) .prefetch_related(*(f"phenopackets__{p}" for p in PHENOPACKET_PREFETCH if p != "subject")) .order_by("id") @@ -182,7 +208,7 @@ async def public_discovery_filter_queryset( request: DrfRequest, dt_permissions: DataTypeDiscoveryPermissions, queryset: QuerySet, -) -> QuerySet: +) -> tuple[QuerySet, list[str]]: """ Process query parameters, check validity, and filter the queryset by the passed parameters. :param discovery_scope: Discovery scope for the queryset we're filtering. @@ -248,7 +274,7 @@ async def public_discovery_filter_queryset( # recursion queryset = filter_queryset_field_value(queryset, field_props, value) - return queryset + return queryset, queried_fields # noinspection PyMethodMayBeStatic @@ -289,10 +315,10 @@ async def get(self, request, *_args, **_kwargs): perm_pheno_query_data = dt_perms_pheno["data"] # Get individuals filtered to the requested scope - base_qs = get_public_model_scoped_queryset(discovery_scope, "individual") + base_qs = Individual.get_model_scoped_queryset(discovery_scope) try: - filtered_qs = await public_discovery_filter_queryset( + filtered_qs, queried_fields = await public_discovery_filter_queryset( discovery_scope, request, dt_permissions, base_qs ) except EmptyDiscoveryException: @@ -311,12 +337,14 @@ async def get(self, request, *_args, **_kwargs): # 0 count means insufficient data if we only have counts permissions, but means a true 0 if we have full # data permissions. 
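To make the zero-count ambiguity described above concrete, here is a simplified sketch of the censorship decision. The function and parameter names are illustrative only; they stand in for the real `get_threshold`/permissions machinery rather than reproducing it.

```python
def interpret_public_count(count: int, threshold: int, has_full_data_perms: bool) -> int | None:
    """Return a publishable count, or None when the count must be censored."""
    if has_full_data_perms:
        # Full data permissions: no censorship applies, so 0 genuinely means
        # zero matching individuals.
        return count
    # Counts-only permissions: any value at or below the threshold (including
    # a true 0) is reported as "insufficient data" to limit re-identification.
    return count if count > threshold else None
```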
logger.info( - f"Public individuals endpoint recieved {len(request.query_params)} query params which resulted in " + f"Public individuals endpoint queried fields {queried_fields} which resulted in " f"sub-threshold count: {ind_qct} <= {get_threshold(discovery, dt_perms_pheno)} " f"({repr(discovery_scope)})") authz_middleware.mark_authz_done(request) return Response(dres.INSUFFICIENT_DATA_AVAILABLE) + logger.info(f"Public individuals search queried fields {queried_fields}, resulting in {ind_qct} individuals") + (tissues_count, sampled_tissues), (experiments_count, experiment_types) = await asyncio.gather( individual_biosample_tissue_stats(filtered_qs, discovery, dt_perms_pheno), individual_experiment_type_stats(filtered_qs, discovery, dt_perms_exp), diff --git a/chord_metadata_service/patients/models.py b/chord_metadata_service/patients/models.py index c4c681d41..e2f7035c9 100644 --- a/chord_metadata_service/patients/models.py +++ b/chord_metadata_service/patients/models.py @@ -2,10 +2,12 @@ from django.db import models from django.db.models import JSONField from django.contrib.postgres.fields import ArrayField +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel from chord_metadata_service.restapi.models import BaseTimeStamp, IndexableMixin, SchemaType, BaseExtraProperties from chord_metadata_service.restapi.schemas import TIME_ELEMENT_SCHEMA from chord_metadata_service.restapi.validators import JsonSchemaValidator, ontology_validator from .values import PatientStatus, Sex, KaryotypicSex +from ..discovery.types import ModelScopeFilters class VitalStatus(BaseTimeStamp, IndexableMixin): @@ -21,13 +23,26 @@ class VitalStatus(BaseTimeStamp, IndexableMixin): " after their primary diagnosis") -class Individual(BaseExtraProperties, BaseTimeStamp, IndexableMixin): +class Individual(BaseExtraProperties, BaseTimeStamp, BaseScopeableModel, IndexableMixin): """ Class to store demographic information about an Individual (Patient) """ @property def schema_type(self) -> SchemaType: return SchemaType.INDIVIDUAL + @staticmethod + def get_scope_filters() -> ModelScopeFilters: + return { + "project": { + "filter": "phenopackets__dataset__project_id", + "prefetch_related": ("phenopackets__dataset",) + }, + "dataset": { + "filter": "phenopackets__dataset_id", + "prefetch_related": ("phenopackets",) + } + } + def get_project_id(self) -> str | None: if not self.phenopackets.count(): # Need to wait for phenopacket to exist diff --git a/chord_metadata_service/patients/tests/test_api.py b/chord_metadata_service/patients/tests/test_api.py index d9a6eee54..f506fc1c9 100644 --- a/chord_metadata_service/patients/tests/test_api.py +++ b/chord_metadata_service/patients/tests/test_api.py @@ -1,5 +1,4 @@ import csv -import json import io import random import uuid @@ -8,7 +7,6 @@ from django.urls import reverse from django.test import TestCase, override_settings from rest_framework import status -from rest_framework.test import APITestCase from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase from chord_metadata_service.chord import models as cm from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1 @@ -34,7 +32,7 @@ CONFIG_PUBLIC_TEST_NO_THRESHOLD["rules"]["count_threshold"] = 0 -class CreateIndividualTest(APITestCase): +class CreateIndividualTest(AuthzAPITestCase): """ Test module for creating an Individual. """ def setUp(self): @@ -45,28 +43,24 @@ def setUp(self): def test_create_individual(self): """ POST a new individual. 
""" - response = self.client.post( - reverse('individuals-list'), - data=json.dumps(self.valid_payload), - content_type='application/json' - ) + response = self.one_authz_post(reverse('individuals-list'), json=self.valid_payload) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(Individual.objects.count(), 1) self.assertEqual(Individual.objects.get().id, 'patient:1') + def test_create_individual_forbidden(self): + response = self.one_no_authz_post(reverse('individuals-list'), json=self.valid_payload) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + def test_create_invalid_individual(self): """ POST a new individual with invalid data. """ - invalid_response = self.client.post( - reverse('individuals-list'), - data=json.dumps(self.invalid_payload), - content_type='application/json' - ) + invalid_response = self.one_authz_post(reverse('individuals-list'), json=self.invalid_payload) self.assertEqual(invalid_response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(Individual.objects.count(), 0) -class UpdateIndividualTest(APITestCase): +class UpdateIndividualTest(AuthzAPITestCase): """ Test module for updating an existing Individual record. """ def setUp(self): @@ -95,31 +89,30 @@ def setUp(self): def test_update_individual(self): """ PUT new data in an existing Individual record. """ - response = self.client.put( - reverse( - 'individuals-detail', - kwargs={'pk': self.individual_one.id} - ), - data=json.dumps(self.put_valid_payload), - content_type='application/json' + response = self.one_authz_put( + reverse('individuals-detail', kwargs={'pk': self.individual_one.id}), + json=self.put_valid_payload ) self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_update_individual_forbidden(self): + response = self.one_no_authz_put( + reverse('individuals-detail', kwargs={'pk': self.individual_one.id}), + json=self.put_valid_payload + ) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + def test_update_invalid_individual(self): """ PUT new invalid data in an existing Individual record. """ - response = self.client.put( - reverse( - 'individuals-detail', - kwargs={'pk': self.individual_one.id} - ), - data=json.dumps(self.invalid_payload), - content_type='application/json' + response = self.one_authz_put( + reverse('individuals-detail', kwargs={'pk': self.individual_one.id}), + json=self.invalid_payload, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) -class DeleteIndividualTest(APITestCase): +class DeleteIndividualTest(AuthzAPITestCase): """ Test module for deleting an existing Individual record. """ def setUp(self): @@ -128,34 +121,106 @@ def setUp(self): def test_delete_individual(self): """ DELETE an existing Individual record. """ - response = self.client.delete( - reverse( - 'individuals-detail', - kwargs={'pk': self.individual_one.id} - ) + response = self.one_authz_delete( + reverse('individuals-detail', kwargs={'pk': self.individual_one.id}) ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + def test_delete_individual_forbidden(self): + response = self.one_no_authz_delete( + reverse('individuals-detail', kwargs={'pk': self.individual_one.id}) + ) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + def test_delete_non_existing_individual(self): """ DELETE a non-existing Individual record. 
""" - response = self.client.delete( - reverse( - 'individuals-detail', - kwargs={'pk': 'patient:what'} - ) + response = self.one_authz_delete( + reverse('individuals-detail', kwargs={'pk': 'patient:what'}) ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) -class IndividualCSVRendererTest(APITestCase): +class IndividualListFilterTest(AuthzAPITestCase): + + def setUp(self): + self.project_1 = cm.Project.objects.create(title="Project 1", description="p1") + self.dataset_1 = cm.Dataset.objects.create(**{ + "title": "Dataset 1", + "description": "Test Dataset 1", + "data_use": VALID_DATA_USE_1, + "project": self.project_1 + }) + + self.project_2 = cm.Project.objects.create(title="Project 2", description="p2") + self.dataset_2 = cm.Dataset.objects.create(**{ + "title": "Dataset 2", + "description": "Test Dataset 2", + "data_use": VALID_DATA_USE_1, + "project": self.project_2 + }) + + # ---- + + self.md1 = ph_m.MetaData.objects.create(**ph_c.VALID_META_DATA_1) + + self.ind1 = Individual.objects.create(**c.VALID_INDIVIDUAL) + self.pheno1 = ph_m.Phenopacket.objects.create(**ph_c.valid_phenopacket(self.ind1, self.md1, "phenopacket:1")) + self.pheno1.dataset = self.dataset_1 + self.pheno1.save() + + self.ind2 = Individual.objects.create(**c.VALID_INDIVIDUAL_2) + self.pheno2 = ph_m.Phenopacket.objects.create(**ph_c.valid_phenopacket(self.ind2, self.md1, "phenopacket:2")) + self.pheno2.dataset = self.dataset_2 + self.pheno2.save() + + def test_individuals_list(self): + r = self.one_authz_get("/api/individuals") + data = r.json() + self.assertEqual(len(data["results"]), 2) + + def test_individuals_project_scope(self): + r = self.one_authz_get(f"/api/individuals?project={self.project_1.identifier}") + data = r.json() + self.assertEqual(len(data["results"]), 1) + self.assertEqual(data["results"][0]["id"], self.ind1.id) + + r = self.one_authz_get(f"/api/individuals?project={self.project_2.identifier}") + data = r.json() + self.assertEqual(len(data["results"]), 1) + self.assertEqual(data["results"][0]["id"], self.ind2.id) + + def test_individuals_dataset_scope(self): + r = self.one_authz_get( + f"/api/individuals?project={self.project_1.identifier}&dataset={self.dataset_1.identifier}" + ) + data = r.json() + self.assertEqual(len(data["results"]), 1) + self.assertEqual(data["results"][0]["id"], self.ind1.id) + + r = self.one_authz_get( + f"/api/individuals?project={self.project_2.identifier}&dataset={self.dataset_2.identifier}" + ) + data = r.json() + self.assertEqual(len(data["results"]), 1) + self.assertEqual(data["results"][0]["id"], self.ind2.id) + + def test_individuals_forbidden(self): + r = self.one_no_authz_get("/api/individuals") + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + + r = self.one_no_authz_get(f"/api/individuals?project={self.project_1.identifier}") + self.assertEqual(r.status_code, status.HTTP_403_FORBIDDEN) + + +class IndividualCSVRendererTest(AuthzAPITestCase): """ Test csv export for Individuals. 
""" def setUp(self): self.individual_one = Individual.objects.create(**c.VALID_INDIVIDUAL) def test_csv_export(self): - get_resp = self.client.get('/api/individuals?format=csv') + get_resp = self.one_authz_get('/api/individuals?format=csv') self.assertEqual(get_resp.status_code, status.HTTP_200_OK) content = get_resp.content.decode('utf-8') cvs_reader = csv.reader(io.StringIO(content)) @@ -166,10 +231,22 @@ def test_csv_export(self): 'age', 'diseases', 'created', 'updated']: self.assertIn(column, [column_name.lower() for column_name in headers]) + def test_csv_export_forbidden(self): + get_resp = self.one_no_authz_get('/api/individuals?format=csv') + self.assertEqual(get_resp.status_code, status.HTTP_403_FORBIDDEN) + -class IndividualWithPhenopacketSearchTest(APITestCase): +class IndividualWithPhenopacketSearchTest(AuthzAPITestCase): """ Test for api/individuals?search= """ + search_test_params = ( + ("search=P49Y", 1, None), + ("search=NCBITaxon:9606", 2, None), + # 5 fields in the bento search response: + ("search=P49Y&format=bento_search_result", 1, 5), + ("search=NCBITaxon:9606&format=bento_search_result", 1, 5), # only 1 of the individuals has a phenopacket + ) + def setUp(self): self.individual_one = Individual.objects.create(**c.VALID_INDIVIDUAL) self.individual_two = Individual.objects.create(**c.VALID_INDIVIDUAL_2) @@ -178,36 +255,43 @@ def setUp(self): **ph_c.valid_phenopacket(subject=self.individual_one, meta_data=self.metadata_1) ) - def test_search(self): # test full-text search - get_resp_1 = self.client.get('/api/individuals?search=P49Y') - self.assertEqual(get_resp_1.status_code, status.HTTP_200_OK) - response_obj_1 = get_resp_1.json() - self.assertEqual(len(response_obj_1['results']), 1) - - get_resp_2 = self.client.get('/api/individuals?search=NCBITaxon:9606') - self.assertEqual(get_resp_2.status_code, status.HTTP_200_OK) - response_obj_2 = get_resp_2.json() - self.assertEqual(len(response_obj_2['results']), 2) - - def test_search_bento_search_format(self): # test full-text search - bento search result format - get_resp_1 = self.client.get('/api/individuals?search=P49Y&format=bento_search_result') - self.assertEqual(get_resp_1.status_code, status.HTTP_200_OK) - response_obj_1 = get_resp_1.json() - self.assertEqual(len(response_obj_1['results']), 1) - self.assertEqual(len(response_obj_1['results'][0]), 5) # 5 fields in the bento search response + def test_search(self): # test full-text search (standard + bento search format) + for params in self.search_test_params: + with self.subTest(params=params): + res = self.one_authz_get(f"/api/individuals?{params[0]}") + self.assertEqual(res.status_code, status.HTTP_200_OK) + res_data = res.json() + self.assertEqual(len(res_data["results"]), params[1]) + if (n_keys := params[2]) is not None: + self.assertEqual(len(res_data["results"][0]), n_keys) + + def test_search_forbidden(self): + for params in self.search_test_params: + with self.subTest(params=params): + res = self.one_no_authz_get(f"/api/individuals?{params[0]}") + self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN) def test_individual_phenopackets(self): - get_resp = self.client.get(f"/api/individuals/{self.individual_one.id}/phenopackets") + get_resp = self.one_authz_get(f"/api/individuals/{self.individual_one.id}/phenopackets") self.assertEqual(get_resp.status_code, status.HTTP_200_OK) response_obj_1 = get_resp.json() self.assertEqual(len(response_obj_1), 1) # 1 phenopacket for individual - post_resp = 
self.client.post(f"/api/individuals/{self.individual_one.id}/phenopackets?attachment=1") + def test_individual_phenopackets_forbidden(self): + get_resp = self.one_no_authz_get(f"/api/individuals/{self.individual_one.id}/phenopackets") + self.assertEqual(get_resp.status_code, status.HTTP_403_FORBIDDEN) + + def test_individual_phenopackets_attachment(self): + post_resp = self.one_authz_post(f"/api/individuals/{self.individual_one.id}/phenopackets?attachment=1") self.assertEqual(post_resp.status_code, status.HTTP_200_OK) self.assertIn("attachment; filename=", post_resp.headers.get("Content-Disposition", "")) response_obj_2 = post_resp.json() self.assertEqual(len(response_obj_2), 1) # 1 phenopacket for individual, still + def test_individual_phenopackets_attachment_forbidden(self): + post_resp = self.one_no_authz_post(f"/api/individuals/{self.individual_one.id}/phenopackets?attachment=1") + self.assertEqual(post_resp.status_code, status.HTTP_403_FORBIDDEN) + # Note: the next five tests use the same setUp method. Initially they were # all combined in the same class. But this caused bugs with regard to unavailable @@ -215,7 +299,7 @@ def test_individual_phenopackets(self): # One hypothesis is that using POST requests without actually # adding data to the database creates unexpected behaviour with one of the # libraries used during the testing (?) maybe at teardown time. -class BatchIndividualsCSVTest(APITestCase): +class BatchIndividualsCSVTest(AuthzAPITestCase): """ Test for getting a batch of individuals as csv. """ def setUp(self): @@ -223,12 +307,15 @@ def setUp(self): self.individual_two = Individual.objects.create(**c.VALID_INDIVIDUAL_2) def test_batch_individuals_csv_no_ids(self): - data = json.dumps({'format': 'csv'}) - response = self.client.post(reverse('batch/individuals'), data, content_type='application/json') + response = self.one_authz_post(reverse('batch/individuals'), json={'format': 'csv'}) self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_batch_individuals_csv_forbidden(self): + response = self.one_no_authz_post(reverse('batch/individuals'), json={'format': 'csv'}) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + -class BatchIndividualsCSVTest1(APITestCase): +class BatchIndividualsCSVTest1(AuthzAPITestCase): """ Test for getting a batch of individuals as csv. """ def setUp(self): @@ -236,8 +323,10 @@ def setUp(self): self.individual_two = Individual.objects.create(**c.VALID_INDIVIDUAL_2) def test_batch_individuals_csv(self): - data = json.dumps({'format': 'csv', 'id': [self.individual_one.id, self.individual_two.id]}) - get_resp = self.client.post(reverse('batch/individuals'), data, content_type='application/json') + get_resp = self.one_authz_post( + reverse('batch/individuals'), + json={'format': 'csv', 'id': [self.individual_one.id, self.individual_two.id]} + ) self.assertEqual(get_resp.status_code, status.HTTP_200_OK) content = get_resp.content.decode('utf-8') @@ -252,7 +341,7 @@ def test_batch_individuals_csv(self): self.assertEqual(resp_body[i][:-2], correct_body[i][:-2]) -class BatchIndividualsCSVTest2(APITestCase): +class BatchIndividualsCSVTest2(AuthzAPITestCase): """ Test for getting a batch of individuals as csv. 
""" def setUp(self): @@ -260,12 +349,11 @@ def setUp(self): self.individual_two = Individual.objects.create(**c.VALID_INDIVIDUAL_2) def test_batch_individuals_csv_invalid_ids(self): - data = json.dumps({'format': 'csv', 'id': ['invalid']}) - response = self.client.post(reverse('batch/individuals'), data, content_type='application/json') + response = self.one_authz_post(reverse('batch/individuals'), json={'format': 'csv', 'id': ['invalid']}) self.assertEqual(response.status_code, status.HTTP_200_OK) -class BatchIndividualsCSVTest3(APITestCase): +class BatchIndividualsCSVTest3(AuthzAPITestCase): """ Test for getting a batch of individuals as csv. """ def setUp(self): @@ -273,15 +361,17 @@ def setUp(self): self.individual_two = Individual.objects.create(**c.VALID_INDIVIDUAL_2) def test_batch_individuals_csv_invalid_ids(self): - data = json.dumps({'format': 'csv', 'id': [self.individual_one.id, 'invalid', "I don't exist"]}) - response = self.client.post(reverse('batch/individuals'), data, content_type='application/json') + response = self.one_authz_post( + reverse('batch/individuals'), + json={'format': 'csv', 'id': [self.individual_one.id, 'invalid', "I don't exist"]}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) lines = response.content.decode('utf8').split('\n') nb_lines = len([line for line in lines if line]) # ignore trailing line break self.assertEqual(nb_lines, 2) # 2 lines expected: header + individual_one -class BatchIndividualsCSVTest4(APITestCase): +class BatchIndividualsCSVTest4(AuthzAPITestCase): """ Test for getting a batch of individuals as csv. """ def setUp(self): @@ -290,8 +380,8 @@ def setUp(self): def test_batch_individuals_csv_invalid_format(self): # defaults to default renderer - data = json.dumps({'format': 'invalid', 'id': [self.individual_one.id]}) - response = self.client.post(reverse('batch/individuals'), data, content_type='application/json') + response = self.one_authz_post( + reverse('batch/individuals'), json={'format': 'invalid', 'id': [self.individual_one.id]}) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/chord_metadata_service/phenopackets/api_views.py b/chord_metadata_service/phenopackets/api_views.py index ebb9500a9..d49678c4f 100644 --- a/chord_metadata_service/phenopackets/api_views.py +++ b/chord_metadata_service/phenopackets/api_views.py @@ -1,10 +1,17 @@ +from asgiref.sync import async_to_sync +from bento_lib.auth.permissions import P_QUERY_DATA from django_filters.rest_framework import DjangoFilterBackend -from rest_framework import viewsets +from drf_spectacular.utils import extend_schema, inline_serializer +from rest_framework import serializers, status from rest_framework.settings import api_settings from rest_framework.decorators import api_view, permission_classes +from rest_framework.request import Request as DrfRequest from rest_framework.response import Response from chord_metadata_service.authz.permissions import BentoAllowAny +from chord_metadata_service.authz.viewset import BentoAuthzScopedModelViewSet, BentoAuthzScopedModelGenericListViewSet +from chord_metadata_service.chord.data_types import DATA_TYPE_PHENOPACKET +from chord_metadata_service.discovery.scope import get_request_discovery_scope from chord_metadata_service.restapi.api_renderers import ( PhenopacketsRenderer, BiosamplesCSVRenderer, @@ -14,12 +21,13 @@ from chord_metadata_service.restapi.pagination import LargeResultsSetPagination, BatchResultsSetPagination from chord_metadata_service.restapi.negociation import 
FormatInPostContentNegotiation from chord_metadata_service.phenopackets.schemas import PHENOPACKET_SCHEMA, phenopacket_resolver, phenopacket_base_uri + from . import models as m, serializers as s, filters as f -from drf_spectacular.utils import extend_schema, inline_serializer -from rest_framework import serializers, status -class PhenopacketsModelViewSet(viewsets.ModelViewSet): +class PhenopacketsModelViewSet(BentoAuthzScopedModelViewSet): + data_type = DATA_TYPE_PHENOPACKET + renderer_classes = (*api_settings.DEFAULT_RENDERER_CLASSES, PhenopacketsRenderer) pagination_class = LargeResultsSetPagination @@ -38,14 +46,23 @@ class BiosampleViewSet(PhenopacketsModelViewSet): post: Create a new biosample """ + serializer_class = s.BiosampleSerializer filter_backends = [DjangoFilterBackend] filterset_class = f.BiosampleFilter - queryset = m.Biosample.objects.all().prefetch_related(*BIOSAMPLE_PREFETCH).order_by("id") lookup_value_regex = MODEL_ID_PATTERN + # required to have discovery-scope-enabled queryset here to use a BentoAuthzScopedModelViewSet-derived viewset + @async_to_sync + async def get_queryset(self): + return ( + m.Biosample.get_model_scoped_queryset(await get_request_discovery_scope(self.request)) + .prefetch_related(*BIOSAMPLE_PREFETCH) + .order_by("id") + ) -class BiosampleBatchViewSet(PhenopacketsModelViewSet): + +class BiosampleBatchViewSet(BentoAuthzScopedModelGenericListViewSet): """ get: Return a list of all existing biosamples @@ -53,6 +70,7 @@ class BiosampleBatchViewSet(PhenopacketsModelViewSet): post: Filter biosamples by a list of ids """ + serializer_class = s.BiosampleSerializer filter_backends = [DjangoFilterBackend] filterset_class = f.BiosampleFilter @@ -65,24 +83,36 @@ class BiosampleBatchViewSet(PhenopacketsModelViewSet): ) content_negotiation_class = FormatInPostContentNegotiation - def _get_filtered_queryset(self, ids_list=None): - queryset = m.Biosample.objects.all() + data_type = DATA_TYPE_PHENOPACKET + + @async_to_sync + async def _get_filtered_queryset(self, ids_list: list[str] | None = None): + # We pre-filter biosamples to the scope. This way, if they specify an ID outside the scope, it's just ignored + # - the requester won't even know if it exists. + queryset = m.Biosample.get_model_scoped_queryset(await get_request_discovery_scope(self.request)) if ids_list: queryset = queryset.filter(id__in=ids_list) - queryset = queryset.prefetch_related(*BIOSAMPLE_PREFETCH) \ - .order_by("id") - - return queryset + return queryset.prefetch_related(*BIOSAMPLE_PREFETCH).order_by("id") def get_queryset(self): - individual_ids = self.request.data.get("id", None) - return self._get_filtered_queryset(ids_list=individual_ids) + return self._get_filtered_queryset(ids_list=self.request.data.get("id", None)) + + def permission_from_request(self, request: DrfRequest): + if self.action in ("list", "create"): + # Here, "create" maps to the data query permission because we use create(..) (i.e., POST) as a way to run a + # query with a large body. + return P_QUERY_DATA + return None # viewset not implemented for any other action def create(self, request, *args, **kwargs): - ids_list = request.data.get('id', []) - queryset = self._get_filtered_queryset(ids_list=ids_list) + """ + Despite the name, this is a POST request for returning a list of biosamples. Since query parameters have a + maximum size, POST requests can be used for large batches. 
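# Client-side sketch of the batch query described in the docstring above; hypothetical, not
# part of the changeset. The body shape ({"id": [...], "format": "csv"}) comes from this
# diff's tests; the URL, token, and `requests` dependency are assumptions.
import requests

ids = [f"katsu.biosample_id:{i}" for i in range(1, 5000)]  # too long for a GET query string

resp = requests.post(
    "http://katsu.local/api/batch/biosamples",  # assumed mounting of the batch viewset
    json={"id": ids, "format": "csv"},          # format is negotiated from the POST body
    headers={"Authorization": "Bearer <token>"},
)
# IDs outside the requester's discovery scope are silently dropped, per the comment in
# _get_filtered_queryset above - the caller cannot distinguish "missing" from "forbidden".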
+ """ + + queryset = self._get_filtered_queryset(ids_list=request.data.get("id", [])) serializer = s.BiosampleSerializer(queryset, many=True) return Response(serializer.data, status=status.HTTP_200_OK) @@ -109,14 +139,22 @@ class PhenopacketViewSet(PhenopacketsModelViewSet): post: Create a new phenopacket - """ + serializer_class = s.PhenopacketSerializer filter_backends = [DjangoFilterBackend] filterset_class = f.PhenopacketFilter - queryset = m.Phenopacket.objects.all().prefetch_related(*PHENOPACKET_PREFETCH).order_by("id") lookup_value_regex = MODEL_ID_PATTERN + # required to have discovery-scope-enabled queryset here to use a BentoAuthzScopedModelViewSet-derived viewset + @async_to_sync + async def get_queryset(self): + return ( + m.Phenopacket.get_model_scoped_queryset(await get_request_discovery_scope(self.request)) + .prefetch_related(*PHENOPACKET_PREFETCH) + .order_by("id") + ) + @extend_schema( description="Chord phenopacket schema that can be shared with data providers", diff --git a/chord_metadata_service/phenopackets/filters.py b/chord_metadata_service/phenopackets/filters.py index 71aecf8ed..04711fb74 100644 --- a/chord_metadata_service/phenopackets/filters.py +++ b/chord_metadata_service/phenopackets/filters.py @@ -44,21 +44,6 @@ def filter_extra_properties(qs, name, value): return qs.filter(extra_properties__icontains=value) -def authorize_datasets(qs, name, value): - """ - Filter by authorized datasets. - - If value is 'NO_DATASETS_AUTHORIZED', returns no objects. - Otherwise, returns objects that are in the specified datasets. - """ - if value == 'NO_DATASETS_AUTHORIZED': - lookup = "__".join([name, "in"]) - return qs.filter(**{lookup: []}) - else: - lookup = "__".join([name, "in"]) - return qs.filter(**{lookup: value.split(',')}).distinct() - - def filter_datasets(qs, name, value): """ Filters by datasets. 
@@ -89,10 +74,6 @@ class MetaDataFilter(django_filters.rest_framework.FilterSet): extra_properties = django_filters.CharFilter(method=filter_extra_properties, label="Extra properties") datasets = django_filters.CharFilter( method=filter_datasets, field_name="phenopacket__dataset__title", label="Datasets") - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, field_name="phenopacket__dataset__title", - label="Authorized datasets" - ) class Meta: model = m.MetaData @@ -121,11 +102,6 @@ class PhenotypicFeatureFilter(django_filters.rest_framework.FilterSet): field_name="phenopacket__dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="phenopacket__dataset__title", - label="Authorized datasets" - ) class Meta: model = m.PhenotypicFeature @@ -166,11 +142,6 @@ class DiseaseFilter(django_filters.rest_framework.FilterSet): field_name="phenopacket__dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="phenopacket__dataset__title", - label="Authorized datasets" - ) class Meta: model = m.Disease @@ -199,11 +170,6 @@ class BiosampleFilter(django_filters.rest_framework.FilterSet): field_name="phenopacket__dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="phenopacket__dataset__title", - label="Authorized datasets" - ) procedure = django_filters.CharFilter( method=filter_time_element, field_name="procedure", label="Procedure") @@ -226,11 +192,6 @@ class PhenopacketFilter(django_filters.rest_framework.FilterSet): field_name="dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="dataset__title", - label="Authorized datasets" - ) class Meta: model = m.Phenopacket @@ -293,11 +254,6 @@ class DiagnosisFilter(django_filters.rest_framework.FilterSet): field_name="disease__phenopacket__dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="disease__phenopacket__dataset__title", - label="Authorized datasets" - ) class Meta: model = m.Diagnosis @@ -312,11 +268,6 @@ class InterpretationFilter(django_filters.rest_framework.FilterSet): field_name="phenopacket__dataset__title", label="Datasets" ) - authorized_datasets = django_filters.CharFilter( - method=authorize_datasets, - field_name="phenopacket__dataset__title", - label="Authorized datasets" - ) class Meta: model = m.Interpretation diff --git a/chord_metadata_service/phenopackets/models.py b/chord_metadata_service/phenopackets/models.py index 70184fe8f..ea1c2c3ae 100644 --- a/chord_metadata_service/phenopackets/models.py +++ b/chord_metadata_service/phenopackets/models.py @@ -3,6 +3,8 @@ from django.core.exceptions import ValidationError, ObjectDoesNotExist from django.db.models import JSONField from django.contrib.postgres.fields import ArrayField +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel, TOP_LEVEL_MODEL_SCOPE_FILTERS +from chord_metadata_service.discovery.types import ModelScopeFilters from chord_metadata_service.patients.models import Individual from chord_metadata_service.resources.models import Resource from chord_metadata_service.restapi.description_utils import rec_help @@ -13,6 +15,7 @@ ontology_validator, ontology_list_validator ) +from chord_metadata_service.restapi.schemas import TIME_ELEMENT_SCHEMA from . 
import descriptions as d from .schemas import ( EXPRESSION_SCHEMA, @@ -25,7 +28,6 @@ PHENOPACKET_MEDICAL_ACTION_SCHEMA, ) from .validators import vrs_variation_validator -from ..restapi.schemas import TIME_ELEMENT_SCHEMA ############################################################# @@ -39,8 +41,6 @@ class MetaData(BaseTimeStamp): """ Class to store structured definitions of the resources and ontologies used within the phenopacket - - FHIR: Metadata """ created_by = models.CharField(max_length=200, blank=True, null=True, default=None, @@ -131,13 +131,26 @@ def __str__(self): return str(self.id) -class Biosample(BaseExtraProperties, BaseTimeStamp, IndexableMixin): +class Biosample(BaseExtraProperties, BaseTimeStamp, IndexableMixin, BaseScopeableModel): """ Class to describe a unit of biological material FHIR: Specimen """ + @staticmethod + def get_scope_filters() -> ModelScopeFilters: + return { + "project": { + "filter": "phenopacket__dataset__project__identifier", + "prefetch_related": ("phenopacket__dataset__project",), + }, + "dataset": { + "filter": "phenopacket__dataset__identifier", + "prefetch_related": ("phenopacket__dataset",), + }, + } + id = models.CharField(primary_key=True, max_length=200, help_text=rec_help(d.BIOSAMPLE, "id")) # if Individual instance is deleted Biosample instance is deleted too individual = models.ForeignKey( @@ -409,7 +422,7 @@ def __str__(self): # # ############################################################# -class Phenopacket(BaseExtraProperties, BaseTimeStamp, IndexableMixin): +class Phenopacket(BaseExtraProperties, BaseTimeStamp, BaseScopeableModel, IndexableMixin): """ Class to aggregate Individual's experiments data @@ -425,6 +438,10 @@ class Meta: def schema_type(self) -> SchemaType: return SchemaType.PHENOPACKET + @staticmethod + def get_scope_filters() -> ModelScopeFilters: + return TOP_LEVEL_MODEL_SCOPE_FILTERS + def get_project_id(self) -> str | None: model = apps.get_model("chord.Project") try: diff --git a/chord_metadata_service/phenopackets/schemas.py b/chord_metadata_service/phenopackets/schemas.py index 849a0d3cd..988e38a5c 100644 --- a/chord_metadata_service/phenopackets/schemas.py +++ b/chord_metadata_service/phenopackets/schemas.py @@ -32,6 +32,7 @@ __all__ = [ + "phenopacket_base_uri", "PHENOPACKET_EXTERNAL_REFERENCE_SCHEMA", "PHENOPACKET_UPDATE_SCHEMA", "PHENOPACKET_META_DATA_SCHEMA", diff --git a/chord_metadata_service/phenopackets/summaries.py b/chord_metadata_service/phenopackets/summaries.py index 98a478abc..eead63be6 100644 --- a/chord_metadata_service/phenopackets/summaries.py +++ b/chord_metadata_service/phenopackets/summaries.py @@ -4,9 +4,9 @@ from chord_metadata_service.authz.types import DataPermissionsDict from chord_metadata_service.discovery.censorship import thresholded_count +from chord_metadata_service.discovery.scope import ValidatedDiscoveryScope from chord_metadata_service.discovery.stats import queryset_stats_for_field from chord_metadata_service.discovery.types import DiscoveryConfig -from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope from chord_metadata_service.patients.summaries import individual_summary from . 
import models diff --git a/chord_metadata_service/phenopackets/tests/test_api.py b/chord_metadata_service/phenopackets/tests/test_api.py index 22a0763e4..96827e445 100644 --- a/chord_metadata_service/phenopackets/tests/test_api.py +++ b/chord_metadata_service/phenopackets/tests/test_api.py @@ -1,24 +1,23 @@ import csv import io -import json from django.urls import reverse from rest_framework import status from rest_framework.test import APITestCase -from chord_metadata_service.phenopackets.schemas import PHENOPACKET_SCHEMA -from . import constants as c -from .. import models as m, serializers as s - -from chord_metadata_service.restapi.tests.utils import get_post_response +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase from chord_metadata_service.chord.models import Project, Dataset from chord_metadata_service.chord.ingest import WORKFLOW_INGEST_FUNCTION_MAP from chord_metadata_service.chord.workflows.metadata import WORKFLOW_PHENOPACKETS_JSON from chord_metadata_service.chord.tests.constants import VALID_DATA_USE_1 from chord_metadata_service.restapi.tests import constants as restapi_c +from . import constants as c +from ..schemas import PHENOPACKET_SCHEMA +from .. import models as m, serializers as s + -class CreateBiosampleTest(APITestCase): +class CreateBiosampleTest(AuthzAPITestCase): """ Test module for creating an Biosample. """ def setUp(self): @@ -65,30 +64,36 @@ def setUp(self): def test_create_biosample(self): """ POST a new biosample. """ - response = get_post_response('biosamples-list', self.valid_payload) + response = self.one_authz_post(reverse("biosamples-list"), json=self.valid_payload) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(m.Biosample.objects.count(), 1) self.assertEqual(m.Biosample.objects.get().id, 'katsu.biosample_id:1') + def test_create_biosample_forbidden(self): + """ POST a new biosample. """ + + response = self.one_no_authz_post(reverse("biosamples-list"), json=self.valid_payload) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + def test_create_invalid_biosample(self): """ POST a new biosample with invalid data. 
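# The migration pattern applied throughout this test file, distilled: each endpoint gains a
# positive case via one_authz_*() and a mirrored 403 case via one_no_authz_*(). The helper
# semantics are inferred from their use in this diff (they live in
# chord_metadata_service/authz/tests/helpers.py, which is not shown here), and the route
# below is hypothetical.
from django.urls import reverse
from rest_framework import status
from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase

class ExampleEndpointTest(AuthzAPITestCase):
    def test_example_create(self):
        res = self.one_authz_post(reverse("example-list"), json={"name": "ok"})
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)

    def test_example_create_forbidden(self):
        res = self.one_no_authz_post(reverse("example-list"), json={"name": "ok"})
        self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN)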
""" - invalid_response = get_post_response('biosamples-list', self.invalid_payload) + invalid_response = self.one_authz_post(reverse('biosamples-list'), self.invalid_payload) self.assertEqual( invalid_response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(m.Biosample.objects.count(), 0) - def test_seriliazer_validate_invalid(self): + def test_serializer_validate_invalid(self): serializer = s.BiosampleSerializer(data=self.invalid_payload) self.assertEqual(serializer.is_valid(), False) - def test_seriliazer_validate_valid(self): + def test_serializer_validate_valid(self): serializer = s.BiosampleSerializer(data=self.valid_payload) self.assertEqual(serializer.is_valid(), True) def test_update(self): # Create initial biosample - response = get_post_response('biosamples-list', self.valid_payload) + response = self.one_authz_post(reverse("biosamples-list"), json=self.valid_payload) biosample_id = response.data['id'] # Should be 1 @@ -96,12 +101,7 @@ def test_update(self): # Update the biosample.procedure.performed field self.valid_payload["procedure"]["performed"] = self.procedure_age_performed - # response = get_post_response('biosamples-list', self.valid_payload) - response = self.client.put( - f"/api/biosamples/{biosample_id}", - data=json.dumps(self.valid_payload), - content_type='application/json', - ) + response = self.one_authz_put(f"/api/biosamples/{biosample_id}", json=self.valid_payload) # Should be 1 as well post_update_count = m.Biosample.objects.all().count() @@ -110,24 +110,28 @@ def test_update(self): self.assertEqual(response.data['procedure']['performed'], self.procedure_age_performed) -class BatchBiosamplesCSVTest(APITestCase): +class BatchBiosamplesCSVTest(AuthzAPITestCase): def setUp(self): self.individual = m.Individual.objects.create(**c.VALID_INDIVIDUAL_1) self.valid_payload = c.valid_biosample_1(self.individual) self.biosample = m.Biosample.objects.create(**self.valid_payload) self.view = 'batch/biosamples-list' + self.post_biosamples_body = { + 'id': [str(self.biosample.id)], + 'format': 'csv' + } - def test_get_all_biosamples(self): - response = self.client.get(reverse(self.view)) + def test_get_all_biosamples_batch(self): + response = self.one_authz_get(reverse(self.view)) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data['results']), 1), + self.assertEqual(len(response.data['results']), 1) + + def test_get_all_biosamples_batch_forbidden(self): + response = self.one_no_authz_get(reverse(self.view)) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_post_biosamples_with_ids(self): - data = { - 'id': [str(self.biosample.id)], - 'format': 'csv' - } - response = get_post_response(self.view, data) + response = self.one_authz_post(reverse(self.view), json=self.post_biosamples_body) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 1) content = response.content.decode('utf-8') @@ -140,8 +144,13 @@ def test_post_biosamples_with_ids(self): 'created', 'updated', 'individual']: self.assertIn(column, [column_name.lower() for column_name in headers]) + def test_post_biosamples_with_ids_forbidden(self): + response = self.one_no_authz_post(reverse(self.view), json=self.post_biosamples_body) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + # TODO: test content -class CreatePhenopacketTest(APITestCase): + +class CreatePhenopacketTest(AuthzAPITestCase): def setUp(self): individual = m.Individual.objects.create(**c.VALID_INDIVIDUAL_1) @@ 
-152,17 +161,22 @@ def setUp(self): subject=self.subject, meta_data=self.metadata) - def test_phenopacket(self): - response = get_post_response('phenopackets-list', self.phenopacket) + def test_phenopacket_create(self): + response = self.one_authz_post(reverse("phenopackets-list"), json=self.phenopacket) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(m.Phenopacket.objects.count(), 1) + def test_phenopacket_create_forbidden(self): + response = self.one_no_authz_post(reverse("phenopackets-list"), json=self.phenopacket) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(m.Phenopacket.objects.count(), 0) + def test_serializer(self): serializer = s.PhenopacketSerializer(data=self.phenopacket) self.assertEqual(serializer.is_valid(), True) -class GetPhenopacketsApiTest(APITestCase): +class GetPhenopacketsApiTest(AuthzAPITestCase): """ Test that we can retrieve phenopackets with valid dataset titles or without dataset title. """ @@ -182,107 +196,61 @@ def setUp(self) -> None: WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_PHENOPACKETS_JSON]( restapi_c.VALID_PHENOPACKET_2, self.d2.identifier) - def test_get_phenopackets(self): + def test_get_phenopackets_no_access(self): """ - Test that we can get 2 phenopackets without a dataset title. + Test that we cannot get the complete set of phenopackets without authorization. """ - response = self.client.get('/api/phenopackets') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 2) + response = self.one_no_authz_get("/api/phenopackets") + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - def test_get_phenopackets_with_valid_dataset(self): - """ - Test that we can get 1 phenopacket under dataset_1. - """ - response = self.client.get('/api/phenopackets?datasets=dataset_1') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 1) - - def test_get_phenopackets_with_valid_dataset_2(self): - """ - Test that we can get 1 phenopacket under dataset_2. - """ - response = self.client.get('/api/phenopackets?datasets=dataset_2') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 1) - - def test_get_phenopackets_with_valid_dataset_3(self): + def test_get_phenopackets(self): """ - Test that we can get 2 phenopackets under both dataset_1 and dataset_2. + Test that we can get 2 phenopackets without a dataset title. """ - response = self.client.get('/api/phenopackets?datasets=dataset_1,dataset_2') + response = self.one_authz_get("/api/phenopackets") self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 2) + self.assertEqual(len(response.json()["results"]), 2) - def test_get_phenopackets_with_valid_dataset_4(self): + def test_get_phenopackets_with_valid_dataset_via_scope(self): """ - Test that we can get 1 phenopacket under dataset_1 and an invalid dataset. + Test that we can get 1 phenopacket under dataset_1 via discovery scoping. 
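# Two query mechanisms coexist after this change, as the surrounding tests show:
# ?dataset=<identifier> is a *discovery scope* (validated against authz, so an unauthorized
# request yields 403 rather than an empty list), while ?datasets=<title> is an ordinary
# Django filter on dataset title. A sketch of the scope path (both functions appear in this
# diff; the wrapper view itself is hypothetical):
from chord_metadata_service.discovery.scope import get_request_discovery_scope
from chord_metadata_service.phenopackets import models as m

async def scoped_phenopackets(request):
    scope = await get_request_discovery_scope(request)  # parses/validates ?project= & ?dataset=
    return m.Phenopacket.get_model_scoped_queryset(scope).order_by("id")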
""" - response = self.client.get('/api/phenopackets?datasets=dataset_1,noSuchDataset') + response = self.one_authz_get(f"/api/phenopackets?dataset={self.d.identifier}") self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual(len(response_data["results"]), 1) - def test_get_phenopackets_with_invalid_dataset(self): - """ - Test that we cannot get phenopackets with invalid dataset titles. - """ - response = self.client.get('/api/phenopackets?datasets=notADataset') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 0) - - def test_get_phenopackets_with_authz_dataset_1(self): - """ - Test that we cannot get phenopackets with no authorized datasets. - """ - response = self.client.get('/api/phenopackets?datasets=dataset_1&authorized_datasets=dataset2') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 0) - - def test_get_phenopackets_with_authz_dataset_2(self): + def test_get_phenopackets_with_valid_dataset_via_scope_no_access(self): """ - Test that we can get 1 phenopacket with 1 authorized datasets. + Test that we can get 1 phenopacket under dataset_1 via discovery scoping. """ - response = self.client.get('/api/phenopackets?authorized_datasets=dataset_1') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 1) + response = self.one_no_authz_get(f"/api/phenopackets?dataset={self.d.identifier}") + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - def test_get_phenopackets_with_authz_dataset_3(self): + def test_get_phenopackets_with_valid_dataset_via_filter(self): """ - Test that we can get 2 phenopackets with 2 authorized datasets. + Test that we can get phenopackets under specific datasets via title using Django filter. """ - response = self.client.get('/api/phenopackets?authorized_datasets=dataset_1,dataset_2') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 2) - def test_get_phenopackets_with_authz_dataset_4(self): - """ - Test that we can get 1 phenopackets with 1 authorized datasets. - """ - response = self.client.get('/api/phenopackets?datasets=dataset_1&authorized_datasets=dataset_1') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 1) + subtest_params = [ + ("dataset_1", 1), + ("dataset_2", 1), + ("dataset_1,dataset_2", 2), + ("dataset_1,noSuchDataset", 1), + ("notADataset", 0), + ] - def test_get_phenopackets_with_authz_dataset_5(self): - """ - Test that we can get 0 phenopackets with 0 authorized datasets. 
- """ - response = self.client.get('/api/phenopackets?authorized_datasets=NO_DATASETS_AUTHORIZED') - self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(len(response_data["results"]), 0) + for params in subtest_params: + with self.subTest(params=params): + ds_title, exp_count = params + response = self.one_authz_get(f"/api/phenopackets?datasets={ds_title}") + self.assertEqual(response.status_code, status.HTTP_200_OK) + response_data = response.json() + self.assertEqual(len(response_data["results"]), exp_count) class PhenopacketSchema(APITestCase): + # No authz needed for these endpoints def test_get_phenopacket_schema(self): response = self.client.get("/api/schemas/phenopacket") diff --git a/chord_metadata_service/resources/api_views.py b/chord_metadata_service/resources/api_views.py index 9e8bb364c..7d797cd97 100644 --- a/chord_metadata_service/resources/api_views.py +++ b/chord_metadata_service/resources/api_views.py @@ -1,7 +1,10 @@ -from rest_framework import viewsets +from asgiref.sync import async_to_sync from rest_framework.settings import api_settings from django_filters.rest_framework import DjangoFilterBackend +from chord_metadata_service.authz.viewset import BentoAuthzScopedModelViewSet +from chord_metadata_service.chord.data_types import DATA_TYPE_PHENOPACKET +from chord_metadata_service.discovery.scope import get_request_discovery_scope from chord_metadata_service.restapi.api_renderers import PhenopacketsRenderer from chord_metadata_service.restapi.pagination import LargeResultsSetPagination @@ -10,7 +13,7 @@ from .filters import ResourceFilter -class ResourceViewSet(viewsets.ModelViewSet): +class ResourceViewSet(BentoAuthzScopedModelViewSet): """ get: Return a list of all existing resources @@ -19,9 +22,15 @@ class ResourceViewSet(viewsets.ModelViewSet): Create a new resource """ - queryset = Resource.objects.all().order_by("id") + + data_type = DATA_TYPE_PHENOPACKET + serializer_class = ResourceSerializer renderer_classes = (*api_settings.DEFAULT_RENDERER_CLASSES, PhenopacketsRenderer) pagination_class = LargeResultsSetPagination filter_backends = [DjangoFilterBackend] filterset_class = ResourceFilter + + @async_to_sync + async def get_queryset(self): + return Resource.get_model_scoped_queryset(await get_request_discovery_scope(self.request)).order_by("id") diff --git a/chord_metadata_service/resources/models.py b/chord_metadata_service/resources/models.py index 0748c5026..06c764263 100644 --- a/chord_metadata_service/resources/models.py +++ b/chord_metadata_service/resources/models.py @@ -2,12 +2,16 @@ from django.db import models from django.db.models import JSONField +from chord_metadata_service.discovery.scopeable_model import BaseScopeableModel +from chord_metadata_service.discovery.types import ModelScopeFilters from chord_metadata_service.restapi.description_utils import rec_help from . 
import descriptions as d +__all__ = ["Resource"] -class Resource(models.Model): + +class Resource(BaseScopeableModel): """ Class to represent a description of an external resource used for referencing an object @@ -18,6 +22,19 @@ class Resource(models.Model): class Meta: unique_together = (("namespace_prefix", "version"),) + @staticmethod + def get_scope_filters() -> ModelScopeFilters: + return { + "project": { + "filter": ("dataset__project_id", "metadata__phenopacket__dataset__project_id"), + "prefetch_related": ("dataset_set", "metadata_set__phenopacket_set__dataset"), + }, + "dataset": { + "filter": ("dataset__identifier", "metadata__phenopacket__dataset_id"), + "prefetch_related": ("dataset_set", "metadata_set__phenopacket_set"), + }, + } + # resource_id e.g. "id": "uniprot:2019_07" id = models.CharField(primary_key=True, max_length=200, help_text=rec_help(d.RESOURCE, "id")) name = models.CharField(max_length=200, help_text=rec_help(d.RESOURCE, "name")) diff --git a/chord_metadata_service/resources/tests/test_api.py b/chord_metadata_service/resources/tests/test_api.py index b1c464a86..45eb216cb 100644 --- a/chord_metadata_service/resources/tests/test_api.py +++ b/chord_metadata_service/resources/tests/test_api.py @@ -1,23 +1,137 @@ +import uuid + from rest_framework import status -from rest_framework.test import APITestCase -from chord_metadata_service.restapi.tests.utils import get_post_response +from rest_framework.reverse import reverse +from chord_metadata_service.authz.tests.helpers import AuthzAPITestCase +from chord_metadata_service.chord.ingest import WORKFLOW_INGEST_FUNCTION_MAP +from chord_metadata_service.chord.models import Dataset +from chord_metadata_service.chord.tests.constants import valid_dataset_1, VALID_PROJECT_2, valid_dataset_2 +from chord_metadata_service.chord.tests.helpers import AuthzAPITestCaseWithProjectJSON +from chord_metadata_service.chord.workflows.metadata import WORKFLOW_PHENOPACKETS_JSON +from chord_metadata_service.restapi.tests.constants import VALID_PHENOPACKET_1 from ..models import Resource from ..serializers import ResourceSerializer -from .constants import VALID_RESOURCE_2, DUPLICATE_RESOURCE_3 +from .constants import VALID_RESOURCE_1, VALID_RESOURCE_2, DUPLICATE_RESOURCE_3 -class CreateResourceTest(APITestCase): +class CreateResourceTest(AuthzAPITestCase): def setUp(self): self.resource = VALID_RESOURCE_2 self.duplicate_resource = DUPLICATE_RESOURCE_3 - def test_resource(self): - response = get_post_response('resource-list', self.resource) + def test_create_resource(self): + response = self.one_authz_post(reverse("resource-list"), json=self.resource) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(Resource.objects.count(), 1) + def test_create_resource_forbidden(self): + response = self.one_no_authz_post(reverse("resource-list"), json=self.resource) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(Resource.objects.count(), 0) + def test_serializer(self): serializer = ResourceSerializer(data=self.resource) self.assertEqual(serializer.is_valid(), True) + + +class ListResourceTest(AuthzAPITestCaseWithProjectJSON): + + def setUp(self): + super().setUp() + self.url = reverse("resource-list") + self.url_with_proj = f"{self.url}?project={self.project['identifier']}" + + # dataset for project 1 + r = self.one_authz_post(reverse("dataset-list"), json=valid_dataset_1(self.project['identifier'])) + self.dataset = r.json() + self.url_with_proj_ds = 
f"{self.url}?project={self.project['identifier']}&dataset={self.dataset['identifier']}" + + # project 2 + r = self.one_authz_post(reverse("project-list"), json=VALID_PROJECT_2) + self.project_2 = r.json() + + # - dataset for project 2 + r = self.one_authz_post(reverse("dataset-list"), json=valid_dataset_2(self.project_2['identifier'])) + self.dataset_2 = r.json() + + def test_list_resources_basic(self): + self.one_authz_post(self.url, json=VALID_RESOURCE_1) + self.one_authz_post(self.url, json=VALID_RESOURCE_2) + + res = self.one_authz_get(self.url) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), 2) + + # check that we don't have any resources under the project (until later tests...) + res = self.one_authz_get(self.url_with_proj) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), 0) + + def test_list_resources_scope_dne(self): + res = self.one_authz_get(f"{self.url}?project=does-not-exist") + # non-UUID - triggers scope error when handling permissions: + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + + res = self.one_authz_get(f"{self.url}?project={uuid.uuid4()}") + # does not exist - triggers scope error when handling permissions: + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + + def test_list_resources_forbidden(self): + response = self.one_no_authz_get(self.url) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + response = self.one_no_authz_get(self.url_with_proj) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_list_resources_project_dataset(self): + r = Resource.objects.create(**VALID_RESOURCE_1) + Resource.objects.create(**VALID_RESOURCE_2) # r2 + + ds = Dataset.objects.get(pk=self.dataset["identifier"]) + ds.additional_resources.add(r) + + subtests = [ + (self.url, 2), + (self.url_with_proj, 1), + (f"{self.url}?project={self.project_2['identifier']}", 0), + (self.url_with_proj_ds, 1), + (f"{self.url}?project={self.project_2['identifier']}&dataset={self.dataset_2['identifier']}", 0), + ] + + for subtest in subtests: + with self.subTest(params=subtest): + res = self.one_authz_get(subtest[0]) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), subtest[1]) + + def test_list_resources_dataset_and_phenopacket(self): + r = Resource.objects.create(**VALID_RESOURCE_1) + + ds = Dataset.objects.get(pk=self.dataset["identifier"]) + ds.additional_resources.add(r) + + pd = { + **VALID_PHENOPACKET_1, + "dataset": self.dataset["identifier"], + "meta_data": {**VALID_PHENOPACKET_1["meta_data"], "resources": [VALID_RESOURCE_2]}, + } + + # create phenopacket associated with the dataset (+ a new resource in the phenopacket metadata) + WORKFLOW_INGEST_FUNCTION_MAP[WORKFLOW_PHENOPACKETS_JSON](pd, ds.identifier) + + # first, check we get all the resources back successfully with no scoping + res = self.one_authz_get(self.url) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), 2) + + # then, check if we scope in that we correctly get both paths to the dataset resources + + res = self.one_authz_get(self.url_with_proj) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), 2) + + res = self.one_authz_get(self.url_with_proj_ds) + self.assertEqual(res.status_code, status.HTTP_200_OK) + self.assertEqual(len(res.json()["results"]), 2) diff --git 
a/chord_metadata_service/restapi/__init__.py b/chord_metadata_service/restapi/__init__.py index b3a724850..e69de29bb 100644 --- a/chord_metadata_service/restapi/__init__.py +++ b/chord_metadata_service/restapi/__init__.py @@ -1,3 +0,0 @@ -from . import auth_schema - -__all__ = ['auth_schema'] diff --git a/chord_metadata_service/restapi/api_renderers.py b/chord_metadata_service/restapi/api_renderers.py index 0b4cd5faa..bc8fc83e4 100644 --- a/chord_metadata_service/restapi/api_renderers.py +++ b/chord_metadata_service/restapi/api_renderers.py @@ -6,6 +6,7 @@ from rdflib.plugin import register from rdflib.serializer import Serializer from django.http import HttpResponse +from rest_framework import status from rest_framework.renderers import JSONRenderer from djangorestframework_camel_case.render import CamelCaseJSONRenderer @@ -41,7 +42,7 @@ class PhenopacketsRenderer(CamelCaseJSONRenderer): format = 'phenopackets' def render(self, data, media_type=None, renderer_context=None): - return super(PhenopacketsRenderer, self).render(data, media_type, renderer_context) + return super().render(data, media_type, renderer_context) class JSONLDDatasetRenderer(PhenopacketsRenderer): @@ -54,7 +55,7 @@ def render(self, data, media_type=None, renderer_context=None): else: json_obj = dataset_to_jsonld(data) - return super(JSONLDDatasetRenderer, self).render(json_obj, media_type, renderer_context) + return super().render(json_obj, media_type, renderer_context) class RDFDatasetRenderer(PhenopacketsRenderer): @@ -79,13 +80,17 @@ def render(self, data, media_type=None, renderer_context=None): return rdf_data -def generate_csv_response(data, filename, columns): - headers = {key: key.replace('_', ' ').capitalize() for key in columns} - response = HttpResponse(content_type='text/csv') - response['Content-Disposition'] = f"attachment; filename='{filename}'" +def generate_csv_response(file_name: str, columns: list[str], data: list[dict]): + # remove underscore and capitalize column names + headers = {key: key.replace("_", " ").capitalize() for key in columns} + + response = HttpResponse(content_type="text/csv") + response["Content-Disposition"] = f"attachment; filename='{file_name}'" + dict_writer = csv.DictWriter(response, fieldnames=columns) dict_writer.writerow(headers) dict_writer.writerows(data) + return response @@ -104,104 +109,143 @@ def render_age(item: Dict[str, Any], time_key: str) -> Optional[str]: return None -class IndividualCSVRenderer(JSONRenderer): - media_type = 'text/csv' - format = 'csv' +class KatsuCSVRenderer(JSONRenderer): + media_type = "text/csv" + format = "csv" - def render(self, data, media_type=None, renderer_context=None): - if 'results' not in data or not data['results']: - return + file_name: str = "data.csv" + + def get_columns(self) -> list[str]: # pragma: no cover + raise NotImplementedError("get_columns() not implemented") + + def get_dicts(self, data, renderer_context) -> list[dict]: # pragma: no cover + raise NotImplementedError("get_dicts() not implemented") + + def render(self, data, accepted_media_type=None, renderer_context=None): + if not data: + return b"" + + if renderer_context and (res_status := renderer_context["response"].status_code) != status.HTTP_200_OK: + # error response as JSON instead of CSV + return HttpResponse( + json.dumps(data).encode("utf-8"), + status=res_status, + content_type="application/json; charset=utf-8", + ) + + return generate_csv_response(self.file_name, self.get_columns(), self.get_dicts(data, renderer_context)) + + +class 
IndividualCSVRenderer(KatsuCSVRenderer): + file_name = "individuals.csv" + + def get_columns(self) -> list[str]: + return ["id", "sex", "date_of_birth", "taxonomy", "karyotypic_sex", "age", "diseases", "created", "updated"] + def get_dicts(self, data, _renderer_context): individuals = [] - for individual in data['results']: + + for individual in data["results"]: ind_obj = { - 'id': individual['id'], - 'sex': individual.get('sex', None), - 'date_of_birth': individual.get('date_of_birth', None), - 'taxonomy': None, - 'karyotypic_sex': individual['karyotypic_sex'], - 'age': render_age(individual, 'time_at_last_encounter'), - 'diseases': None, - 'created': individual['created'], - 'updated': individual['updated'] + "id": individual["id"], + "sex": individual.get("sex", None), + "date_of_birth": individual.get("date_of_birth", None), + "taxonomy": individual.get("taxonomy", {}).get("label", None), + "karyotypic_sex": individual["karyotypic_sex"], + "age": render_age(individual, "time_at_last_encounter"), + "diseases": None, + "created": individual["created"], + "updated": individual["updated"] } - if 'taxonomy' in individual: - ind_obj['taxonomy'] = individual['taxonomy'].get('label', None) - if 'phenopackets' in individual: + if "phenopackets" in individual: all_diseases = [] - for phenopacket in individual['phenopackets']: - if 'diseases' in phenopacket: + for phenopacket in individual["phenopackets"]: + if "diseases" in phenopacket: # use ; because some disease terms might contain , in their label - single_phenopacket_diseases = '; '.join( + single_phenopacket_diseases = "; ".join( [ f"{d['term']['label']} ({parse_onset(d['onset'])})" - if 'onset' in d else d['term']['label'] for d in phenopacket['diseases'] + if "onset" in d else d["term"]["label"] for d in phenopacket["diseases"] ] ) all_diseases.append(single_phenopacket_diseases) if all_diseases: - ind_obj['diseases'] = '; '.join(all_diseases) + ind_obj["diseases"] = "; ".join(all_diseases) individuals.append(ind_obj) - columns = individuals[0].keys() - # remove underscore and capitalize column names - return generate_csv_response(individuals, 'data.csv', columns) - - -class BiosamplesCSVRenderer(JSONRenderer): - media_type = 'text/csv' - format = 'csv' - def render(self, data, media_type=None, renderer_context=None): - if not data: - return - - biosamples = [] - for biosample in data: - bio_obj = { - 'id': biosample['id'], - 'description': biosample.get('description', 'NA'), - 'sampled_tissue': biosample.get('sampled_tissue', {}).get('label', 'NA'), - 'time_of_collection': render_age(biosample, "time_of_collection"), - 'histological_diagnosis': biosample.get('histological_diagnosis', {}).get('label', 'NA'), - 'extra_properties': f"Material: {biosample.get('extra_properties', {}).get('material', 'NA')}", - 'created': biosample['created'], - 'updated': biosample['updated'], - 'individual': biosample['individual'] + return individuals + + +class BiosamplesCSVRenderer(KatsuCSVRenderer): + file_name = "biosamples.csv" + + def get_columns(self) -> list[str]: + return [ + "id", + "description", + "sampled_tissue", + "time_of_collection", + "histological_diagnosis", + "extra_properties", + "created", + "updated", + "individual", + ] + + def get_dicts(self, data, _renderer_context) -> list[dict]: + return [ + { + "id": biosample["id"], + "description": biosample.get("description", "NA"), + "sampled_tissue": biosample.get("sampled_tissue", {}).get("label", "NA"), + "time_of_collection": render_age(biosample, "time_of_collection"), + 
"histological_diagnosis": biosample.get("histological_diagnosis", {}).get("label", "NA"), + "extra_properties": f"Material: {biosample.get('extra_properties', {}).get('material', 'NA')}", + "created": biosample["created"], + "updated": biosample["updated"], + "individual": biosample["individual"] } - biosamples.append(bio_obj) - - columns = biosamples[0].keys() - return generate_csv_response(biosamples, 'biosamples.csv', columns) - - -class ExperimentCSVRenderer(JSONRenderer): - media_type = 'text/csv' - format = 'csv' - - def render(self, data, media_type=None, renderer_context=None): - if not data: - return - - experiments = [] - for experiment in data: - exp_obj = { - 'id': experiment.get('id'), - 'study_type': experiment.get('study_type'), - 'experiment_type': experiment.get('experiment_type', 'NA'), - 'molecule': experiment.get('molecule'), - 'library_strategy': experiment.get('library_strategy'), - 'library_source': experiment.get('library_source', 'NA'), - 'library_selection': experiment.get('library_selection'), - 'library_layout': experiment.get('library_layout'), - 'created': experiment.get('created'), - 'updated': experiment.get('updated'), - 'biosample': experiment.get('biosample'), - 'individual_id': experiment.get('biosample_individual', {}).get('id', 'NA'), + for biosample in data + ] + + +class ExperimentCSVRenderer(KatsuCSVRenderer): + file_name = "experiments.csv" + + def get_columns(self) -> list[str]: + return [ + "id", + "study_type", + "experiment_type", + "molecule", + "library_strategy", + "library_source", + "library_selection", + "library_layout", + "created", + "updated", + "biosample", + "individual_id", + ] + + def get_dicts(self, data, _renderer_context) -> list[dict]: + return [ + { + "id": experiment.get("id"), + "study_type": experiment.get("study_type"), + "experiment_type": experiment.get("experiment_type", "NA"), + "molecule": experiment.get("molecule"), + "library_strategy": experiment.get("library_strategy"), + "library_source": experiment.get("library_source", "NA"), + "library_selection": experiment.get("library_selection"), + "library_layout": experiment.get("library_layout"), + "created": experiment.get("created"), + "updated": experiment.get("updated"), + "biosample": experiment.get("biosample"), + "individual_id": experiment.get("biosample_individual", {}).get("id", "NA"), } - experiments.append(exp_obj) - - columns = experiments[0].keys() - return generate_csv_response(experiments, 'experiments.csv', columns) + for experiment in data + ] class IndividualBentoSearchRenderer(JSONRenderer): diff --git a/chord_metadata_service/restapi/api_views.py b/chord_metadata_service/restapi/api_views.py index 4ccda772e..22f9afcbb 100644 --- a/chord_metadata_service/restapi/api_views.py +++ b/chord_metadata_service/restapi/api_views.py @@ -1,18 +1,17 @@ import asyncio from adrf.decorators import api_view -from django.db.models import QuerySet -from drf_spectacular.utils import extend_schema, inline_serializer -from rest_framework import serializers +from bento_lib.responses import errors +from rest_framework import status from rest_framework.decorators import permission_classes from rest_framework.request import Request as DrfRequest from rest_framework.response import Response from chord_metadata_service.authz.helpers import get_data_type_query_permissions -from chord_metadata_service.authz.permissions import BentoAllowAny, OverrideOrSuperUserOnly -from chord_metadata_service.authz.types import DataTypeDiscoveryPermissions +from 
chord_metadata_service.authz.middleware import authz_middleware +from chord_metadata_service.authz.permissions import BentoAllowAny, BentoDeferToHandler from chord_metadata_service.chord.data_types import DATA_TYPE_PHENOPACKET, DATA_TYPE_EXPERIMENT -from chord_metadata_service.discovery.utils import ValidatedDiscoveryScope +from chord_metadata_service.discovery.scope import get_request_discovery_scope from chord_metadata_service.experiments import models as experiments_models from chord_metadata_service.experiments.summaries import dt_experiment_summary from chord_metadata_service.metadata.service_info import get_service_info @@ -35,54 +34,6 @@ async def service_info(_request: DrfRequest): return Response(await get_service_info()) -async def build_overview_response( - scope: ValidatedDiscoveryScope, - dt_permissions: DataTypeDiscoveryPermissions, - phenopackets: QuerySet | None = None, - experiments: QuerySet | None = None, -) -> Response: - phenopackets_summary, experiments_summary = await asyncio.gather( - dt_phenopacket_summary(scope, dt_permissions[DATA_TYPE_PHENOPACKET], phenopackets), - dt_experiment_summary(scope, dt_permissions[DATA_TYPE_EXPERIMENT], experiments), - ) - - return Response({ - DATA_TYPE_PHENOPACKET: phenopackets_summary, - DATA_TYPE_EXPERIMENT: experiments_summary, - }) - - -@extend_schema( - description="Overview of all Phenopackets in the database", - responses={ - 200: inline_serializer( - name='overview_response', - fields={ - 'phenopackets': serializers.IntegerField(), - 'data_type_specific': serializers.JSONField(), - } - ) - } -) -@api_view(["GET"]) -@permission_classes([OverrideOrSuperUserOnly]) -async def overview(request: DrfRequest): - """ - get: - Overview of all Phenopackets and experiments in the database - private endpoint - """ - - # TODO: permissions based on project - this endpoint should be scrapped / completely rethought - # use node level discovery config for private overview - discovery_scope = ValidatedDiscoveryScope(project=None, dataset=None) - - dt_permissions = await get_data_type_query_permissions( - request, [DATA_TYPE_PHENOPACKET, DATA_TYPE_EXPERIMENT], discovery_scope.as_authz_resource() - ) - - return await build_overview_response(discovery_scope, dt_permissions) - - @api_view(["GET"]) @permission_classes([BentoAllowAny]) def extra_properties_schema_types(_request: DrfRequest): @@ -95,22 +46,26 @@ def extra_properties_schema_types(_request: DrfRequest): @api_view(["GET", "POST"]) +@permission_classes([BentoDeferToHandler]) # careful here since it's private - mark authz done as we go. 
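# The defer-to-handler pattern used for this endpoint, distilled. BentoDeferToHandler lets
# the request through without marking authorization as complete, so the handler itself must
# call authz_middleware.mark_authz_done(request) once it has made its own decision -
# otherwise the middleware treats the response as unauthorized. Minimal obligation sketch,
# simplified from search_overview below (not a separate implementation):
#
#     authz_middleware.mark_authz_done(request)  # we evaluated permissions ourselves
#     if not permitted:                          # e.g. no query:data on phenopackets
#         return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN)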
async def search_overview(request: DrfRequest): """ get+post: Overview statistics of a list of patients (associated with a search result) - Parameter - id: a list of patient ids + - project (optional), dataset (optional): scope for search overview """ - # TODO: this should be project / dataset-scoped and probably shouldn't even exist as-is - # use node level discovery config for private search overview - discovery_scope = ValidatedDiscoveryScope(project=None, dataset=None) + # TODO: this probably shouldn't even exist as-is + scope = await get_request_discovery_scope(request) individual_ids = request.GET.getlist("id") if request.method == "GET" else request.data.get("id", []) - phenopackets = pheno_models.Phenopacket.objects.all().filter(subject_id__in=individual_ids) - experiments = experiments_models.Experiment.objects.all().filter( - biosample_id__in=phenopackets.values_list("biosamples__id", flat=True)) + phenopackets = pheno_models.Phenopacket.get_model_scoped_queryset(scope).filter(subject_id__in=individual_ids) + experiments = ( + experiments_models.Experiment + .get_model_scoped_queryset(scope) + .filter(biosample_id__in=[b async for b in phenopackets.values_list("biosamples__id", flat=True)]) + ) # TODO: this hardcodes the biosample linked field set relationship # - in general, this endpoint is less than ideal and should be derived from search results themselves vs. this @@ -118,12 +73,21 @@ async def search_overview(request: DrfRequest): # TODO: resource should be tied to search dt_permissions = await get_data_type_query_permissions( - request, [DATA_TYPE_PHENOPACKET, DATA_TYPE_EXPERIMENT], discovery_scope.as_authz_resource() + request, [DATA_TYPE_PHENOPACKET, DATA_TYPE_EXPERIMENT], scope.as_authz_resource() ) - return await build_overview_response( - discovery_scope, - dt_permissions, - phenopackets=phenopackets, - experiments=experiments, + authz_middleware.mark_authz_done(request) + + if not dt_permissions[DATA_TYPE_PHENOPACKET]["data"]: + # If we don't have query:data on phenopackets, we cannot request a search overview + return Response(errors.forbidden_error("Forbidden"), status=status.HTTP_403_FORBIDDEN) + + phenopackets_summary, experiments_summary = await asyncio.gather( + dt_phenopacket_summary(scope, dt_permissions[DATA_TYPE_PHENOPACKET], phenopackets), + dt_experiment_summary(scope, dt_permissions[DATA_TYPE_EXPERIMENT], experiments), ) + + return Response({ + DATA_TYPE_PHENOPACKET: phenopackets_summary, + DATA_TYPE_EXPERIMENT: experiments_summary, + }) diff --git a/chord_metadata_service/restapi/auth_schema.py b/chord_metadata_service/restapi/auth_schema.py deleted file mode 100644 index c25509d2b..000000000 --- a/chord_metadata_service/restapi/auth_schema.py +++ /dev/null @@ -1,21 +0,0 @@ -from drf_spectacular.extensions import OpenApiAuthenticationExtension - - -class BentoRemoteUserAuthenticationScheme(OpenApiAuthenticationExtension): - """ - This class provides the custom authentication scheme for drf-spectacular - so we can test the API with authentication and unauthorized requests - """ - target_class = 'bento_lib.auth.django_remote_user.BentoRemoteUserAuthentication' - name = 'BentoRemoteUserAuthentication' - - # TODO: this is a temporaty fix for the issue with the authentication scheme - # not being picked up by drf-spectacular - # Once we figure out how to implement BentoRemoteUserAuthentication, we can - # fill this in - def get_security_definition(self, auto_schema): - return { - 'type': 'apiKey', - 'in': 'header', - 'name': 'api_key', - } diff --git 
a/chord_metadata_service/restapi/exception_handler.py b/chord_metadata_service/restapi/exception_handler.py new file mode 100644 index 000000000..d5482e451 --- /dev/null +++ b/chord_metadata_service/restapi/exception_handler.py @@ -0,0 +1,17 @@ +from rest_framework.views import exception_handler + +from chord_metadata_service.authz.middleware import authz_middleware +from chord_metadata_service.discovery.exceptions import DiscoveryScopeException + +__all__ = ["katsu_exception_handler"] + + +def katsu_exception_handler(exc, context): + # Start with default DRF exception handler + response = exception_handler(exc, context) + + if isinstance(exc, DiscoveryScopeException): + # Allow scope exception responses through the authz middleware (mark them as authorized) + authz_middleware.mark_authz_done(context["request"]) + + return response diff --git a/chord_metadata_service/restapi/tests/test_api.py b/chord_metadata_service/restapi/tests/test_api.py index 1d714a41a..5539311b6 100644 --- a/chord_metadata_service/restapi/tests/test_api.py +++ b/chord_metadata_service/restapi/tests/test_api.py @@ -73,49 +73,9 @@ def setUp(self) -> None: self.experiment_result = exp_m.ExperimentResult.objects.create(**exp_c.valid_experiment_result()) self.experiment.experiment_results.set([self.experiment_result]) - def test_overview(self): - response = self.dt_authz_full_get('/api/overview') - response_obj = response.json() - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIsInstance(response_obj, dict) - # phenopackets - phenopacket_res = response_obj['phenopacket'] - self.assertEqual(phenopacket_res['count'], 2) - self.assertEqual(phenopacket_res['data_type_specific']['individuals']['count'], 2) - self.assertIsInstance(phenopacket_res['data_type_specific']['individuals']['age'], dict) - self.assertEqual( - phenopacket_res['data_type_specific']['individuals']['age'], - {**{'40': 1, '30': 1}, **phenopacket_res['data_type_specific']['individuals']['age']}) - self.assertEqual(phenopacket_res['data_type_specific']['biosamples']['count'], 2) - self.assertEqual(phenopacket_res['data_type_specific']['phenotypic_features']['count'], 1) - self.assertEqual(phenopacket_res['data_type_specific']['diseases']['count'], 1) - # experiments - experiment_res = response_obj['experiment'] - self.assertEqual(experiment_res['count'], 2) - self.assertEqual( - experiment_res['data_type_specific']['experiments']['study_type']['Whole genome Sequencing'], 2) - self.assertEqual( - experiment_res['data_type_specific']['experiments']['experiment_type']['DNA Methylation'], 2 - ) - self.assertEqual(experiment_res['data_type_specific']['experiments']['molecule']['total RNA'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiments']['library_strategy']['Bisulfite-Seq'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiments']['library_source']['Genomic'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiments']['library_selection']['PCR'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiments']['library_layout']['Single'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiments']['extraction_protocol']['NGS'], 2) - self.assertEqual(experiment_res['data_type_specific']['experiment_results']['count'], 1) - self.assertEqual(experiment_res['data_type_specific']['experiment_results']['file_format']['VCF'], 1) - self.assertEqual( - experiment_res['data_type_specific']['experiment_results']['data_output_type']['Derived data'], 1 - ) - 
self.assertEqual(experiment_res['data_type_specific']['experiment_results']['usage']['download'], 1) - self.assertEqual(experiment_res['data_type_specific']['instruments']['count'], 1) - self.assertEqual(experiment_res['data_type_specific']['instruments']['platform']['Illumina'], 2) - self.assertEqual(experiment_res['data_type_specific']['instruments']['model']['Illumina HiSeq 4000'], 2) - def test_search_overview(self): payload = json.dumps({'id': [ph_c.VALID_INDIVIDUAL_1['id']]}) - response = self.dt_authz_full_post(reverse('search-overview'), payload, content_type='application/json') + response = self.dt_authz_full_post(reverse('search-overview'), data=payload, content_type='application/json') response_obj = response.json() phenopacket_res = response_obj['phenopacket']['data_type_specific'] self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -124,3 +84,9 @@ def test_search_overview(self): self.assertIn('wall of urinary bladder', phenopacket_res['biosamples']['sampled_tissue']) self.assertIn('Proptosis', phenopacket_res['phenotypic_features']['type']) self.assertIn(ph_c.VALID_DISEASE_1['term']['label'], phenopacket_res['diseases']['term']) + + def test_search_overview_forbidden(self): + payload = json.dumps({'id': [ph_c.VALID_INDIVIDUAL_1['id']]}) + response = self.dt_authz_counts_post(reverse('search-overview'), data=payload, content_type='application/json') + # search overview should be forbidden with counts, since we have to be able to query by ID: + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) diff --git a/chord_metadata_service/restapi/tests/test_jsonld.py b/chord_metadata_service/restapi/tests/test_jsonld.py index cc28593cf..ba835d9cc 100644 --- a/chord_metadata_service/restapi/tests/test_jsonld.py +++ b/chord_metadata_service/restapi/tests/test_jsonld.py @@ -1,4 +1,3 @@ -import json from rest_framework import status from chord_metadata_service.chord.tests.constants import VALID_DATS_CREATORS, dats_dataset from chord_metadata_service.chord.tests.helpers import AuthzAPITestCaseWithProjectJSON @@ -9,7 +8,7 @@ def setUp(self) -> None: super().setUp() self.creators = VALID_DATS_CREATORS self.dataset = dats_dataset(self.project['identifier'], self.creators) - self.one_authz_post("/api/datasets", data=json.dumps(self.dataset)) + self.one_authz_post("/api/datasets", json=self.dataset) def test_jsonld(self): get_resp = self.client.get('/api/datasets?format=json-ld') diff --git a/chord_metadata_service/restapi/tests/utils.py b/chord_metadata_service/restapi/tests/utils.py index f1b82238d..aebd96f40 100644 --- a/chord_metadata_service/restapi/tests/utils.py +++ b/chord_metadata_service/restapi/tests/utils.py @@ -1,21 +1,10 @@ import json import inspect import os -from django.urls import reverse -from rest_framework.test import APIClient # Helper functions for tests -def get_post_response(viewname, obj): - """ Generic POST function. 
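# Wiring sketch for the katsu_exception_handler module added earlier in this diff. DRF picks
# up a custom handler via the EXCEPTION_HANDLER setting; the settings-module placement is an
# assumption, since this changeset does not include the corresponding settings hunk:
#
#     # e.g. in the Django settings module
#     REST_FRAMEWORK = {
#         # ... existing DRF settings ...
#         "EXCEPTION_HANDLER": "chord_metadata_service.restapi.exception_handler.katsu_exception_handler",
#     }
#
# Without this hook, DiscoveryScopeException responses would never be marked as having
# completed authz, and the middleware would reject them.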
""" - client = APIClient() - return client.post( - reverse(viewname), - data=json.dumps(obj), - content_type='application/json' - ) - def load_local_json(file_name): """ diff --git a/chord_metadata_service/restapi/urls.py b/chord_metadata_service/restapi/urls.py index d2ceae9d8..977c638ce 100644 --- a/chord_metadata_service/restapi/urls.py +++ b/chord_metadata_service/restapi/urls.py @@ -12,7 +12,7 @@ from chord_metadata_service.patients import api_views as individual_views from chord_metadata_service.phenopackets import api_views as phenopacket_views from chord_metadata_service.resources import api_views as resources_views -from chord_metadata_service.restapi.api_views import overview, search_overview, extra_properties_schema_types +from chord_metadata_service.restapi.api_views import search_overview, extra_properties_schema_types from chord_metadata_service.restapi.routers import BatchListRouter __all__ = ["router", "batch_router", "urlpatterns"] @@ -22,11 +22,11 @@ # CHORD app urls router.register(r'projects', chord_views.ProjectViewSet) -router.register(r'datasets', chord_views.DatasetViewSet, basename="datasets") +router.register(r'datasets', chord_views.DatasetViewSet, basename="dataset") router.register(r'project_json_schemas', chord_views.ProjectJsonSchemaViewSet) # Experiments app urls -router.register(r'experiments', experiment_views.ExperimentViewSet) +router.register(r'experiments', experiment_views.ExperimentViewSet, basename="experiments") router.register(r'experimentresults', experiment_views.ExperimentResultViewSet, basename="experimentresults") router.register(r'batch/experiments', experiment_views.ExperimentBatchViewSet, basename="batch/experiments") @@ -42,7 +42,7 @@ router.register(r'phenopackets', phenopacket_views.PhenopacketViewSet, basename="phenopackets") # Resources app urls -router.register(r'resources', resources_views.ResourceViewSet) +router.register(r'resources', resources_views.ResourceViewSet, basename="resource") urlpatterns = [ path('', include(router.urls)), @@ -61,7 +61,6 @@ path('extra_properties_schema_types', extra_properties_schema_types, name="extra-properties-schema-types"), # overviews (statistics) - path('overview', overview, name="overview"), path('search_overview', search_overview, name="search-overview"), # public endpoints (no confidential information leak) diff --git a/docs/modules/overview_api.rst b/docs/modules/overview_api.rst deleted file mode 100644 index bd2c3df18..000000000 --- a/docs/modules/overview_api.rst +++ /dev/null @@ -1,8 +0,0 @@ -Overview API -============ - -:code:`api/overview` GET: returns an overview of all phenopackets, individuals and other related data types. -The overview includes counts for individuals, unique diseases, phenotypic features, experiments and other information. - -:code:`api/mcode_overview` GET: returns an overview of mcode-based data. -The overview includes counts for individuals, cancer conditions, cancer related procedures and cancer status. \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 99661da77..4a5cbc28c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "adrf" -version = "0.1.8" +version = "0.1.9" description = "Async support for Django REST framework" optional = false python-versions = ">=3.8" files = [ - {file = "adrf-0.1.8-py3-none-any.whl", hash = "sha256:3032b987085d75cfd59eb3d4dcd7138fc20085de1782b065603559ccec69531f"}, - {file = "adrf-0.1.8.tar.gz", hash = "sha256:18844630dd9272c38cc3f761fce6bfb50f91c4f84dadf99846f86d4527f19c7f"}, + {file = "adrf-0.1.9-py3-none-any.whl", hash = "sha256:fd6c45df908e042c91571fdcff1ea54180c871ec18659b639cf3217d67ce97d5"}, + {file = "adrf-0.1.9.tar.gz", hash = "sha256:e2f59fd84960a564b0385d9201c55531a30c6118eb40c86c5356c077f279af23"}, ] [package.dependencies] @@ -40,87 +40,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.11.9" +version = "3.11.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" files = [ - {file = "aiohttp-3.11.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0411777249f25d11bd2964a230b3ffafcbed6cd65d0f2b132bc2b8f5b8c347c7"}, - {file = "aiohttp-3.11.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:499368eb904566fbdf1a3836a1532000ef1308f34a1bcbf36e6351904cced771"}, - {file = "aiohttp-3.11.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b5a5009b0159a8f707879dc102b139466d8ec6db05103ec1520394fdd8ea02c"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:176f8bb8931da0613bb0ed16326d01330066bb1e172dd97e1e02b1c27383277b"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6435a66957cdba1a0b16f368bde03ce9c79c57306b39510da6ae5312a1a5b2c1"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:202f40fb686e5f93908eee0c75d1e6fbe50a43e9bd4909bf3bf4a56b560ca180"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39625703540feb50b6b7f938b3856d1f4886d2e585d88274e62b1bd273fae09b"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6beeac698671baa558e82fa160be9761cf0eb25861943f4689ecf9000f8ebd0"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:96726839a42429318017e67a42cca75d4f0d5248a809b3cc2e125445edd7d50d"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3f5461c77649358610fb9694e790956b4238ac5d9e697a17f63619c096469afe"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4313f3bc901255b22f01663eeeae167468264fdae0d32c25fc631d5d6e15b502"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d6e274661c74195708fc4380a4ef64298926c5a50bb10fbae3d01627d7a075b7"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db2914de2559809fdbcf3e48f41b17a493b58cb7988d3e211f6b63126c55fe82"}, - {file = "aiohttp-3.11.9-cp310-cp310-win32.whl", hash = "sha256:27935716f8d62c1c73010428db310fd10136002cfc6d52b0ba7bdfa752d26066"}, - {file = "aiohttp-3.11.9-cp310-cp310-win_amd64.whl", hash = "sha256:afbe85b50ade42ddff5669947afde9e8a610e64d2c80be046d67ec4368e555fa"}, - {file = "aiohttp-3.11.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:afcda759a69c6a8be3aae764ec6733155aa4a5ad9aad4f398b52ba4037942fe3"}, - {file = "aiohttp-3.11.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5bba6b83fde4ca233cfda04cbd4685ab88696b0c8eaf76f7148969eab5e248a"}, - {file = 
"aiohttp-3.11.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:442356e8924fe1a121f8c87866b0ecdc785757fd28924b17c20493961b3d6697"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f737fef6e117856400afee4f17774cdea392b28ecf058833f5eca368a18cf1bf"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea142255d4901b03f89cb6a94411ecec117786a76fc9ab043af8f51dd50b5313"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1e9e447856e9b7b3d38e1316ae9a8c92e7536ef48373de758ea055edfd5db5"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7f6173302f8a329ca5d1ee592af9e628d3ade87816e9958dcf7cdae2841def7"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c6147c6306f537cff59409609508a1d2eff81199f0302dd456bb9e7ea50c39"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e9d036a9a41fc78e8a3f10a86c2fc1098fca8fab8715ba9eb999ce4788d35df0"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2ac9fd83096df36728da8e2f4488ac3b5602238f602706606f3702f07a13a409"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3108f0ad5c6b6d78eec5273219a5bbd884b4aacec17883ceefaac988850ce6e"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:96bbec47beb131bbf4bae05d8ef99ad9e5738f12717cfbbf16648b78b0232e87"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fc726c3fa8f606d07bd2b500e5dc4c0fd664c59be7788a16b9e34352c50b6b6b"}, - {file = "aiohttp-3.11.9-cp311-cp311-win32.whl", hash = "sha256:5720ebbc7a1b46c33a42d489d25d36c64c419f52159485e55589fbec648ea49a"}, - {file = "aiohttp-3.11.9-cp311-cp311-win_amd64.whl", hash = "sha256:17af09d963fa1acd7e4c280e9354aeafd9e3d47eaa4a6bfbd2171ad7da49f0c5"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c1f2d7fd583fc79c240094b3e7237d88493814d4b300d013a42726c35a734bc9"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4b8a1b6c7a68c73191f2ebd3bf66f7ce02f9c374e309bdb68ba886bbbf1b938"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd3f711f4c99da0091ced41dccdc1bcf8be0281dc314d6d9c6b6cf5df66f37a9"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cb1a1326a0264480a789e6100dc3e07122eb8cd1ad6b784a3d47d13ed1d89c"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a7ddf981a0b953ade1c2379052d47ccda2f58ab678fca0671c7c7ca2f67aac2"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ffa45cc55b18d4ac1396d1ddb029f139b1d3480f1594130e62bceadf2e1a838"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cca505829cdab58c2495ff418c96092d225a1bbd486f79017f6de915580d3c44"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44d323aa80a867cb6db6bebb4bbec677c6478e38128847f2c6b0f70eae984d72"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b2fab23003c4bb2249729a7290a76c1dda38c438300fdf97d4e42bf78b19c810"}, - {file = 
"aiohttp-3.11.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:be0c7c98e38a1e3ad7a6ff64af8b6d6db34bf5a41b1478e24c3c74d9e7f8ed42"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cc5e0d069c56645446c45a4b5010d4b33ac6c5ebfd369a791b5f097e46a3c08"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9bcf97b971289be69638d8b1b616f7e557e1342debc7fc86cf89d3f08960e411"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c7333e7239415076d1418dbfb7fa4df48f3a5b00f8fdf854fca549080455bc14"}, - {file = "aiohttp-3.11.9-cp312-cp312-win32.whl", hash = "sha256:9384b07cfd3045b37b05ed002d1c255db02fb96506ad65f0f9b776b762a7572e"}, - {file = "aiohttp-3.11.9-cp312-cp312-win_amd64.whl", hash = "sha256:f5252ba8b43906f206048fa569debf2cd0da0316e8d5b4d25abe53307f573941"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:282e0a7ddd36ebc411f156aeaa0491e8fe7f030e2a95da532cf0c84b0b70bc66"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebd3e6b0c7d4954cca59d241970011f8d3327633d555051c430bd09ff49dc494"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30f9f89ae625d412043f12ca3771b2ccec227cc93b93bb1f994db6e1af40a7d3"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a3b5b2c012d70c63d9d13c57ed1603709a4d9d7d473e4a9dfece0e4ea3d5f51"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ef1550bb5f55f71b97a6a395286db07f7f2c01c8890e613556df9a51da91e8d"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317251b9c9a2f1a9ff9cd093775b34c6861d1d7df9439ce3d32a88c275c995cd"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cbe97839b009826a61b143d3ca4964c8590d7aed33d6118125e5b71691ca46"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:618b18c3a2360ac940a5503da14fa4f880c5b9bc315ec20a830357bcc62e6bae"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0cf4d814689e58f57ecd5d8c523e6538417ca2e72ff52c007c64065cef50fb2"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:15c4e489942d987d5dac0ba39e5772dcbed4cc9ae3710d1025d5ba95e4a5349c"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ec8df0ff5a911c6d21957a9182402aad7bf060eaeffd77c9ea1c16aecab5adbf"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ed95d66745f53e129e935ad726167d3a6cb18c5d33df3165974d54742c373868"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:647ec5bee7e4ec9f1034ab48173b5fa970d9a991e565549b965e93331f1328fe"}, - {file = "aiohttp-3.11.9-cp313-cp313-win32.whl", hash = "sha256:ef2c9499b7bd1e24e473dc1a85de55d72fd084eea3d8bdeec7ee0720decb54fa"}, - {file = "aiohttp-3.11.9-cp313-cp313-win_amd64.whl", hash = "sha256:84de955314aa5e8d469b00b14d6d714b008087a0222b0f743e7ffac34ef56aff"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e738aabff3586091221044b7a584865ddc4d6120346d12e28e788307cd731043"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28f29bce89c3b401a53d6fd4bee401ee943083bf2bdc12ef297c1d63155070b0"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:31de2f10f63f96cc19e04bd2df9549559beadd0b2ee2da24a17e7ed877ca8c60"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f31cebd8c27a36af6c7346055ac564946e562080ee1a838da724585c67474f"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bcb7f6976dc0b6b56efde13294862adf68dd48854111b422a336fa729a82ea6"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8b13b9950d8b2f8f58b6e5842c4b842b5887e2c32e3f4644d6642f1659a530"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c23e62f3545c2216100603614f9e019e41b9403c47dd85b8e7e5015bf1bde0"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec656680fc53a13f849c71afd0c84a55c536206d524cbc831cde80abbe80489e"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:36df00e0541f264ce42d62280281541a47474dfda500bc5b7f24f70a7f87be7a"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8dcfd14c712aa9dd18049280bfb2f95700ff6a8bde645e09f17c3ed3f05a0130"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14624d96f0d69cf451deed3173079a68c322279be6030208b045ab77e1e8d550"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4b01d9cfcb616eeb6d40f02e66bebfe7b06d9f2ef81641fdd50b8dd981166e0b"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:928f92f80e2e8d6567b87d3316c1fd9860ccfe36e87a9a7f5237d4cda8baa1ba"}, - {file = "aiohttp-3.11.9-cp39-cp39-win32.whl", hash = "sha256:c8a02f74ae419e3955af60f570d83187423e42e672a6433c5e292f1d23619269"}, - {file = "aiohttp-3.11.9-cp39-cp39-win_amd64.whl", hash = "sha256:0a97d657f6cf8782a830bb476c13f7d777cfcab8428ac49dde15c22babceb361"}, - {file = "aiohttp-3.11.9.tar.gz", hash = "sha256:a9266644064779840feec0e34f10a89b3ff1d2d6b751fe90017abcad1864fa7c"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3"}, + {file = 
"aiohttp-3.11.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c"}, + {file = "aiohttp-3.11.11-cp310-cp310-win32.whl", hash = "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745"}, + {file = "aiohttp-3.11.11-cp310-cp310-win_amd64.whl", hash = "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773"}, + {file = "aiohttp-3.11.11-cp311-cp311-win32.whl", hash = "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62"}, + {file = "aiohttp-3.11.11-cp311-cp311-win_amd64.whl", hash = "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2"}, + {file = 
"aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e"}, + {file = "aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600"}, + {file = "aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d"}, + {file = 
"aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5"}, + {file = "aiohttp-3.11.11-cp313-cp313-win32.whl", hash = "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d"}, + {file = "aiohttp-3.11.11-cp313-cp313-win_amd64.whl", hash = "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226"}, + {file = "aiohttp-3.11.11-cp39-cp39-win32.whl", hash = "sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3"}, + {file = "aiohttp-3.11.11-cp39-cp39-win_amd64.whl", hash = "sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1"}, + {file = "aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e"}, ] [package.dependencies] @@ -153,13 +153,13 @@ packaging = ">=22.0" [[package]] name = "aiosignal" -version = "1.3.1" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false -python-versions = ">=3.7" 
+python-versions = ">=3.9" files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] @@ -236,19 +236,19 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -339,13 +339,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -440,116 +440,103 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - 
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] @@ -576,73 +563,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.8" +version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, - {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, - {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, - {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, - {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, - {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, - {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, - {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, - {file = 
"coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, - {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, - {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, - {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, - {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, - {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, - {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = 
"sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, - {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = 
"coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = 
"coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.extras] @@ -661,7 +648,6 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -672,7 +658,6 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -720,37 +705,37 @@ tests = ["django", "hypothesis", "pytest", "pytest-asyncio"] [[package]] name = "debugpy" -version = "1.8.9" +version = "1.8.11" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.9-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:cfe1e6c6ad7178265f74981edf1154ffce97b69005212fbc90ca22ddfe3d017e"}, - {file = "debugpy-1.8.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada7fb65102a4d2c9ab62e8908e9e9f12aed9d76ef44880367bc9308ebe49a0f"}, - {file = "debugpy-1.8.9-cp310-cp310-win32.whl", hash = "sha256:c36856343cbaa448171cba62a721531e10e7ffb0abff838004701454149bc037"}, - {file = "debugpy-1.8.9-cp310-cp310-win_amd64.whl", hash = "sha256:17c5e0297678442511cf00a745c9709e928ea4ca263d764e90d233208889a19e"}, - {file = "debugpy-1.8.9-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040"}, - {file = "debugpy-1.8.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70"}, - {file = "debugpy-1.8.9-cp311-cp311-win32.whl", hash = "sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66"}, - {file = "debugpy-1.8.9-cp311-cp311-win_amd64.whl", hash = "sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d"}, - {file = "debugpy-1.8.9-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:957363d9a7a6612a37458d9a15e72d03a635047f946e5fceee74b50d52a9c8e2"}, - {file = "debugpy-1.8.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e565fc54b680292b418bb809f1386f17081d1346dca9a871bf69a8ac4071afe"}, - {file = "debugpy-1.8.9-cp312-cp312-win32.whl", hash = 
"sha256:3e59842d6c4569c65ceb3751075ff8d7e6a6ada209ceca6308c9bde932bcef11"}, - {file = "debugpy-1.8.9-cp312-cp312-win_amd64.whl", hash = "sha256:66eeae42f3137eb428ea3a86d4a55f28da9bd5a4a3d369ba95ecc3a92c1bba53"}, - {file = "debugpy-1.8.9-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:957ecffff80d47cafa9b6545de9e016ae8c9547c98a538ee96ab5947115fb3dd"}, - {file = "debugpy-1.8.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1efbb3ff61487e2c16b3e033bc8595aea578222c08aaf3c4bf0f93fadbd662ee"}, - {file = "debugpy-1.8.9-cp313-cp313-win32.whl", hash = "sha256:7c4d65d03bee875bcb211c76c1d8f10f600c305dbd734beaed4077e902606fee"}, - {file = "debugpy-1.8.9-cp313-cp313-win_amd64.whl", hash = "sha256:e46b420dc1bea64e5bbedd678148be512442bc589b0111bd799367cde051e71a"}, - {file = "debugpy-1.8.9-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:472a3994999fe6c0756945ffa359e9e7e2d690fb55d251639d07208dbc37caea"}, - {file = "debugpy-1.8.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365e556a4772d7d0d151d7eb0e77ec4db03bcd95f26b67b15742b88cacff88e9"}, - {file = "debugpy-1.8.9-cp38-cp38-win32.whl", hash = "sha256:54a7e6d3014c408eb37b0b06021366ee985f1539e12fe49ca2ee0d392d9ceca5"}, - {file = "debugpy-1.8.9-cp38-cp38-win_amd64.whl", hash = "sha256:8e99c0b1cc7bf86d83fb95d5ccdc4ad0586d4432d489d1f54e4055bcc795f693"}, - {file = "debugpy-1.8.9-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:7e8b079323a56f719977fde9d8115590cb5e7a1cba2fcee0986ef8817116e7c1"}, - {file = "debugpy-1.8.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6953b335b804a41f16a192fa2e7851bdcfd92173cbb2f9f777bb934f49baab65"}, - {file = "debugpy-1.8.9-cp39-cp39-win32.whl", hash = "sha256:7e646e62d4602bb8956db88b1e72fe63172148c1e25c041e03b103a25f36673c"}, - {file = "debugpy-1.8.9-cp39-cp39-win_amd64.whl", hash = "sha256:3d9755e77a2d680ce3d2c5394a444cf42be4a592caaf246dbfbdd100ffcf7ae5"}, - {file = "debugpy-1.8.9-py2.py3-none-any.whl", hash = "sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899"}, - {file = "debugpy-1.8.9.zip", hash = "sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e"}, + {file = "debugpy-1.8.11-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:2b26fefc4e31ff85593d68b9022e35e8925714a10ab4858fb1b577a8a48cb8cd"}, + {file = "debugpy-1.8.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61bc8b3b265e6949855300e84dc93d02d7a3a637f2aec6d382afd4ceb9120c9f"}, + {file = "debugpy-1.8.11-cp310-cp310-win32.whl", hash = "sha256:c928bbf47f65288574b78518449edaa46c82572d340e2750889bbf8cd92f3737"}, + {file = "debugpy-1.8.11-cp310-cp310-win_amd64.whl", hash = "sha256:8da1db4ca4f22583e834dcabdc7832e56fe16275253ee53ba66627b86e304da1"}, + {file = "debugpy-1.8.11-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:85de8474ad53ad546ff1c7c7c89230db215b9b8a02754d41cb5a76f70d0be296"}, + {file = "debugpy-1.8.11-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ffc382e4afa4aee367bf413f55ed17bd91b191dcaf979890af239dda435f2a1"}, + {file = "debugpy-1.8.11-cp311-cp311-win32.whl", hash = "sha256:40499a9979c55f72f4eb2fc38695419546b62594f8af194b879d2a18439c97a9"}, + {file = "debugpy-1.8.11-cp311-cp311-win_amd64.whl", hash = 
"sha256:987bce16e86efa86f747d5151c54e91b3c1e36acc03ce1ddb50f9d09d16ded0e"}, + {file = "debugpy-1.8.11-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:84e511a7545d11683d32cdb8f809ef63fc17ea2a00455cc62d0a4dbb4ed1c308"}, + {file = "debugpy-1.8.11-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce291a5aca4985d82875d6779f61375e959208cdf09fcec40001e65fb0a54768"}, + {file = "debugpy-1.8.11-cp312-cp312-win32.whl", hash = "sha256:28e45b3f827d3bf2592f3cf7ae63282e859f3259db44ed2b129093ca0ac7940b"}, + {file = "debugpy-1.8.11-cp312-cp312-win_amd64.whl", hash = "sha256:44b1b8e6253bceada11f714acf4309ffb98bfa9ac55e4fce14f9e5d4484287a1"}, + {file = "debugpy-1.8.11-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:8988f7163e4381b0da7696f37eec7aca19deb02e500245df68a7159739bbd0d3"}, + {file = "debugpy-1.8.11-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1f6a173d1140e557347419767d2b14ac1c9cd847e0b4c5444c7f3144697e4e"}, + {file = "debugpy-1.8.11-cp313-cp313-win32.whl", hash = "sha256:bb3b15e25891f38da3ca0740271e63ab9db61f41d4d8541745cfc1824252cb28"}, + {file = "debugpy-1.8.11-cp313-cp313-win_amd64.whl", hash = "sha256:d8768edcbeb34da9e11bcb8b5c2e0958d25218df7a6e56adf415ef262cd7b6d1"}, + {file = "debugpy-1.8.11-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:ad7efe588c8f5cf940f40c3de0cd683cc5b76819446abaa50dc0829a30c094db"}, + {file = "debugpy-1.8.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:189058d03a40103a57144752652b3ab08ff02b7595d0ce1f651b9acc3a3a35a0"}, + {file = "debugpy-1.8.11-cp38-cp38-win32.whl", hash = "sha256:32db46ba45849daed7ccf3f2e26f7a386867b077f39b2a974bb5c4c2c3b0a280"}, + {file = "debugpy-1.8.11-cp38-cp38-win_amd64.whl", hash = "sha256:116bf8342062246ca749013df4f6ea106f23bc159305843491f64672a55af2e5"}, + {file = "debugpy-1.8.11-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:654130ca6ad5de73d978057eaf9e582244ff72d4574b3e106fb8d3d2a0d32458"}, + {file = "debugpy-1.8.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23dc34c5e03b0212fa3c49a874df2b8b1b8fda95160bd79c01eb3ab51ea8d851"}, + {file = "debugpy-1.8.11-cp39-cp39-win32.whl", hash = "sha256:52d8a3166c9f2815bfae05f386114b0b2d274456980d41f320299a8d9a5615a7"}, + {file = "debugpy-1.8.11-cp39-cp39-win_amd64.whl", hash = "sha256:52c3cf9ecda273a19cc092961ee34eb9ba8687d67ba34cc7b79a521c1c64c4c0"}, + {file = "debugpy-1.8.11-py2.py3-none-any.whl", hash = "sha256:0e22f846f4211383e6a416d04b4c13ed174d24cc5d43f5fd52e7821d0ebc8920"}, + {file = "debugpy-1.8.11.tar.gz", hash = "sha256:6ad2688b69235c43b020e04fecccdf6a96c8943ca9c2fb340b8adc103c655e57"}, ] [[package]] @@ -766,13 +751,13 @@ files = [ [[package]] name = "django" -version = "5.0.9" +version = "5.0.11" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.0.9-py3-none-any.whl", hash = "sha256:f219576ba53be4e83f485130a7283f0efde06a9f2e3a7c3c5180327549f078fa"}, - {file = "Django-5.0.9.tar.gz", hash = "sha256:6333870d342329b60174da3a60dbd302e533f3b0bb0971516750e974a99b5a39"}, + {file = "Django-5.0.11-py3-none-any.whl", hash = "sha256:09e8128f717266bf382d82ffa4933f13da05d82579abf008ede86acb15dec88b"}, + {file = "Django-5.0.11.tar.gz", hash = "sha256:e7d98fa05ce09cb3e8d5ad6472fb602322acd1740bfdadc29c8404182d664f65"}, ] [package.dependencies] @@ -1534,6 +1519,7 @@ files = [ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, @@ -1605,18 +1591,18 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.10.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, + {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] @@ -1625,111 +1611,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = 
"pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = 
"pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = 
"pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = 
"sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -1737,13 +1723,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.6.1" +version = "2.7.1" description = "Settings management using Pydantic" optional = false 
=3.8"">
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
-    {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
+    {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
+    {file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
 ]

 [package.dependencies]
@@ -1799,17 +1785,18 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

 [[package]]
 name = "pyopenssl"
-version = "24.3.0"
+version = "25.0.0"
 description = "Python wrapper module around the OpenSSL library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
-    {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
+    {file = "pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90"},
+    {file = "pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16"},
 ]

 [package.dependencies]
 cryptography = ">=41.0.5,<45"
+typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}

 [package.extras]
 docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
@@ -1817,13 +1804,13 @@ test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]

 [[package]]
 name = "pyparsing"
-version = "3.2.0"
+version = "3.2.1"
 description = "pyparsing module - Classes and methods to define and execute parsing grammars"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"},
-    {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"},
+    {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"},
+    {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"},
 ]

 [package.extras]
@@ -1961,13 +1948,13 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"]

 [[package]]
 name = "redis"
-version = "5.2.0"
+version = "5.2.1"
 description = "Python client for Redis database and key-value store"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
-    {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
+    {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"},
+    {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
 ]

 [package.dependencies]
@@ -2040,101 +2027,114 @@ files = [

 [[package]]
 name = "rpds-py"
-version = "0.21.0"
+version = "0.22.3"
 description = "Python bindings to Rust's persistent data structures (rpds)"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"},
-    {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"},
-    {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"},
-    {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"},
-    {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"},
-    {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"},
-    {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"},
-    {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"},
-    {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"},
-    {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"},
-    {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"},
-    {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"},
-    {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"},
-    {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"},
-    {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"},
-    {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"},
-    {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"},
-    {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"},
-    {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"},
-    {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"},
-    {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"},
-    {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"},
-    {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"},
-    {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"},
-    {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"},
-    {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"},
-    {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"},
-    {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"},
-    {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"},
-    {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"},
-    {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"},
-    {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"},
-    {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"},
-    {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"},
-    {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"},
-    {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"},
-    {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"},
-    {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"},
-    {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"},
-    {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"},
-    {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"},
-    {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"},
-    {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"},
+    {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"},
+    {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"},
+    {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"},
+    {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"},
+    {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"},
+    {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"},
+    {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"},
+    {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"},
+    {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"},
+    {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"},
+    {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"},
+    {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"},
+    {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"},
+    {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"},
+    {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"},
+    {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"},
+    {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"},
+    {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"},
+    {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"},
+    {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"},
+    {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"},
+    {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"},
+    {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"},
+    {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"},
+    {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"},
+    {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"},
+    {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"},
+    {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"},
+    {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"},
+    {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"},
+    {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"},
+    {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"},
+    {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"},
+    {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"},
+    {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"},
+    {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"},
+    {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"},
+    {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"},
+    {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"},
+    {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"},
+    {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"},
+    {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"},
+    {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"},
+    {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"},
 ]

 [[package]]
@@ -2163,44 +2163,44 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"]

 [[package]]
 name = "setuptools"
-version = "75.6.0"
+version = "75.8.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"},
-    {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"},
+    {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"},
+    {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"},
 ]

 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
 core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
 cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
 enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]

 [[package]]
 name = "six"
-version = "1.16.0"
+version = "1.17.0"
 description = "Python 2 and 3 compatibility utilities"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 files = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+    {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
+    {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
 ]

 [[package]]
 name = "sqlparse"
-version = "0.5.2"
+version = "0.5.3"
 description = "A non-validating SQL parser."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "sqlparse-0.5.2-py3-none-any.whl", hash = "sha256:e99bc85c78160918c3e1d9230834ab8d80fc06c59d03f8db2618f65f65dda55e"},
-    {file = "sqlparse-0.5.2.tar.gz", hash = "sha256:9e37b35e16d1cc652a2545f0997c1deb23ea28fa1f3eefe609eee3063c3b105f"},
+    {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"},
+    {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"},
 ]

 [package.extras]
@@ -2345,13 +2345,13 @@ twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"]

 [[package]]
 name = "types-python-dateutil"
-version = "2.9.0.20241003"
+version = "2.9.0.20241206"
 description = "Typing stubs for python-dateutil"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"},
-    {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"},
+    {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"},
+    {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"},
 ]

 [[package]]
@@ -2403,13 +2403,13 @@ files = [

 [[package]]
 name = "urllib3"
-version = "2.2.3"
+version = "2.3.0"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
-    {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+    {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
+    {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
 ]

 [package.extras]
@@ -2420,13 +2420,13 @@ zstd = ["zstandard (>=0.18.0)"]

 [[package]]
 name = "virtualenv"
-version = "20.28.0"
+version = "20.28.1"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"},
-    {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"},
+    {file = "virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb"},
+    {file = "virtualenv-20.28.1.tar.gz", hash = "sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329"},
 ]

 [package.dependencies]