Skip to content

Commit

Permalink
pre-commit fixes: manual changes that ruff highlighted
Browse files Browse the repository at this point in the history
* datetime without tzinfo.
* combining if-statements.
* statements that seem to have no effect
* file open() without close()
* mark_safe() usage
* models without __str__()
  • Loading branch information
vdboor committed Jul 15, 2024
1 parent ad90532 commit 03c6119
Show file tree
Hide file tree
Showing 26 changed files with 103 additions and 84 deletions.
2 changes: 1 addition & 1 deletion dev-docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class GDALMockModule(_MockModule):
# -- Project information -----------------------------------------------------

project = "Amsterdam DSO-API"
copyright = f"{date.today().year}, Gemeente Amsterdam"
copyright = f"{date.today().year}, Gemeente Amsterdam" # noqa: DTZ011
author = (
"Team Datadiensten van het Dataplatform"
" onder de Directie Digitale Voorzieningen, Gemeente Amsterdam"
Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -96,4 +96,5 @@ max-complexity = 10
[tool.ruff.lint.per-file-ignores]
"**/migrations/*.py" = ["E501"] # line too long
"docs/_ext/djangodummy/settings.py" = ["S105"] # allow hardcoded SECRET_KEY
"src/tests/**/*.py" = ["S101", "S105", "S106", "S314", "S320"] # allow asserts, hardcoded passwords, lxml parsing
"src/tests/settings.py" = ["F405"] # allow unknown variables via import from *
"src/tests/**/*.py" = ["DJ008", "S101", "S105", "S106", "S314", "S320", "S608"] # allow asserts, hardcoded passwords, lxml parsing, SQL injection
2 changes: 1 addition & 1 deletion src/dso_api/dynamic_api/filters/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from . import lookups # noqa (import is needed for registration)
from . import lookups # noqa: F401 (needed for registration)
from .backends import DynamicFilterBackend, DynamicOrderingFilter
from .parser import FilterInput

Expand Down
5 changes: 2 additions & 3 deletions src/dso_api/dynamic_api/filters/backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,10 @@ class View(GenericAPIView):
ordering_param = "_sort" # Enforce DSO-specific name.

def get_ordering(self, request, queryset, view):
if self.ordering_param not in request.query_params:
if self.ordering_param not in request.query_params and "sorteer" in request.query_params:
# Allow DSO 1.0 Dutch "sorteer" parameter
# Can adjust 'self' as this instance is recreated each request.
if "sorteer" in request.query_params:
self.ordering_param = "sorteer"
self.ordering_param = "sorteer"

ordering = super().get_ordering(request, queryset, view)
if ordering is None:
Expand Down
6 changes: 4 additions & 2 deletions src/dso_api/dynamic_api/filters/values.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@ def str2isodate(value: str) -> date | datetime | None:
# for something that has no time at inputs, allowing to check against a complete day
# instead of exactly midnight.
try:
return datetime.strptime(value, "%Y-%m-%d").date()
# while this uses strptime(), it only takes the date, which has no tzinfo
return datetime.strptime(value, "%Y-%m-%d").date() # noqa: DTZ007
except ValueError:
pass

Expand All @@ -61,7 +62,8 @@ def str2time(value: str) -> time:
"""Parse a HH:MM:SS or HH:MM time formatted string."""
for format in ("%H:%M:%S", "%H:%M", "%H:%M:%S.%f"):
try:
return datetime.strptime(value, format).time()
# while this uses strptime(), it only takes the time, so no tzinfo is needed.
return datetime.strptime(value, format).time() # noqa: DTZ007
except ValueError:
pass

Expand Down
9 changes: 3 additions & 6 deletions src/dso_api/dynamic_api/remote/clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,13 +89,10 @@ def _get_http_error(response: HTTPResponse) -> APIException:
# This translates some errors into a 502 "Bad Gateway" or 503 "Gateway Timeout"
# error to reflect the fact that this API is calling another service as backend.

# Consider the actual JSON response here,
# unless the request hit the completely wrong page (it got an HTML page).
content_type = response.headers.get("content-type", "")
if content_type.startswith("text/html"):
# HTML error, probably hit the completely wrong page.
detail_message = None
else:
# Consider the actual JSON response to be relevant here.
detail_message = response.data.decode()
detail_message = response.data.decode() if not content_type.startswith("text/html") else None

if response.status == 400: # "bad request"
if content_type == "application/problem+json":
Expand Down
2 changes: 1 addition & 1 deletion src/dso_api/dynamic_api/routers.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def _build_db_models(self, db_datasets: Iterable[Dataset]) -> list[type[DynamicM
# Because dataset are related, we need to 'prewarm'
# the datasets cache (in schematools)
for dataset in db_datasets:
dataset.schema
dataset.schema # noqa: B018 (load data early)

for dataset in db_datasets: # type: Dataset
dataset_id = dataset.schema.id # not dataset.name which is mangled.
Expand Down
25 changes: 13 additions & 12 deletions src/dso_api/dynamic_api/serializers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -437,18 +437,19 @@ def get_fields(self):
# Remove fields from the _links field too. This is not done in the serializer
# itself as that creates a cross-dependency between the parent/child.fields property.
links_field = fields.get("_links")
if links_field is not None and isinstance(links_field, DynamicLinksSerializer):
if self.fields_to_display.reduced():
# The 'invalid_fields' is not checked against here, as that already happened
# for the top-level fields reduction.
main_and_links_fields = self.get_valid_field_names(fields)
fields_to_keep, _ = self.fields_to_display.get_allow_list(main_and_links_fields)
fields_to_keep.update(links_field.fields_always_included)
links_field.fields = {
name: field
for name, field in links_field.fields.items()
if name in fields_to_keep
}
if (
links_field is not None
and isinstance(links_field, DynamicLinksSerializer)
and self.fields_to_display.reduced()
):
# The 'invalid_fields' is not checked against here, as that already happened
# for the top-level fields reduction.
main_and_links_fields = self.get_valid_field_names(fields)
fields_to_keep, _ = self.fields_to_display.get_allow_list(main_and_links_fields)
fields_to_keep.update(links_field.fields_always_included)
links_field.fields = {
name: field for name, field in links_field.fields.items() if name in fields_to_keep
}

return fields

Expand Down
5 changes: 2 additions & 3 deletions src/dso_api/dynamic_api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,9 +180,8 @@ def has_field_access(user_scopes: UserScopes, field: DatasetFieldSchema) -> bool
table_access = related and user_scopes.has_table_fields_access(related_table)
field_access = user_scopes.has_field_access(field)

if related and table_access and field_access:
return True
return bool(not related and field_access)
# Related fields need table access, others only need field access.
return (related and table_access and field_access) or (not related and field_access)


def limit_queryset_for_scopes(
Expand Down
6 changes: 3 additions & 3 deletions src/dso_api/dynamic_api/views/doc.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ class LookupContext(NamedTuple):

def lookup_context(op, example, descr):
# disable mark_safe() warnings because this is static HTML in this very file.
return LookupContext(op, mark_safe(example), mark_safe(descr)) # nosec B308 B703
return LookupContext(op, mark_safe(example), mark_safe(descr)) # noqa: B308, B703, S308


# This should match ALLOWED_SCALAR_LOOKUPS in filters.parser (except for the "exact" lookup).
Expand Down Expand Up @@ -321,7 +321,7 @@ def _table_context(ds: Dataset, table: DatasetTableSchema):
exports.append(export_info)

if (temporal := table.temporal) is not None:
for name, fields in temporal.dimensions.items():
for name in temporal.dimensions:
filters.append(
{
"name": name,
Expand Down Expand Up @@ -546,7 +546,7 @@ def _filter_payload(
"name": name,
"type": type.capitalize(),
"is_deprecated": is_deprecated,
"value_example": mark_safe(value_example or ""), # nosec B308 B703 (is static HTML)
"value_example": mark_safe(value_example or ""), # noqa: B308, B703, S308 (is static HTML)
"lookups": [LOOKUP_CONTEXT[op] for op in lookups],
"auth": _fix_auth(field.auth | field.table.auth | field.table.dataset.auth),
}
Expand Down
2 changes: 1 addition & 1 deletion src/dso_api/dynamic_api/views/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def get(self, request, *args, **kwargs):
# Due to too many of these issues, avoid breaking the whole index listing for this.
# Plus, having the front page give a 500 error is not that nice.
logging.exception(
f"Internal URL resolving is broken for schema {ds.schema.id}: {e}"
"Internal URL resolving is broken for schema {%s}: {%s}", ds.schema.id, str(e)
)
env = []
rel = []
Expand Down
2 changes: 1 addition & 1 deletion src/rest_framework_dso/embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ def embedded_serializer(self) -> serializers.Serializer:
child = (
serializer.child if isinstance(serializer, serializers.ListSerializer) else serializer
)
child.fields # noqa: perform early checks
child.fields # noqa: B018, perform early checks

# Allow the output format to customize the serializer for the embedded relation.
renderer = self.serializer.context["request"].accepted_renderer
Expand Down
23 changes: 11 additions & 12 deletions src/rest_framework_dso/openapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,18 +285,17 @@ def _map_serializer_field(self, field, direction, bypass_extensions=False):
This method transforms the serializer field into a OpenAPI definition.
We've overwritten this to change the geometry field representation.
"""
if not hasattr(field, "_spectacular_annotation"):
# Fix support for missing field types:
if isinstance(field, GeometryField):
# Not using `rest_framework_gis.schema.GeoFeatureAutoSchema` here,
# as it duplicates components instead of using $ref.
if field.parent and hasattr(field.parent.Meta, "model"):
# model_field.geom_type is uppercase
model_field = field.parent.Meta.model._meta.get_field(field.source)
geojson_type = GEOM_TYPES_TO_GEOJSON.get(model_field.geom_type, "Geometry")
else:
geojson_type = "Geometry"
return {"$ref": f"#/components/schemas/{geojson_type}"}
# Fix support for missing field types:
if not hasattr(field, "_spectacular_annotation") and isinstance(field, GeometryField):
# Not using `rest_framework_gis.schema.GeoFeatureAutoSchema` here,
# as it duplicates components instead of using $ref.
if field.parent and hasattr(field.parent.Meta, "model"):
# model_field.geom_type is uppercase
model_field = field.parent.Meta.model._meta.get_field(field.source)
geojson_type = GEOM_TYPES_TO_GEOJSON.get(model_field.geom_type, "Geometry")
else:
geojson_type = "Geometry"
return {"$ref": f"#/components/schemas/{geojson_type}"}

return super()._map_serializer_field(field, direction, bypass_extensions=bypass_extensions)

Expand Down
8 changes: 5 additions & 3 deletions src/rest_framework_dso/pagination.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,14 @@ def paginate_queryset(self, queryset, request, view=None):

def get_page_size(self, request):
"""Allow the ``page_size`` parameter was fallback."""
if self.page_size_query_param not in request.query_params:
if (
self.page_size_query_param not in request.query_params
and "page_size" in request.query_params
):
# Allow our classic rest "page_size" setting that we leaked into
# the public API to be used as fallback. This only affects the
# current request (attribute is set on self, not the class).
if "page_size" in request.query_params:
self.page_size_query_param = "page_size"
self.page_size_query_param = "page_size"

return super().get_page_size(request)

Expand Down
1 change: 1 addition & 0 deletions src/rest_framework_dso/paginator.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ def __init__(self, object_list, per_page, orphans=0, allow_empty_first_page=True
"DSOPaginator instantiated with non-zero value in orphans. \
Orphans are not supported by this class and will be ignored.",
RuntimeWarning,
stacklevel=2,
)
super().__init__(object_list, per_page, 0, allow_empty_first_page)

Expand Down
10 changes: 4 additions & 6 deletions src/rest_framework_dso/renderers.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import orjson
from django.conf import settings
from django.urls import reverse
from django.utils.timezone import get_current_timezone
from rest_framework import renderers
from rest_framework.relations import HyperlinkedRelatedField
from rest_framework.serializers import ListSerializer, Serializer, SerializerMethodField
Expand Down Expand Up @@ -91,7 +92,7 @@ def finalize_response(self, response, renderer_context: dict):
def get_http_headers(self, renderer_context: dict):
"""Return the http headers for the response."""
if self.content_disposition:
now = datetime.now().isoformat()
now = datetime.now(tz=get_current_timezone()).isoformat()
dataset_id = renderer_context.get("dataset_id", "dataset")
table_id = renderer_context.get("table_id", "table")
return {
Expand Down Expand Up @@ -338,11 +339,8 @@ def _render_geojson(self, data, request=None):
yield self._render_geojson_detail(data, request=request)
return

if "_embed" in data:
# Must be a listing, not a detail view which may also have _embed.
collections = data["_embed"]
else:
collections = data
# If it's a listing, remove the _embed wrapper.
collections = data.get("_embed", data)
elif isinstance(data, (list, ReturnGenerator, GeneratorType)):
collections = {"gen": data}
else:
Expand Down
18 changes: 12 additions & 6 deletions src/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from django.core.handlers.wsgi import WSGIRequest
from django.db import connection
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import make_aware
from django.utils.timezone import get_current_timezone
from jwcrypto.jwt import JWT
from psycopg2.sql import SQL, Identifier
from rest_framework.request import Request
Expand All @@ -26,8 +26,8 @@
from tests.utils import api_request_with_scopes, to_drf_request

HERE = Path(__file__).parent
DATE_2021_FEB = make_aware(datetime(2021, 2, 28, 10, 0))
DATE_2021_JUNE = make_aware(datetime(2021, 6, 11, 10, 0))
DATE_2021_FEB = datetime(2021, 2, 28, 10, 0, tzinfo=get_current_timezone())
DATE_2021_JUNE = datetime(2021, 6, 11, 10, 0, tzinfo=get_current_timezone())


@pytest.fixture()
Expand Down Expand Up @@ -367,7 +367,7 @@ def afval_container(afval_container_model, afval_cluster):
eigenaar_naam="Dataservices",
# set to fixed dates to the CSV export can also check for desired formatting
datum_creatie=date(2021, 1, 3),
datum_leegmaken=make_aware(datetime(2021, 1, 3, 12, 13, 14)),
datum_leegmaken=datetime(2021, 1, 3, 12, 13, 14, tzinfo=get_current_timezone()),
cluster=afval_cluster,
geometry=Point(10, 10), # no SRID on purpose, should use django model field.
)
Expand Down Expand Up @@ -571,10 +571,16 @@ def movies_model(movies_dataset, dynamic_models):
def movies_data(movies_model, movies_category):
return [
movies_model.objects.create(
id=3, name="foo123", category=movies_category, date_added=datetime(2020, 1, 1, 0, 45)
id=3,
name="foo123",
category=movies_category,
date_added=datetime(2020, 1, 1, 0, 45, tzinfo=get_current_timezone()),
),
movies_model.objects.create(
id=4, name="test", category=movies_category, date_added=datetime(2020, 2, 2, 13, 15)
id=4,
name="test",
category=movies_category,
date_added=datetime(2020, 2, 2, 13, 15, tzinfo=get_current_timezone()),
),
]

Expand Down
4 changes: 1 addition & 3 deletions src/tests/settings.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
from pathlib import Path

from dso_api.settings import *
from dso_api.settings import * # noqa: F403, F405

# The reason the settings are defined here, is to make them independent
# of the regular project sources. Otherwise, the project needs to have
Expand Down
4 changes: 2 additions & 2 deletions src/tests/test_dynamic_api/remote/test_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -630,8 +630,8 @@ def respond(request):
HCBRK_FILES = Path(__file__).parent.parent.parent / "files" / "haalcentraalbrk"
HCBRK_NATUURLIJKPERSOON = (HCBRK_FILES / "hcbrk_natuurlijkpersoon.json").read_text()
HCBRK_ONROERENDE_ZAAK = (HCBRK_FILES / "hcbrk_onroerendezaak.json").read_text()
DSO_NATUURLIJKPERSOON = json.load(open(HCBRK_FILES / "dso_natuurlijkpersoon.json"))
DSO_ONROERENDE_ZAAK = json.load(open(HCBRK_FILES / "dso_onroerendezaak.json"))
DSO_NATUURLIJKPERSOON = json.loads((HCBRK_FILES / "dso_natuurlijkpersoon.json").read_text())
DSO_ONROERENDE_ZAAK = json.loads((HCBRK_FILES / "dso_onroerendezaak.json").read_text())


@pytest.mark.django_db
Expand Down
8 changes: 4 additions & 4 deletions src/tests/test_dynamic_api/test_serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -392,10 +392,10 @@ def test_backwards_relation(drf_request, gebieden_models):
"volgnummer": 1,
"identificatie": "0363",
},
"schema": "https://schemas.data.amsterdam.nl/datasets/gebieden/dataset#stadsdelen", # NoQA
"schema": "https://schemas.data.amsterdam.nl/datasets/gebieden/dataset#stadsdelen",
"wijken": [
{
"href": "http://testserver/v1/gebieden/wijken/03630000000001/?volgnummer=1", # NoQA
"href": "http://testserver/v1/gebieden/wijken/03630000000001/?volgnummer=1",
"title": "03630000000001.1",
"volgnummer": 1,
"identificatie": "03630000000001",
Expand Down Expand Up @@ -1155,7 +1155,7 @@ def test_request_protected_fields_without_scopes_should_not_be_allowed(
# Trying to fetch the data (Serializer tries to render the data)
# should not be allowed.
with pytest.raises(PermissionDenied):
monumenten_serializer.data
monumenten_serializer.data # noqa: B018

@staticmethod
def test_request_expand_scope_for_protected_expand_should_not_be_allowed(
Expand Down Expand Up @@ -1196,4 +1196,4 @@ def test_request_expand_scope_for_protected_expand_should_not_be_allowed(
# Trying to fetch the data (Serializer tries to render the data)
# should not be allowed.
with pytest.raises(PermissionDenied):
monumenten_serializer.data
monumenten_serializer.data # noqa: B018
8 changes: 4 additions & 4 deletions src/tests/test_dynamic_api/test_temporal_actions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import date, datetime
from datetime import date, datetime, timezone
from urllib.parse import parse_qs, urlparse

import pytest
Expand All @@ -18,7 +18,7 @@ def stadsdelen(gebieden_models):
id="03630000000016.1",
identificatie="03630000000016",
volgnummer=1,
registratiedatum=datetime(2006, 6, 12, 5, 40, 12),
registratiedatum=datetime(2006, 6, 12, 5, 40, 12, tzinfo=timezone.utc),
begin_geldigheid=date(2006, 6, 1),
eind_geldigheid=date(2015, 1, 1),
naam="Zuidoost",
Expand All @@ -28,7 +28,7 @@ def stadsdelen(gebieden_models):
id="03630000000016.2",
identificatie="03630000000016",
volgnummer=2,
registratiedatum=datetime(2015, 1, 1, 5, 40, 12),
registratiedatum=datetime(2015, 1, 1, 5, 40, 12, tzinfo=timezone.utc),
begin_geldigheid=date(2015, 1, 1),
eind_geldigheid=None,
naam="Zuidoost",
Expand All @@ -46,7 +46,7 @@ def gebied(gebieden_models, stadsdelen, buurt):
id="03630950000019.1",
identificatie="03630950000019",
volgnummer=1,
registratiedatum=datetime(2015, 1, 1, 5, 40, 12),
registratiedatum=datetime(2015, 1, 1, 5, 40, 12, tzinfo=timezone.utc),
begin_geldigheid=date(2014, 2, 20),
naam="Bijlmer-Centrum",
)
Expand Down
Loading

0 comments on commit 03c6119

Please sign in to comment.