Skip to content

Commit

Permalink
fix: correct a rebase merge
Browse files Browse the repository at this point in the history
  • Loading branch information
olloz26 committed Nov 28, 2024
1 parent 5dfba17 commit e9b12e2
Show file tree
Hide file tree
Showing 8 changed files with 159 additions and 43 deletions.
2 changes: 1 addition & 1 deletion DEVELOPING.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ function if you prefer to keep your favorite shell.
You can run style checks using `make style_checks`.
To run the test suite, use `make tests` (you likely need to run in the devcontainer for this to work, as it needs some
surrounding services to run).
* Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_storage_v2.py::test_storage_v2_create_openbis_secret`
* Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_data_connectors.py::test_create_openbis_data_connector`
* Also run tests marked with `@pytest.mark.myskip`: `PYTEST_FORCE_RUN_MYSKIPS=1 make tests`

## Migrations
Expand Down
51 changes: 49 additions & 2 deletions components/renku_data_services/data_connectors/blueprints.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
"""Data connectors blueprint."""

from dataclasses import dataclass
from datetime import datetime
from typing import Any

from sanic import Request
from sanic.response import HTTPResponse, JSONResponse
from sanic_ext import validate
from ulid import ULID

from renku_data_services import base_models
from renku_data_services import base_models, errors
from renku_data_services.base_api.auth import (
authenticate,
only_authenticated,
Expand All @@ -31,6 +32,7 @@
DataConnectorSecretRepository,
)
from renku_data_services.storage.rclone import RCloneValidator
from renku_data_services.utils.core import get_openbis_pat


@dataclass(kw_only=True)
Expand Down Expand Up @@ -310,10 +312,55 @@ async def _patch_secrets(
user: base_models.APIUser,
data_connector_id: ULID,
body: apispec.DataConnectorSecretPatchList,
validator: RCloneValidator,
) -> JSONResponse:
unsaved_secrets = validate_data_connector_secrets_patch(put=body)
data_connector = await self.data_connector_repo.get_data_connector(
user=user, data_connector_id=data_connector_id
)
storage = data_connector.storage
provider = validator.providers[storage.storage_type]
sensitive_lookup = [o.name for o in provider.options if o.sensitive]
for secret in unsaved_secrets:
if secret.name in sensitive_lookup:
continue
raise errors.ValidationError(
message=f"The '{secret.name}' property is not marked sensitive and can not be saved in the secret "
f"storage."
)
expiration_timestamp = None

if storage.storage_type == "openbis":

async def openbis_transform_session_token_to_pat() -> (
tuple[list[models.DataConnectorSecretUpdate], datetime]
):
if len(unsaved_secrets) == 1 and unsaved_secrets[0].name == "session_token":
if unsaved_secrets[0].value is not None:
try:
openbis_pat = await get_openbis_pat(
storage.configuration["host"], unsaved_secrets[0].value
)
return (
[models.DataConnectorSecretUpdate(name="session_token", value=openbis_pat[0])],
openbis_pat[1],
)
except Exception as e:
raise errors.ProgrammingError(message=str(e))
raise errors.ValidationError(message="The openBIS session token must be a string value.")

raise errors.ValidationError(message="The openBIS storage has only one secret: session_token")

(
unsaved_secrets,
expiration_timestamp,
) = await openbis_transform_session_token_to_pat()

secrets = await self.data_connector_secret_repo.patch_data_connector_secrets(
user=user, data_connector_id=data_connector_id, secrets=unsaved_secrets
user=user,
data_connector_id=data_connector_id,
secrets=unsaved_secrets,
expiration_timestamp=expiration_timestamp,
)
return validated_json(
apispec.DataConnectorSecretsList, [self._dump_data_connector_secret(secret) for secret in secrets]
Expand Down
12 changes: 10 additions & 2 deletions components/renku_data_services/data_connectors/db.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Adapters for data connectors database classes."""

from collections.abc import AsyncIterator, Callable
from datetime import datetime
from typing import TypeVar

from cryptography.hazmat.primitives.asymmetric import rsa
Expand Down Expand Up @@ -539,7 +540,11 @@ async def get_data_connector_secrets(
return [secret.dump() for secret in secrets]

async def patch_data_connector_secrets(
self, user: base_models.APIUser, data_connector_id: ULID, secrets: list[models.DataConnectorSecretUpdate]
self,
user: base_models.APIUser,
data_connector_id: ULID,
secrets: list[models.DataConnectorSecretUpdate],
expiration_timestamp: datetime | None,
) -> list[models.DataConnectorSecret]:
"""Create, update or remove data connector secrets."""
if user.id is None:
Expand Down Expand Up @@ -583,7 +588,9 @@ async def patch_data_connector_secrets(

if data_connector_secret_orm := existing_secrets_as_dict.get(name):
data_connector_secret_orm.secret.update(
encrypted_value=encrypted_value, encrypted_key=encrypted_key
encrypted_value=encrypted_value,
encrypted_key=encrypted_key,
expiration_timestamp=expiration_timestamp,
)
else:
secret_orm = secrets_schemas.SecretORM(
Expand All @@ -592,6 +599,7 @@ async def patch_data_connector_secrets(
encrypted_value=encrypted_value,
encrypted_key=encrypted_key,
kind=SecretKind.storage,
expiration_timestamp=expiration_timestamp,
)
data_connector_secret_orm = schemas.DataConnectorSecretORM(
name=name,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
"""add_secret_expiration_timestamp
"""add secret expiration timestamp
Revision ID: 7bc32829ed2f
Revises: 9058bf0a1a12
Create Date: 2024-08-21 12:38:30.932694
Revision ID: 4d2a21441beb
Revises: 1ef98b967767
Create Date: 2024-11-21 17:01:56.468831
"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "7bc32829ed2f"
down_revision = "9058bf0a1a12"
revision = "4d2a21441beb"
down_revision = "1ef98b967767"
branch_labels = None
depends_on = None

Expand Down
25 changes: 0 additions & 25 deletions components/renku_data_services/storage/core.py

This file was deleted.

4 changes: 2 additions & 2 deletions components/renku_data_services/utils/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ async def get_openbis_session_token(
) -> str:
"""Requests an openBIS session token with the user's login credentials."""
login = {"method": "login", "params": [username, password], "id": "2", "jsonrpc": "2.0"}
async with httpx.AsyncClient(verify=get_ssl_context()) as client:
async with httpx.AsyncClient(verify=get_ssl_context(), timeout=5) as client:
response = await client.post(_get_url(host), json=login, timeout=timeout)
if response.status_code == 200:
json: dict[str, str] = response.json()
Expand All @@ -126,7 +126,7 @@ async def get_openbis_pat(
"""Requests an openBIS PAT with an openBIS session ID."""
url = _get_url(host)

async with httpx.AsyncClient(verify=get_ssl_context()) as client:
async with httpx.AsyncClient(verify=get_ssl_context(), timeout=5) as client:
get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"}
response = await client.post(url, json=get_server_information, timeout=timeout)
if response.status_code == 200:
Expand Down
96 changes: 91 additions & 5 deletions test/bases/renku_data_services/data_api/test_data_connectors.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from sanic_testing.testing import SanicASGITestClient

from renku_data_services.users.models import UserInfo
from renku_data_services.utils.core import get_openbis_session_token
from test.bases.renku_data_services.data_api.utils import merge_headers


Expand Down Expand Up @@ -40,6 +41,39 @@ async def create_data_connector_helper(
return create_data_connector_helper


@pytest.fixture
def create_openbis_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers):
    """Fixture returning an async helper that creates an openBIS data connector via the API."""

    async def create_openbis_data_connector_helper(
        name: str, session_token: str, user: UserInfo | None = None, headers: dict[str, str] | None = None, **payload
    ) -> Any:
        """POST a new openBIS data connector and return the created resource as JSON.

        Extra keyword arguments override the default payload fields.
        """
        if user is None:
            user = regular_user
        if headers is None:
            headers = user_headers
        storage_spec = {
            "configuration": {
                "type": "openbis",
                "host": "openbis-eln-lims.ethz.ch",  # Public openBIS demo instance.
                "session_token": session_token,
            },
            "source_path": "/",
            "target_path": "my/target",
        }
        request_body = {
            "name": name,
            "description": "An openBIS data connector",
            "visibility": "private",
            "namespace": user.namespace.slug,
            "storage": storage_spec,
            "keywords": ["keyword 1", "keyword.2", "keyword-3", "KEYWORD_4"],
            **payload,  # caller-supplied overrides win, as with dict.update()
        }

        _, response = await sanic_client.post("/api/data/data_connectors", headers=headers, json=request_body)

        assert response.status_code == 201, response.text
        return response.json

    return create_openbis_data_connector_helper


@pytest.mark.asyncio
async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers) -> None:
payload = {
Expand Down Expand Up @@ -1092,6 +1126,14 @@ async def test_patch_data_connector_secrets(
assert len(secrets) == 2
assert {s["name"] for s in secrets} == {"access_key_id", "secret_access_key"}

payload = [
{"name": "not_sensitive", "value": "not_sensitive_value"},
]
_, response = await sanic_client.patch(
f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
)
assert response.status_code == 422, response.json


@pytest.mark.asyncio
async def test_patch_data_connector_secrets_update_secrets(
Expand Down Expand Up @@ -1161,7 +1203,7 @@ async def test_patch_data_connector_secrets_add_and_remove_secrets(
payload = [
{"name": "access_key_id", "value": "new access key id value"},
{"name": "secret_access_key", "value": None},
{"name": "password", "value": "password"},
{"name": "sse_kms_key_id", "value": "password"},
]
_, response = await sanic_client.patch(
f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
Expand All @@ -1171,7 +1213,7 @@ async def test_patch_data_connector_secrets_add_and_remove_secrets(
assert response.json is not None
secrets = response.json
assert len(secrets) == 2
assert {s["name"] for s in secrets} == {"access_key_id", "password"}
assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}
new_access_key_id_secret_id = next(filter(lambda s: s["name"] == "access_key_id", secrets), None)
assert new_access_key_id_secret_id == access_key_id_secret_id

Expand All @@ -1181,15 +1223,14 @@ async def test_patch_data_connector_secrets_add_and_remove_secrets(
assert response.json is not None
secrets = response.json
assert len(secrets) == 2
assert {s["name"] for s in secrets} == {"access_key_id", "password"}
assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}

# Check the associated secrets
_, response = await sanic_client.get("/api/data/user/secrets", params={"kind": "storage"}, headers=user_headers)

assert response.status_code == 200
assert response.json is not None
assert len(response.json) == 2
assert {s["name"] for s in secrets} == {"access_key_id", "password"}
assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}


@pytest.mark.asyncio
Expand Down Expand Up @@ -1229,6 +1270,51 @@ async def test_delete_data_connector_secrets(
assert response.json == [], response.json


@pytest.mark.myskip(1 == 1, reason="Depends on a remote openBIS host which may not always be available.")
@pytest.mark.asyncio
async def test_create_openbis_data_connector(sanic_client, create_openbis_data_connector, user_headers) -> None:
    """Exchange a live openBIS session token for a stored secret and check the response shape."""
    # Obtain a genuine session token from the public demo instance.
    openbis_session_token = await get_openbis_session_token(
        host="openbis-eln-lims.ethz.ch",  # Public openBIS demo instance.
        username="observer",
        password="1234",
    )
    connector = await create_openbis_data_connector("openBIS data connector 1", session_token=openbis_session_token)
    data_connector_id = connector["id"]

    secrets_patch = [{"name": "session_token", "value": openbis_session_token}]
    _, response = await sanic_client.patch(
        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=secrets_patch
    )

    assert response.status_code == 200, response.json
    stored = response.json
    # Exactly one secret named "session_token" should exist, exposing only id and name.
    assert {entry["name"] for entry in stored} == {"session_token"}
    assert len({entry["secret_id"] for entry in stored}) == 1
    assert stored[0].keys() == {"secret_id", "name"}


@pytest.mark.myskip(1 == 1, reason="Depends on a remote openBIS host which may not always be available.")
@pytest.mark.asyncio
async def test_create_openbis_data_connector_with_invalid_session_token(
    sanic_client, create_openbis_data_connector, user_headers
) -> None:
    """A bogus session token must make the PAT exchange fail with a 500 and a clear message."""
    invalid_openbis_session_token = "1234"
    connector = await create_openbis_data_connector("openBIS data connector 1", invalid_openbis_session_token)
    data_connector_id = connector["id"]

    secrets_patch = [{"name": "session_token", "value": invalid_openbis_session_token}]
    _, response = await sanic_client.patch(
        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=secrets_patch
    )
    assert response.status_code == 500, response.json
    assert response.json["error"]["message"] == "An openBIS personal access token related request failed."


@pytest.mark.asyncio
async def test_get_project_permissions_unauthorized(
sanic_client, create_data_connector, admin_headers, admin_user, user_headers
Expand Down
Empty file.

0 comments on commit e9b12e2

Please sign in to comment.