diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py index ea278173db3..5249acdc5d6 100755 --- a/.ci/scripts/calculate_jobs.py +++ b/.ci/scripts/calculate_jobs.py @@ -60,7 +60,7 @@ def set_output(key: str, value: str): { "python-version": "3.9", "database": "postgres", - "postgres-version": "11", + "postgres-version": "13", "extras": "all", } ] diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index ebf866e3d5f..82cacdfeb3c 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -14,7 +14,7 @@ permissions: id-token: write # needed for signing the images with GitHub OIDC Token jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Set up QEMU id: qemu diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index 6d184a21e03..3962f750559 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 42a374fa190..10583bc0600 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -213,7 +213,7 @@ jobs: tar -cvJf debs.tar.xz debs - name: Attach to release # Pinned to work around https://github.com/softprops/action-gh-release/issues/445 - uses: softprops/action-gh-release@v2.0.5 + uses: softprops/action-gh-release@v0.1.15 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -221,3 +221,7 @@ jobs: Sdist/* Wheel*/* debs.tar.xz + # if it's not already published, keep the release as a draft. + draft: true + # mark it as a prerelease if the tag contains 'rc'. + prerelease: ${{ contains(github.ref, 'rc') }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d91f9c29187..084b08b2492 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -581,7 +581,7 @@ jobs: matrix: include: - python-version: "3.9" - postgres-version: "11" + postgres-version: "13" - python-version: "3.13" postgres-version: "17" diff --git a/CHANGES.md b/CHANGES.md index 0caac3f89e3..3ddc96a96a8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,173 @@ +# Synapse 1.122.0rc1 (2025-01-07) + +Please note that this version of Synapse drops support for PostgresQL 11 and 12. The minimum version of PostgreSQL supported is now version 13. + +### Deprecations and Removals + +- Remove support for PostgreSQL 11 and 12. Contributed by @clokep. ([\#18034](https://github.com/element-hq/synapse/issues/18034)) + +### Features + +- Added the `email.tlsname` config option. This allows specifying the domain name used to validate the SMTP server's TLS certificate separately from the `email.smtp_host` to connect to. ([\#17849](https://github.com/element-hq/synapse/issues/17849)) +- Module developers will have access to the user ID of the requester when adding `check_username_for_spam` callbacks to `spam_checker_module_callbacks`. Contributed by Wilson@Pangea.chat. 
([\#17916](https://github.com/element-hq/synapse/issues/17916)) +- Add endpoints to the Admin API to fetch the number of invites the provided user has sent after a given timestamp, + fetch the number of rooms the provided user has joined after a given timestamp, and get report IDs of event + reports against a provided user (i.e. where the user was the sender of the reported event). ([\#17948](https://github.com/element-hq/synapse/issues/17948)) +- Support stable account suspension from [MSC3823](https://github.com/matrix-org/matrix-spec-proposals/pull/3823). ([\#17964](https://github.com/element-hq/synapse/issues/17964)) +- Add `macaroon_secret_key_path` config option. ([\#17983](https://github.com/element-hq/synapse/issues/17983)) + +### Bugfixes + +- Fix bug when rejecting withdrew invite with a `third_party_rules` module, where the invite would be stuck for the client. ([\#17930](https://github.com/element-hq/synapse/issues/17930)) +- Properly purge state groups tables when purging a room with the Admin API. ([\#18024](https://github.com/element-hq/synapse/issues/18024)) +- Fix a bug preventing the admin redaction endpoint from working on messages from remote users. ([\#18029](https://github.com/element-hq/synapse/issues/18029), [\#18043](https://github.com/element-hq/synapse/issues/18043)) + +### Improved Documentation + +- Update `synapse.app.generic_worker` documentation to only recommend `GET` requests for stream writer routes by default, unless the worker is also configured as a stream writer. Contributed by @evoL. ([\#17954](https://github.com/element-hq/synapse/issues/17954)) +- Add documentation for the previously-undocumented `last_seen_ts` query parameter to the query user Admin API. ([\#17976](https://github.com/element-hq/synapse/issues/17976)) +- Improve documentation for the `TaskScheduler` class. ([\#17992](https://github.com/element-hq/synapse/issues/17992)) +- Fix example in reverse proxy docs to include server port. ([\#17994](https://github.com/element-hq/synapse/issues/17994)) +- Update Alpine Linux Synapse Package Maintainer within the installation instructions. ([\#17846](https://github.com/element-hq/synapse/issues/17846)) + +### Internal Changes + +- Add `RoomID` & `EventID` rust types. ([\#17996](https://github.com/element-hq/synapse/issues/17996)) +- Fix various type errors across the codebase. ([\#17998](https://github.com/element-hq/synapse/issues/17998)) +- Disable DB statement timeout when doing a room purge since it can be quite long. ([\#18017](https://github.com/element-hq/synapse/issues/18017)) +- Remove some remaining uses of `twisted.internet.defer.returnValue`. Contributed by Colin Watson. ([\#18020](https://github.com/element-hq/synapse/issues/18020)) +- Refactor `get_profile` to no longer include fields with a value of `None`. ([\#18063](https://github.com/element-hq/synapse/issues/18063)) + +### Updates to locked dependencies + +* Bump anyhow from 1.0.93 to 1.0.95. ([\#18012](https://github.com/element-hq/synapse/issues/18012), [\#18045](https://github.com/element-hq/synapse/issues/18045)) +* Bump authlib from 1.3.2 to 1.4.0. ([\#18048](https://github.com/element-hq/synapse/issues/18048)) +* Bump dawidd6/action-download-artifact from 6 to 7. ([\#17981](https://github.com/element-hq/synapse/issues/17981)) +* Bump http from 1.1.0 to 1.2.0. ([\#18013](https://github.com/element-hq/synapse/issues/18013)) +- Bump mypy from 1.11.2 to 1.12.1. ([\#17999](https://github.com/element-hq/synapse/issues/17999)) +* Bump mypy-zope from 1.0.8 to 1.0.9. 
([\#18047](https://github.com/element-hq/synapse/issues/18047)) +* Bump pillow from 10.4.0 to 11.0.0. ([\#18015](https://github.com/element-hq/synapse/issues/18015)) +* Bump pydantic from 2.9.2 to 2.10.3. ([\#18014](https://github.com/element-hq/synapse/issues/18014)) +* Bump pyicu from 2.13.1 to 2.14. ([\#18060](https://github.com/element-hq/synapse/issues/18060)) +* Bump pyo3 from 0.23.2 to 0.23.3. ([\#18001](https://github.com/element-hq/synapse/issues/18001)) +* Bump python-multipart from 0.0.16 to 0.0.18. ([\#17985](https://github.com/element-hq/synapse/issues/17985)) +* Bump sentry-sdk from 2.17.0 to 2.19.2. ([\#18061](https://github.com/element-hq/synapse/issues/18061)) +* Bump serde from 1.0.215 to 1.0.217. ([\#18031](https://github.com/element-hq/synapse/issues/18031), [\#18059](https://github.com/element-hq/synapse/issues/18059)) +* Bump serde_json from 1.0.133 to 1.0.134. ([\#18044](https://github.com/element-hq/synapse/issues/18044)) +* Bump twine from 5.1.1 to 6.0.1. ([\#18049](https://github.com/element-hq/synapse/issues/18049)) + +# Synapse 1.121.1 (2024-12-11) + +This release contains a fix for our docker build CI. It is functionally identical to 1.121.0, whose changelog is below. + +### Internal Changes + +- Downgrade the Ubuntu GHA runner when building docker images. ([\#18026](https://github.com/element-hq/synapse/issues/18026)) + + + + +# Synapse 1.121.0 (2024-12-11) + +### Internal Changes + +- Fix release process to not create duplicate releases. ([\#18025](https://github.com/element-hq/synapse/issues/18025)) + + + +# Synapse 1.121.0rc1 (2024-12-04) + +### Features + +- Support for [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190): device management for Application Services. ([\#17705](https://github.com/element-hq/synapse/issues/17705)) +- Update [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) Sliding Sync to include invite, ban, kick, targets when `$LAZY`-loading room members. ([\#17947](https://github.com/element-hq/synapse/issues/17947)) +- Use stable `M_USER_LOCKED` error code for locked accounts, as per [Matrix 1.12](https://spec.matrix.org/v1.12/client-server-api/#account-locking). ([\#17965](https://github.com/element-hq/synapse/issues/17965)) +- [MSC4076](https://github.com/matrix-org/matrix-spec-proposals/pull/4076): Add `disable_badge_count` to pusher configuration. ([\#17975](https://github.com/element-hq/synapse/issues/17975)) + +### Bugfixes + +- Fix long-standing bug where read receipts could get overly delayed being sent over federation. ([\#17933](https://github.com/element-hq/synapse/issues/17933)) + +### Improved Documentation + +- Add OIDC example configuration for Forgejo (fork of Gitea). ([\#17872](https://github.com/element-hq/synapse/issues/17872)) +- Link to element-docker-demo from contrib/docker*. ([\#17953](https://github.com/element-hq/synapse/issues/17953)) + +### Internal Changes + +- [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/pull/4108): Add a `Content-Type` header on the `PUT` response to work around a faulty behavior in some caching reverse proxies. ([\#17253](https://github.com/element-hq/synapse/issues/17253)) +- Fix incorrect comment in new schema delta. ([\#17936](https://github.com/element-hq/synapse/issues/17936)) +- Raise setuptools_rust version cap to 1.10.2. ([\#17944](https://github.com/element-hq/synapse/issues/17944)) +- Enable encrypted appservice related experimental features in the complement docker image. 
([\#17945](https://github.com/element-hq/synapse/issues/17945)) +- Return whether the user is suspended when querying the user account in the Admin API. ([\#17952](https://github.com/element-hq/synapse/issues/17952)) +- Fix new scheduled tasks jumping the queue. ([\#17962](https://github.com/element-hq/synapse/issues/17962)) +- Bump pyo3 and dependencies to v0.23.2. ([\#17966](https://github.com/element-hq/synapse/issues/17966)) +- Update setuptools-rust and fix building abi3 wheels in latest version. ([\#17969](https://github.com/element-hq/synapse/issues/17969)) +- Consolidate SSO redirects through `/_matrix/client/v3/login/sso/redirect(/{idpId})`. ([\#17972](https://github.com/element-hq/synapse/issues/17972)) +- Fix Docker and Complement config to be able to use `public_baseurl`. ([\#17986](https://github.com/element-hq/synapse/issues/17986)) +- Fix building wheels for MacOS which was temporarily disabled in Synapse 1.120.2. ([\#17993](https://github.com/element-hq/synapse/issues/17993)) +- Fix release process to not create duplicate releases. ([\#17970](https://github.com/element-hq/synapse/issues/17970), [\#17995](https://github.com/element-hq/synapse/issues/17995)) + + +### Updates to locked dependencies + +* Bump bytes from 1.8.0 to 1.9.0. ([\#17982](https://github.com/element-hq/synapse/issues/17982)) +* Bump pysaml2 from 7.3.1 to 7.5.0. ([\#17978](https://github.com/element-hq/synapse/issues/17978)) +* Bump serde_json from 1.0.132 to 1.0.133. ([\#17939](https://github.com/element-hq/synapse/issues/17939)) +* Bump tomli from 2.0.2 to 2.1.0. ([\#17959](https://github.com/element-hq/synapse/issues/17959)) +* Bump tomli from 2.1.0 to 2.2.1. ([\#17979](https://github.com/element-hq/synapse/issues/17979)) +* Bump tornado from 6.4.1 to 6.4.2. ([\#17955](https://github.com/element-hq/synapse/issues/17955)) + +# Synapse 1.120.2 (2024-12-03) + +This version has building of wheels for macOS disabled. +It is functionally identical to 1.120.1, which contains multiple security fixes. +If you are already using 1.120.1, there is no need to upgrade to this version. + + + +# Synapse 1.120.1 (2024-12-03) + +This patch release fixes multiple security vulnerabilities, some affecting all prior versions of Synapse. Server administrators are encouraged to update Synapse as soon as possible. We are not aware of these vulnerabilities being exploited in the wild. + +Administrators who are unable to update Synapse may use the workarounds described in the linked GitHub Security Advisory below. + +### Security advisory + +The following issues are fixed in 1.120.1. + +- [GHSA-rfq8-j7rh-8hf2](https://github.com/element-hq/synapse/security/advisories/GHSA-rfq8-j7rh-8hf2) / [CVE-2024-52805](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-52805): **Unsupported content types can lead to memory exhaustion** + + Synapse instances which have a high `max_upload_size` and which don't have a reverse proxy in front of them that would otherwise limit upload size are affected. + + Fixed by [4b7154c58501b4bf5e1c2d6c11ebef96529f2fdf](https://github.com/element-hq/synapse/commit/4b7154c58501b4bf5e1c2d6c11ebef96529f2fdf). + +- [GHSA-f3r3-h2mq-hx2h](https://github.com/element-hq/synapse/security/advisories/GHSA-f3r3-h2mq-hx2h) / [CVE-2024-52815](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-52815): **Malicious invites via federation can break a user's sync** + + Fixed by [d82e1ed357b7ee21dff83d06cba7a67840cfd464](https://github.com/element-hq/synapse/commit/d82e1ed357b7ee21dff83d06cba7a67840cfd464). 
+ +- [GHSA-vp6v-whfm-rv3g](https://github.com/element-hq/synapse/security/advisories/GHSA-vp6v-whfm-rv3g) / [CVE-2024-53863](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-53863): **Synapse can be forced to thumbnail unexpected file formats, invoking potentially untrustworthy decoders** + + Synapse instances can disable dynamic thumbnailing by setting `dynamic_thumbnails` to `false` in the configuration file. + + Fixed by [b64a4e5fbbbf119b6c65aedf0d999b4237d55503](https://github.com/element-hq/synapse/commit/b64a4e5fbbbf119b6c65aedf0d999b4237d55503). + +- [GHSA-56w4-5538-8v8h](https://github.com/element-hq/synapse/security/advisories/GHSA-56w4-5538-8v8h) / [CVE-2024-53867](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-53867): **The Sliding Sync feature on Synapse versions between 1.113.0rc1 and 1.120.0 can leak partial room state changes to users no longer in a room** + + Non-state events, like messages, are unaffected. + + Synapse instances can disable the Sliding Sync feature by setting `experimental_features.msc3575_enabled` to `false` in the configuration file. + + Fixed by [4daa533e82f345ce87b9495d31781af570ba3ead](https://github.com/element-hq/synapse/commit/4daa533e82f345ce87b9495d31781af570ba3ead). + +See the advisories for more details. If you have any questions, email [security at element.io](mailto:security@element.io). + +### Bugfixes + +- Fix release process to not create duplicate releases. ([\#17970](https://github.com/element-hq/synapse/issues/17970)) + + + # Synapse 1.120.0 (2024-11-26) ### Bugfixes diff --git a/Cargo.lock b/Cargo.lock index b7084165ee2..859dd2086d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "arc-swap" @@ -61,9 +61,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cfg-if" @@ -168,9 +168,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -272,9 +272,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f54b3d09cbdd1f8c20650b28e7b09e338881482f4aa908a5f61a00c98fba2690" +checksum = "e484fd2c8b4cb67ab05a318f1fd6fa8f199fcc30819f08f07d200809dba26c15" dependencies = [ "anyhow", "cfg-if", @@ -291,9 +291,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3015cf985888fe66cfb63ce0e321c603706cd541b7aec7ddd35c281390af45d8" +checksum = 
"dc0e0469a84f208e20044b98965e1561028180219e35352a2afaf2b942beff3b" dependencies = [ "once_cell", "target-lexicon", @@ -301,9 +301,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fca7cd8fd809b5ac4eefb89c1f98f7a7651d3739dfb341ca6980090f554c270" +checksum = "eb1547a7f9966f6f1a0f0227564a9945fe36b90da5a93b3933fc3dc03fae372d" dependencies = [ "libc", "pyo3-build-config", @@ -322,9 +322,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34e657fa5379a79151b6ff5328d9216a84f55dc93b17b08e7c3609a969b73aa0" +checksum = "fdb6da8ec6fa5cedd1626c886fc8749bdcbb09424a86461eb8cdf096b7c33257" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -334,9 +334,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "295548d5ffd95fd1981d2d3cf4458831b21d60af046b729b6fd143b0ba7aee2f" +checksum = "38a385202ff5a92791168b1136afae5059d3ac118457bb7bc304c197c2d33e7d" dependencies = [ "heck", "proc-macro2", @@ -431,18 +431,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "serde" -version = "1.0.215" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.215" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", @@ -451,9 +451,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "itoa", "memchr", diff --git a/changelog.d/17253.misc b/changelog.d/17253.misc deleted file mode 100644 index 868691624d0..00000000000 --- a/changelog.d/17253.misc +++ /dev/null @@ -1 +0,0 @@ -[MSC4108](https://github.com/matrix-org/matrix-spec-proposals/pull/4108): Add a `Content-Type` header on the `PUT` response to work around a faulty behavior in some caching reverse proxies. diff --git a/changelog.d/17732.bugfix b/changelog.d/17732.bugfix new file mode 100644 index 00000000000..572c13fc573 --- /dev/null +++ b/changelog.d/17732.bugfix @@ -0,0 +1 @@ +Fix membership caches not updating in state reset scenarios. diff --git a/changelog.d/17872.doc b/changelog.d/17872.doc deleted file mode 100644 index 7f8b2d34953..00000000000 --- a/changelog.d/17872.doc +++ /dev/null @@ -1 +0,0 @@ -Add OIDC example configuration for Forgejo (fork of Gitea). diff --git a/changelog.d/17933.bugfix b/changelog.d/17933.bugfix deleted file mode 100644 index 8d30ac587eb..00000000000 --- a/changelog.d/17933.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix long-standing bug where read receipts could get overly delayed being sent over federation. 
diff --git a/changelog.d/17936.misc b/changelog.d/17936.misc deleted file mode 100644 index 91d976fbd9c..00000000000 --- a/changelog.d/17936.misc +++ /dev/null @@ -1 +0,0 @@ -Fix incorrect comment in new schema delta. diff --git a/changelog.d/17944.misc b/changelog.d/17944.misc deleted file mode 100644 index a8a645103f4..00000000000 --- a/changelog.d/17944.misc +++ /dev/null @@ -1 +0,0 @@ -Raise setuptools_rust version cap to 1.10.2. \ No newline at end of file diff --git a/changelog.d/17945.misc b/changelog.d/17945.misc deleted file mode 100644 index eeebb921699..00000000000 --- a/changelog.d/17945.misc +++ /dev/null @@ -1 +0,0 @@ -Enable encrypted appservice related experimental features in the complement docker image. diff --git a/changelog.d/17952.misc b/changelog.d/17952.misc deleted file mode 100644 index 84fc8bfc290..00000000000 --- a/changelog.d/17952.misc +++ /dev/null @@ -1 +0,0 @@ -Return whether the user is suspended when querying the user account in the Admin API. \ No newline at end of file diff --git a/changelog.d/17953.doc b/changelog.d/17953.doc deleted file mode 100644 index 10f5a27ba9f..00000000000 --- a/changelog.d/17953.doc +++ /dev/null @@ -1 +0,0 @@ -Link to element-docker-demo from contrib/docker*. diff --git a/changelog.d/17962.misc b/changelog.d/17962.misc deleted file mode 100644 index adf63487079..00000000000 --- a/changelog.d/17962.misc +++ /dev/null @@ -1 +0,0 @@ -Fix new scheduled tasks jumping the queue. diff --git a/changelog.d/17966.misc b/changelog.d/17966.misc deleted file mode 100644 index c6d6e55fbf9..00000000000 --- a/changelog.d/17966.misc +++ /dev/null @@ -1 +0,0 @@ -Bump pyo3 and dependencies to v0.23.2. \ No newline at end of file diff --git a/changelog.d/17969.misc b/changelog.d/17969.misc deleted file mode 100644 index 05506daaa06..00000000000 --- a/changelog.d/17969.misc +++ /dev/null @@ -1 +0,0 @@ -Update setuptools-rust and fix building abi3 wheels in latest version. diff --git a/changelog.d/17970.bugfix b/changelog.d/17970.bugfix deleted file mode 100644 index 835079de3f3..00000000000 --- a/changelog.d/17970.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix release process to not create duplicate releases. diff --git a/changelog.d/17972.misc b/changelog.d/17972.misc deleted file mode 100644 index e7f009d20d4..00000000000 --- a/changelog.d/17972.misc +++ /dev/null @@ -1 +0,0 @@ -Consolidate SSO redirects through `/_matrix/client/v3/login/sso/redirect(/{idpId})`. diff --git a/changelog.d/18035.feature b/changelog.d/18035.feature new file mode 100644 index 00000000000..99b68a9e45d --- /dev/null +++ b/changelog.d/18035.feature @@ -0,0 +1 @@ +Add a unit test for the `type` parameter of the [Room State Admin API](https://element-hq.github.io/synapse/develop/admin_api/rooms.html#room-state-api). \ No newline at end of file diff --git a/changelog.d/18052.removal b/changelog.d/18052.removal new file mode 100644 index 00000000000..f173b9ba4d7 --- /dev/null +++ b/changelog.d/18052.removal @@ -0,0 +1 @@ +Remove the unstable [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151) implementation. The stable support remains, per [Matrix 1.13](https://spec.matrix.org/v1.13/client-server-api/#post_matrixclientv3roomsroomidreport). 
\ No newline at end of file diff --git a/contrib/cmdclient/console.py b/contrib/cmdclient/console.py index ca2e72b5e8e..9b5d33d2b1f 100755 --- a/contrib/cmdclient/console.py +++ b/contrib/cmdclient/console.py @@ -245,7 +245,7 @@ def _check_can_login(self): if "flows" not in json_res: print("Failed to find any login flows.") - defer.returnValue(False) + return False flow = json_res["flows"][0] # assume first is the one we want. if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow: @@ -254,8 +254,8 @@ def _check_can_login(self): "Unable to login via the command line client. Please visit " "%s to login." % fallback_url ) - defer.returnValue(False) - defer.returnValue(True) + return False + return True def do_emailrequest(self, line): """Requests the association of a third party identifier diff --git a/contrib/cmdclient/http.py b/contrib/cmdclient/http.py index e6a10b5f329..54363e42592 100644 --- a/contrib/cmdclient/http.py +++ b/contrib/cmdclient/http.py @@ -78,7 +78,7 @@ def put_json(self, url, data): url, data, headers_dict={"Content-Type": ["application/json"]} ) body = yield readBody(response) - defer.returnValue((response.code, body)) + return response.code, body @defer.inlineCallbacks def get_json(self, url, args=None): @@ -88,7 +88,7 @@ def get_json(self, url, args=None): url = "%s?%s" % (url, qs) response = yield self._create_get_request(url) body = yield readBody(response) - defer.returnValue(json.loads(body)) + return json.loads(body) def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None): """Wrapper of _create_request to issue a PUT request""" @@ -134,7 +134,7 @@ def do_request( response = yield self._create_request(method, url) body = yield readBody(response) - defer.returnValue(json.loads(body)) + return json.loads(body) @defer.inlineCallbacks def _create_request( @@ -173,7 +173,7 @@ def _create_request( if self.verbose: print("Status %s %s" % (response.code, response.phrase)) print(pformat(list(response.headers.getAllRawHeaders()))) - defer.returnValue(response) + return response def sleep(self, seconds): d = defer.Deferred() diff --git a/debian/changelog b/debian/changelog index bd4466d7aaa..e08a212309a 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,39 @@ +matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium + + * New Synapse release 1.122.0rc1. + + -- Synapse Packaging team Tue, 07 Jan 2025 14:06:19 +0000 + +matrix-synapse-py3 (1.121.1) stable; urgency=medium + + * New Synapse release 1.121.1. + + -- Synapse Packaging team Wed, 11 Dec 2024 18:24:48 +0000 + +matrix-synapse-py3 (1.121.0) stable; urgency=medium + + * New Synapse release 1.121.0. + + -- Synapse Packaging team Wed, 11 Dec 2024 13:12:30 +0100 + +matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium + + * New Synapse release 1.121.0rc1. + + -- Synapse Packaging team Wed, 04 Dec 2024 14:47:23 +0000 + +matrix-synapse-py3 (1.120.2) stable; urgency=medium + + * New synapse release 1.120.2. + + -- Synapse Packaging team Tue, 03 Dec 2024 15:43:37 +0000 + +matrix-synapse-py3 (1.120.1) stable; urgency=medium + + * New synapse release 1.120.1. + + -- Synapse Packaging team Tue, 03 Dec 2024 09:07:57 +0000 + matrix-synapse-py3 (1.120.0) stable; urgency=medium * New synapse release 1.120.0. 
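The `contrib/cmdclient` changes above replace the legacy `twisted.internet.defer.returnValue(...)` calls with plain `return` statements inside `@defer.inlineCallbacks` generators. A minimal sketch of the resulting pattern, assuming Twisted's `inlineCallbacks` and `readBody`; the `fetch_body` helper and the URL handling here are illustrative only and are not part of the patch:

```python
from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody


@defer.inlineCallbacks
def fetch_body(url: bytes):
    """Illustrative helper: issue a GET request and return the response body.

    Mirrors the style of contrib/cmdclient/http.py after this patch.
    """
    agent = Agent(reactor)
    response = yield agent.request(b"GET", url)
    body = yield readBody(response)
    # On Python 3, a plain `return` inside an inlineCallbacks generator sets
    # the Deferred's result, which is what defer.returnValue(body) used to do.
    return body
```

`defer.returnValue` was only required on Python 2, where generators could not return values; since Python 3.3 a `return value` inside a generator works directly, so the plain-`return` form used throughout the patch is equivalent.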
diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2 index 9a74c617bc4..c5228af72d0 100644 --- a/docker/complement/conf/workers-shared-extra.yaml.j2 +++ b/docker/complement/conf/workers-shared-extra.yaml.j2 @@ -7,6 +7,7 @@ #} ## Server ## +public_baseurl: http://127.0.0.1:8008/ report_stats: False trusted_key_servers: [] enable_registration: true diff --git a/docker/conf-workers/nginx.conf.j2 b/docker/conf-workers/nginx.conf.j2 index d1e02af7232..95d2f760d2f 100644 --- a/docker/conf-workers/nginx.conf.j2 +++ b/docker/conf-workers/nginx.conf.j2 @@ -38,10 +38,13 @@ server { {% if using_unix_sockets %} proxy_pass http://unix:/run/main_public.sock; {% else %} + # note: do not add a path (even a single /) after the port in `proxy_pass`, + # otherwise nginx will canonicalise the URI and cause signature verification + # errors. proxy_pass http://localhost:8080; {% endif %} proxy_set_header X-Forwarded-For $remote_addr; proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $host; + proxy_set_header Host $host:$server_port; } } diff --git a/docs/admin_api/event_reports.md b/docs/admin_api/event_reports.md index 83f7dc37f41..9075e928822 100644 --- a/docs/admin_api/event_reports.md +++ b/docs/admin_api/event_reports.md @@ -60,10 +60,11 @@ paginate through. anything other than the return value of `next_token` from a previous call. Defaults to `0`. * `dir`: string - Direction of event report order. Whether to fetch the most recent first (`b`) or the oldest first (`f`). Defaults to `b`. -* `user_id`: string - Is optional and filters to only return users with user IDs that - contain this value. This is the user who reported the event and wrote the reason. -* `room_id`: string - Is optional and filters to only return rooms with room IDs that - contain this value. +* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event + and wrote the reason. +* `room_id`: optional string - Filter by room id. +* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who + the report was made against. **Response** diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md index 8e3a367e90a..bfc2cd4376b 100644 --- a/docs/admin_api/rooms.md +++ b/docs/admin_api/rooms.md @@ -385,6 +385,13 @@ The API is: GET /_synapse/admin/v1/rooms//state ``` +**Parameters** + +The following query parameter is available: + +* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events + of this type will be returned (regardless of their `state_key` value). + A response body like the following is returned: ```json diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index a6e2e0a1537..2742d2d2cd3 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -40,6 +40,7 @@ It returns a JSON body like the following: "erased": false, "shadow_banned": 0, "creation_ts": 1560432506, + "last_seen_ts": 1732919539393, "appservice_id": null, "consent_server_notice_sent": null, "consent_version": null, @@ -477,9 +478,9 @@ with a body of: } ``` -## List room memberships of a user +## List joined rooms of a user -Gets a list of all `room_id` that a specific `user_id` is member. +Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in). 
The API is: @@ -516,6 +517,73 @@ The following fields are returned in the JSON response body: - `joined_rooms` - An array of `room_id`. - `total` - Number of rooms. +## Get the number of invites sent by the user + +Fetches the number of invites sent by the provided user ID across all rooms +after the given timestamp. + +``` +GET /_synapse/admin/v1/users/$user_id/sent_invite_count +``` + +**Parameters** + +The following parameters should be set in the URL: + +* `user_id`: fully qualified: for example, `@user:server.com` + +The following should be set as query parameters in the URL: + +* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only + invites sent at or after the provided timestamp will be returned. + This works by comparing the provided timestamp to the `received_ts` + column in the `events` table. + Note: https://currentmillis.com/ is a useful tool for converting dates + into timestamps and vice versa. + +A response body like the following is returned: + +```json +{ + "invite_count": 30 +} +``` + +_Added in Synapse 1.122.0_ + +## Get the cumulative number of rooms a user has joined after a given timestamp + +Fetches the number of rooms that the user joined after the given timestamp, even +if they have subsequently left/been banned from those rooms. + +``` +GET /_synapse/admin/v1/users/$ bool +async def check_username_for_spam(user_profile: synapse.module_api.UserProfile, requester_id: str) -> bool ``` Called when computing search results in the user directory. The module must return a @@ -264,6 +264,8 @@ The profile is represented as a dictionary with the following keys: The module is given a copy of the original dictionary, so modifying it from within the module cannot modify a user's profile when included in user directory search results. +The requester_id parameter is the ID of the user that called the user directory API. + If multiple modules implement this callback, they will be considered in order. If a callback returns `False`, Synapse falls through to the next one. The value of the first callback that does not return `False` will be used. If this happens, Synapse will not call diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md index 7128af114e9..45de2b1f65b 100644 --- a/docs/reverse_proxy.md +++ b/docs/reverse_proxy.md @@ -74,7 +74,7 @@ server { proxy_pass http://localhost:8008; proxy_set_header X-Forwarded-For $remote_addr; proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $host; + proxy_set_header Host $host:$server_port; # Nginx by default only allows file uploads up to 1M in size # Increase client_max_body_size to match max_upload_size defined in homeserver.yaml diff --git a/docs/setup/installation.md b/docs/setup/installation.md index bfeacab3755..d0081a88d9b 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -157,7 +157,7 @@ sudo pip install py-bcrypt #### Alpine Linux -6543 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with: +Jahway603 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. 
Install with: ```sh sudo apk add synapse diff --git a/docs/spam_checker.md b/docs/spam_checker.md index 1b6d814937c..4ace3512b33 100644 --- a/docs/spam_checker.md +++ b/docs/spam_checker.md @@ -72,8 +72,8 @@ class ExampleSpamChecker: async def user_may_publish_room(self, userid, room_id): return True # allow publishing of all rooms - async def check_username_for_spam(self, user_profile): - return False # allow all usernames + async def check_username_for_spam(self, user_profile, requester_id): + return False # allow all usernames regardless of requester async def check_registration_for_spam( self, diff --git a/docs/upgrade.md b/docs/upgrade.md index 45e63b0c5de..6c96cb91a31 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -117,6 +117,14 @@ each upgrade are complete before moving on to the next upgrade, to avoid stacking them up. You can monitor the currently running background updates with [the Admin API](usage/administration/admin_api/background_updates.html#status). +# Upgrading to v1.122.0 + +## Dropping support for PostgreSQL 11 and 12 + +In line with our [deprecation policy](deprecation_policy.md), we've dropped +support for PostgreSQL 11 and 12, as they are no longer supported upstream. +This release of Synapse requires PostgreSQL 13+. + # Upgrading to v1.120.0 ## Removal of experimental MSC3886 feature diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index c88f0682366..714d6bd7c5b 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -673,8 +673,9 @@ This setting has the following sub-options: TLS via STARTTLS *if the SMTP server supports it*. If this option is set, Synapse will refuse to connect unless the server supports STARTTLS. * `enable_tls`: By default, if the server supports TLS, it will be used, and the server - must present a certificate that is valid for 'smtp_host'. If this option + must present a certificate that is valid for `tlsname`. If this option is set to false, TLS will not be used. +* `tlsname`: The domain name the SMTP server's TLS certificate must be valid for, defaulting to `smtp_host`. * `notif_from`: defines the "From" address to use when sending emails. It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name, which is normally set in `app_name`, but may be overridden by the @@ -741,6 +742,7 @@ email: force_tls: true require_transport_security: true enable_tls: false + tlsname: mail.server.example.com notif_from: "Your Friendly %(app)s homeserver " app_name: my_branded_matrix_server enable_notifs: true @@ -3123,6 +3125,22 @@ Example configuration: ```yaml macaroon_secret_key: ``` +--- +### `macaroon_secret_key_path` + +An alternative to [`macaroon_secret_key`](#macaroon_secret_key): +allows the secret key to be specified in an external file. + +The file should be a plain text file, containing only the secret key. +Synapse reads the secret key from the given file once at startup. + +Example configuration: +```yaml +macaroon_secret_key_path: /path/to/secrets/file +``` + +_Added in Synapse 1.121.0._ + --- ### `form_secret` diff --git a/docs/workers.md b/docs/workers.md index 0116c455bc7..6c614bd6d0b 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -273,17 +273,6 @@ information. 
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/ ^/_matrix/client/(api/v1|r0|v3|unstable)/profile/ - # Account data requests - ^/_matrix/client/(r0|v3|unstable)/.*/tags - ^/_matrix/client/(r0|v3|unstable)/.*/account_data - - # Receipts requests - ^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt - ^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers - - # Presence requests - ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/ - # User directory search requests ^/_matrix/client/(r0|v3|unstable)/user_directory/search$ @@ -292,6 +281,13 @@ Additionally, the following REST endpoints can be handled for GET requests: ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/ ^/_matrix/client/unstable/org.matrix.msc4140/delayed_events + # Account data requests + ^/_matrix/client/(r0|v3|unstable)/.*/tags + ^/_matrix/client/(r0|v3|unstable)/.*/account_data + + # Presence requests + ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/ + Pagination requests can also be handled, but all requests for a given room must be routed to the same instance. Additionally, care must be taken to ensure that the purge history admin API is not used while pagination requests diff --git a/poetry.lock b/poetry.lock index f43fe2489ae..4655d295c47 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "annotated-types" @@ -32,13 +32,13 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "authlib" -version = "1.3.2" +version = "1.4.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, - {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, + {file = "Authlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:4bb20b978c8b636222b549317c1815e1fe62234fc1c5efe8855d84aebf3a74e3"}, + {file = "authlib-1.4.0.tar.gz", hash = "sha256:1c1e6608b5ed3624aeeee136ca7f8c120d6f51f731aa152b153d54741840e1f2"}, ] [package.dependencies] @@ -842,13 +842,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -1314,38 +1314,43 @@ files = [ [[package]] name = "mypy" -version = "1.11.2" +version = "1.12.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, - {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, - {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, - {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, - {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, - {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, - {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, - {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, - {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, - {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, - {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, - {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, - {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, + {file = "mypy-1.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d7d4371829184e22fda4015278fbfdef0327a4b955a483012bd2d423a788801"}, + {file = "mypy-1.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f59f1dfbf497d473201356966e353ef09d4daec48caeacc0254db8ef633a28a5"}, + {file = "mypy-1.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b947097fae68004b8328c55161ac9db7d3566abfef72d9d41b47a021c2fba6b1"}, + {file = "mypy-1.12.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96af62050971c5241afb4701c15189ea9507db89ad07794a4ee7b4e092dc0627"}, + {file = "mypy-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d90da248f4c2dba6c44ddcfea94bb361e491962f05f41990ff24dbd09969ce20"}, + {file = "mypy-1.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1230048fec1380faf240be6385e709c8570604d2d27ec6ca7e573e3bc09c3735"}, + {file = "mypy-1.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:02dcfe270c6ea13338210908f8cadc8d31af0f04cee8ca996438fe6a97b4ec66"}, + {file = "mypy-1.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5a437c9102a6a252d9e3a63edc191a3aed5f2fcb786d614722ee3f4472e33f6"}, + {file = "mypy-1.12.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:186e0c8346efc027ee1f9acf5ca734425fc4f7dc2b60144f0fbe27cc19dc7931"}, + {file = "mypy-1.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:673ba1140a478b50e6d265c03391702fa11a5c5aff3f54d69a62a48da32cb811"}, + {file = "mypy-1.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9fb83a7be97c498176fb7486cafbb81decccaef1ac339d837c377b0ce3743a7f"}, + {file = "mypy-1.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:389e307e333879c571029d5b93932cf838b811d3f5395ed1ad05086b52148fb0"}, + {file = "mypy-1.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94b2048a95a21f7a9ebc9fbd075a4fcd310410d078aa0228dbbad7f71335e042"}, + {file = "mypy-1.12.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5932370ccf7ebf83f79d1c157a5929d7ea36313027b0d70a488493dc1b179"}, + {file = "mypy-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:19bf51f87a295e7ab2894f1d8167622b063492d754e69c3c2fed6563268cb42a"}, + {file = "mypy-1.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:d34167d43613ffb1d6c6cdc0cc043bb106cac0aa5d6a4171f77ab92a3c758bcc"}, + {file = "mypy-1.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:427878aa54f2e2c5d8db31fa9010c599ed9f994b3b49e64ae9cd9990c40bd635"}, + {file = "mypy-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fcde63ea2c9f69d6be859a1e6dd35955e87fa81de95bc240143cf00de1f7f81"}, + {file = "mypy-1.12.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d54d840f6c052929f4a3d2aab2066af0f45a020b085fe0e40d4583db52aab4e4"}, + {file = "mypy-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:20db6eb1ca3d1de8ece00033b12f793f1ea9da767334b7e8c626a4872090cf02"}, + {file = "mypy-1.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b16fe09f9c741d85a2e3b14a5257a27a4f4886c171d562bc5a5e90d8591906b8"}, + {file = "mypy-1.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0dcc1e843d58f444fce19da4cce5bd35c282d4bde232acdeca8279523087088a"}, + {file = "mypy-1.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e10ba7de5c616e44ad21005fa13450cd0de7caaa303a626147d45307492e4f2d"}, + {file = "mypy-1.12.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e6fe449223fa59fbee351db32283838a8fee8059e0028e9e6494a03802b4004"}, + {file = "mypy-1.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:dc6e2a2195a290a7fd5bac3e60b586d77fc88e986eba7feced8b778c373f9afe"}, + {file = "mypy-1.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:de5b2a8988b4e1269a98beaf0e7cc71b510d050dce80c343b53b4955fff45f19"}, + {file = "mypy-1.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843826966f1d65925e8b50d2b483065c51fc16dc5d72647e0236aae51dc8d77e"}, + {file = "mypy-1.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fe20f89da41a95e14c34b1ddb09c80262edcc295ad891f22cc4b60013e8f78d"}, + {file = "mypy-1.12.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8135ffec02121a75f75dc97c81af7c14aa4ae0dda277132cfcd6abcd21551bfd"}, + {file = "mypy-1.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:a7b76fa83260824300cc4834a3ab93180db19876bce59af921467fd03e692810"}, + {file = "mypy-1.12.1-py3-none-any.whl", hash = "sha256:ce561a09e3bb9863ab77edf29ae3a50e65685ad74bba1431278185b7e5d5486e"}, + {file = "mypy-1.12.1.tar.gz", hash = "sha256:f5b3936f7a6d0e8280c9bdef94c7ce4847f5cdfc258fbb2c29a8c1711e8bb96d"}, ] [package.dependencies] @@ -1372,17 +1377,17 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.8" +version = "1.0.9" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" files = [ - {file = "mypy_zope-1.0.8-py3-none-any.whl", hash = "sha256:8794a77dae0c7e2f28b8ac48569091310b3ee45bb9d6cd4797dcb837c40f9976"}, - {file = "mypy_zope-1.0.8.tar.gz", hash = "sha256:854303a95aefc4289e8a0796808e002c2c7ecde0a10a8f7b8f48092f94ef9b9f"}, + {file = "mypy_zope-1.0.9-py3-none-any.whl", hash = "sha256:6666c1556891a3cb186137519dbd7a58cb30fb72b2504798cad47b35391921ba"}, + {file = "mypy_zope-1.0.9.tar.gz", hash = "sha256:37d6985dfb05a4c27b35cff47577fd5bad878db4893ddedf54d165f7389a1cdb"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.13.0" +mypy = ">=1.0.0,<1.14.0" "zope.interface" = "*" "zope.schema" = "*" @@ -1454,95 +1459,90 @@ files = [ [[package]] name = "pillow" -version = "10.4.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = 
"pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = 
"pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = 
"pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = 
"pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -1590,6 +1590,7 @@ files = [ {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, @@ -1660,22 +1661,19 @@ files = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - 
{file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1683,100 +1681,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", 
hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = 
"pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = 
"pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -1817,12 +1826,12 @@ plugins = ["importlib-metadata"] [[package]] name = "pyicu" -version = "2.13.1" +version = "2.14" description = "Python extension wrapping the ICU C++ API" optional = true python-versions = "*" files = [ - {file = "PyICU-2.13.1.tar.gz", hash = 
"sha256:d4919085eaa07da12bade8ee721e7bbf7ade0151ca0f82946a26c8f4b98cdceb"}, + {file = "PyICU-2.14.tar.gz", hash = "sha256:acc7eb92bd5c554ed577249c6978450a4feda0aa6f01470152b3a7b382a02132"}, ] [[package]] @@ -1899,31 +1908,31 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "24.2.1" +version = "24.3.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, - {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, + {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, + {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, ] [package.dependencies] -cryptography = ">=41.0.5,<44" +cryptography = ">=41.0.5,<45" [package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"] test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pysaml2" -version = "7.3.1" +version = "7.5.0" description = "Python implementation of SAML Version 2 Standard" optional = true -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.9,<4.0" files = [ - {file = "pysaml2-7.3.1-py3-none-any.whl", hash = "sha256:2cc66e7a371d3f5ff9601f0ed93b5276cca816fce82bb38447d5a0651f2f5193"}, - {file = "pysaml2-7.3.1.tar.gz", hash = "sha256:eab22d187c6dd7707c58b5bb1688f9b8e816427667fc99d77f54399e15cd0a0a"}, + {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, + {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, ] [package.dependencies] @@ -1933,7 +1942,7 @@ pyopenssl = "*" python-dateutil = "*" pytz = "*" requests = ">=2,<3" -xmlschema = ">=1.2.1" +xmlschema = ">=2,<3" [package.extras] s2repoze = ["paste", "repoze.who", "zope.interface"] @@ -1954,13 +1963,13 @@ six = ">=1.5" [[package]] name = "python-multipart" -version = "0.0.16" +version = "0.0.18" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" files = [ - {file = "python_multipart-0.0.16-py3-none-any.whl", hash = "sha256:c2759b7b976ef3937214dfb592446b59dfaa5f04682a076f78b117c94776d87a"}, - {file = "python_multipart-0.0.16.tar.gz", hash = "sha256:8dee37b88dab9b59922ca173c35acb627cc12ec74019f5cd4578369c6df36554"}, + {file = "python_multipart-0.0.18-py3-none-any.whl", hash = "sha256:efe91480f485f6a361427a541db4796f9e1591afc0fb8e7a4ba06bfbc6708996"}, + {file = "python_multipart-0.0.18.tar.gz", hash = "sha256:7a68db60c8bfb82e460637fa4750727b45af1d5e2ed215593f917f64694d34fe"}, ] [[package]] @@ -2313,13 +2322,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "2.17.0" +version = "2.19.2" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" files = [ - {file = "sentry_sdk-2.17.0-py2.py3-none-any.whl", hash = "sha256:625955884b862cc58748920f9e21efdfb8e0d4f98cca4ab0d3918576d5b606ad"}, - {file = "sentry_sdk-2.17.0.tar.gz", hash = "sha256:dd0a05352b78ffeacced73a94e86f38b32e2eae15fff5f30ca5abb568a72eacf"}, + {file = "sentry_sdk-2.19.2-py2.py3-none-any.whl", 
hash = "sha256:ebdc08228b4d131128e568d696c210d846e5b9d70aa0327dec6b1272d9d40b84"}, + {file = "sentry_sdk-2.19.2.tar.gz", hash = "sha256:467df6e126ba242d39952375dd816fbee0f217d119bf454a8ce74cf1e7909e8d"}, ] [package.dependencies] @@ -2345,14 +2354,16 @@ grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] http2 = ["httpcore[http2] (==1.*)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] litestar = ["litestar (>=2.0.0)"] loguru = ["loguru (>=0.5)"] openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure-eval"] +pure-eval = ["asttokens", "executing", "pure_eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] @@ -2514,13 +2525,43 @@ twisted = ["twisted"] [[package]] name = "tomli" -version = "2.1.0" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, - {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = 
"tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -2589,19 +2630,20 @@ docs = ["sphinx (<7.0.0)"] [[package]] name = "twine" -version = "5.1.1" +version = "6.0.1" description = "Collection of utilities for publishing packages on PyPI" optional = false python-versions = ">=3.8" files = [ - {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"}, - {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"}, + {file = "twine-6.0.1-py3-none-any.whl", hash = "sha256:9c6025b203b51521d53e200f4a08b116dee7500a38591668c6a6033117bdc218"}, + {file = "twine-6.0.1.tar.gz", hash = "sha256:36158b09df5406e1c9c1fb8edb24fc2be387709443e7376689b938531582ee27"}, ] 
[package.dependencies] -importlib-metadata = ">=3.6" -keyring = ">=15.1" -pkginfo = ">=1.8.1,<1.11" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +keyring = {version = ">=15.1", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""} +packaging = "*" +pkginfo = ">=1.8.1" readme-renderer = ">=35.0" requests = ">=2.20" requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" @@ -2609,6 +2651,9 @@ rfc3986 = ">=1.4.0" rich = ">=12.0.0" urllib3 = ">=1.26.0" +[package.extras] +keyring = ["keyring (>=15.1)"] + [[package]] name = "twisted" version = "24.7.0" diff --git a/pyproject.toml b/pyproject.toml index eccce104556..af6f82bcf93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.120.0" +version = "1.122.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" @@ -386,8 +386,11 @@ build-backend = "poetry.core.masonry.api" # c.f. https://github.com/matrix-org/synapse/pull/14259 skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64" -# We need a rust compiler -before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal" +# We need a rust compiler. +# +# We temporarily pin Rust to 1.82.0 to work around +# https://github.com/element-hq/synapse/issues/17988 +before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal" environment= { PATH = "$PATH:$HOME/.cargo/bin" } # For some reason if we don't manually clean the build directory we diff --git a/rust/src/identifier.rs b/rust/src/identifier.rs index b199c5838eb..b70f6a30c73 100644 --- a/rust/src/identifier.rs +++ b/rust/src/identifier.rs @@ -71,6 +71,34 @@ impl TryFrom<&str> for UserID { } } +impl TryFrom for UserID { + type Error = IdentifierError; + + /// Will try creating a `UserID` from the provided `&str`. + /// Can fail if the user_id is incorrectly formatted. + fn try_from(s: String) -> Result { + if !s.starts_with('@') { + return Err(IdentifierError::IncorrectSigil); + } + + if s.find(':').is_none() { + return Err(IdentifierError::MissingColon); + } + + Ok(UserID(s)) + } +} + +impl<'de> serde::Deserialize<'de> for UserID { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s: String = serde::Deserialize::deserialize(deserializer)?; + UserID::try_from(s).map_err(serde::de::Error::custom) + } +} + impl Deref for UserID { type Target = str; @@ -84,3 +112,141 @@ impl fmt::Display for UserID { write!(f, "{}", self.0) } } + +/// A Matrix room_id. +#[derive(Clone, Debug, PartialEq)] +pub struct RoomID(String); + +impl RoomID { + /// Returns the `localpart` of the room_id. + pub fn localpart(&self) -> &str { + &self[1..self.colon_pos()] + } + + /// Returns the `server_name` / `domain` of the room_id. + pub fn server_name(&self) -> &str { + &self[self.colon_pos() + 1..] + } + + /// Returns the position of the ':' inside of the room_id. + /// Used when splitting the room_id into it's respective parts. + fn colon_pos(&self) -> usize { + self.find(':').unwrap() + } +} + +impl TryFrom<&str> for RoomID { + type Error = IdentifierError; + + /// Will try creating a `RoomID` from the provided `&str`. + /// Can fail if the room_id is incorrectly formatted. 
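+ ///
+ /// Illustrative example (kept as an ignored doctest):
+ /// ```ignore
+ /// assert!(RoomID::try_from("!room:example.org").is_ok());
+ /// assert!(RoomID::try_from("room:example.org").is_err()); // missing '!' sigil
+ /// ```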
+ fn try_from(s: &str) -> Result { + if !s.starts_with('!') { + return Err(IdentifierError::IncorrectSigil); + } + + if s.find(':').is_none() { + return Err(IdentifierError::MissingColon); + } + + Ok(RoomID(s.to_string())) + } +} + +impl TryFrom for RoomID { + type Error = IdentifierError; + + /// Will try creating a `RoomID` from the provided `String`. + /// Can fail if the room_id is incorrectly formatted. + fn try_from(s: String) -> Result { + if !s.starts_with('!') { + return Err(IdentifierError::IncorrectSigil); + } + + if s.find(':').is_none() { + return Err(IdentifierError::MissingColon); + } + + Ok(RoomID(s)) + } +} + +impl<'de> serde::Deserialize<'de> for RoomID { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s: String = serde::Deserialize::deserialize(deserializer)?; + RoomID::try_from(s).map_err(serde::de::Error::custom) + } +} + +impl Deref for RoomID { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl fmt::Display for RoomID { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// A Matrix event_id. +#[derive(Clone, Debug, PartialEq)] +pub struct EventID(String); + +impl TryFrom<&str> for EventID { + type Error = IdentifierError; + + /// Will try creating a `EventID` from the provided `&str`. + /// Can fail if the event_id is incorrectly formatted. + fn try_from(s: &str) -> Result { + if !s.starts_with('$') { + return Err(IdentifierError::IncorrectSigil); + } + + Ok(EventID(s.to_string())) + } +} + +impl TryFrom for EventID { + type Error = IdentifierError; + + /// Will try creating a `EventID` from the provided `String`. + /// Can fail if the event_id is incorrectly formatted. + fn try_from(s: String) -> Result { + if !s.starts_with('$') { + return Err(IdentifierError::IncorrectSigil); + } + + Ok(EventID(s)) + } +} + +impl<'de> serde::Deserialize<'de> for EventID { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s: String = serde::Deserialize::deserialize(deserializer)?; + EventID::try_from(s).map_err(serde::de::Error::custom) + } +} + +impl Deref for EventID { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl fmt::Display for EventID { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index b6dcb96e2c9..6be9177f110 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -195,6 +195,10 @@ if [ -z "$skip_docker_build" ]; then # Build the unified Complement image (from the worker Synapse image we just built). 
echo_if_github "::group::Build Docker image: complement/Dockerfile" $CONTAINER_RUNTIME build -t complement-synapse \ + `# This is the tag we end up pushing to the registry (see` \ + `# .github/workflows/push_complement_image.yml) so let's just label it now` \ + `# so people can reference it by the same name locally.` \ + -t ghcr.io/element-hq/synapse/complement-synapse \ -f "docker/complement/Dockerfile" "docker/complement" echo_if_github "::endgroup::" diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 8db302b3d8b..9806e2b0fe3 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -231,6 +231,8 @@ class EventContentFields: ROOM_NAME: Final = "name" MEMBERSHIP: Final = "membership" + MEMBERSHIP_DISPLAYNAME: Final = "displayname" + MEMBERSHIP_AVATAR_URL: Final = "avatar_url" # Used in m.room.guest_access events. GUEST_ACCESS: Final = "guest_access" @@ -318,3 +320,8 @@ class ApprovalNoticeMedium: class Direction(enum.Enum): BACKWARDS = "b" FORWARDS = "f" + + +class ProfileFields: + DISPLAYNAME: Final = "displayname" + AVATAR_URL: Final = "avatar_url" diff --git a/synapse/api/errors.py b/synapse/api/errors.py index e6efa7a4249..21989b6e0e8 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -87,8 +87,7 @@ class Codes(str, Enum): WEAK_PASSWORD = "M_WEAK_PASSWORD" INVALID_SIGNATURE = "M_INVALID_SIGNATURE" USER_DEACTIVATED = "M_USER_DEACTIVATED" - # USER_LOCKED = "M_USER_LOCKED" - USER_LOCKED = "ORG_MATRIX_MSC3939_USER_LOCKED" + USER_LOCKED = "M_USER_LOCKED" NOT_YET_UPLOADED = "M_NOT_YET_UPLOADED" CANNOT_OVERWRITE_MEDIA = "M_CANNOT_OVERWRITE_MEDIA" @@ -101,8 +100,9 @@ class Codes(str, Enum): # The account has been suspended on the server. # By opposition to `USER_DEACTIVATED`, this is a reversible measure # that can possibly be appealed and reverted. - # Part of MSC3823. - USER_ACCOUNT_SUSPENDED = "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" + # Introduced by MSC3823 + # https://github.com/matrix-org/matrix-spec-proposals/pull/3823 + USER_ACCOUNT_SUSPENDED = "M_USER_SUSPENDED" BAD_ALIAS = "M_BAD_ALIAS" # For restricted join rules. diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index a96cdbf1e7e..6ee5240c4ee 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -87,6 +87,7 @@ def __init__( ip_range_whitelist: Optional[IPSet] = None, supports_ephemeral: bool = False, msc3202_transaction_extensions: bool = False, + msc4190_device_management: bool = False, ): self.token = token self.url = ( @@ -100,6 +101,7 @@ def __init__( self.ip_range_whitelist = ip_range_whitelist self.supports_ephemeral = supports_ephemeral self.msc3202_transaction_extensions = msc3202_transaction_extensions + self.msc4190_device_management = msc4190_device_management if "|" in self.id: raise Exception("application service ID cannot contain '|' character") diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 6ff00e1ff8b..dda6bcd1b79 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -183,6 +183,18 @@ def _load_appservice( "The `org.matrix.msc3202` option should be true or false if specified." ) + # Opt-in flag for the MSC4190 behaviours. 
+ # When enabled, the following C-S API endpoints change for appservices: + # - POST /register does not return an access token + # - PUT /devices/{device_id} creates a new device if one does not exist + # - DELETE /devices/{device_id} no longer requires UIA + # - POST /delete_devices/{device_id} no longer requires UIA + msc4190_enabled = as_info.get("io.element.msc4190", False) + if not isinstance(msc4190_enabled, bool): + raise ValueError( + "The `io.element.msc4190` option should be true or false if specified." + ) + return ApplicationService( token=as_info["as_token"], url=as_info["url"], @@ -195,4 +207,5 @@ def _load_appservice( ip_range_whitelist=ip_range_whitelist, supports_ephemeral=supports_ephemeral, msc3202_transaction_extensions=msc3202_transaction_extensions, + msc4190_device_management=msc4190_enabled, ) diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index 8033fa2e526..c3a3e05a825 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -110,6 +110,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: raise ConfigError( "email.require_transport_security requires email.enable_tls to be true" ) + self.email_tlsname = email_config.get("tlsname", None) if "app_name" in email_config: self.email_app_name = email_config["app_name"] diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 3411179a2a3..90d19849ffd 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -436,15 +436,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ("experimental", "msc4108_delegation_endpoint"), ) - self.msc3823_account_suspension = experimental.get( - "msc3823_account_suspension", False - ) - - # MSC4151: Report room API (Client-Server API) - self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False) - # MSC4210: Remove legacy mentions self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False) # MSC4222: Adding `state_after` to sync v2 self.msc4222_enabled: bool = experimental.get("msc4222_enabled", False) + + # MSC4076: Add `disable_badge_count`` to pusher configuration + self.msc4076_enabled: bool = experimental.get("msc4076_enabled", False) diff --git a/synapse/config/key.py b/synapse/config/key.py index bc968889676..01aae09c135 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -43,7 +43,7 @@ from synapse.types import JsonDict from synapse.util.stringutils import random_string, random_string_with_symbols -from ._base import Config, ConfigError +from ._base import Config, ConfigError, read_file if TYPE_CHECKING: from signedjson.key import VerifyKeyWithExpiry @@ -91,6 +91,11 @@ 'suppress_key_server_warning' to 'true' in homeserver.yaml. --------------------------------------------------------------------------------""" +CONFLICTING_MACAROON_SECRET_KEY_OPTS_ERROR = """\ +Conflicting options 'macaroon_secret_key' and 'macaroon_secret_key_path' are +both defined in config file. 
+""" + logger = logging.getLogger(__name__) @@ -166,10 +171,16 @@ def read_config( ) ) - macaroon_secret_key: Optional[str] = config.get( - "macaroon_secret_key", self.root.registration.registration_shared_secret - ) - + macaroon_secret_key = config.get("macaroon_secret_key") + macaroon_secret_key_path = config.get("macaroon_secret_key_path") + if macaroon_secret_key_path: + if macaroon_secret_key: + raise ConfigError(CONFLICTING_MACAROON_SECRET_KEY_OPTS_ERROR) + macaroon_secret_key = read_file( + macaroon_secret_key_path, "macaroon_secret_key_path" + ).strip() + if not macaroon_secret_key: + macaroon_secret_key = self.root.registration.registration_shared_secret if not macaroon_secret_key: # Unfortunately, there are people out there that don't have this # set. Lets just be "nice" and derive one from their secret key. diff --git a/synapse/config/logger.py b/synapse/config/logger.py index cfc1a57107f..e5aca36b75e 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -360,5 +360,6 @@ def setup_logging( "Licensed under the AGPL 3.0 license. Website: https://github.com/element-hq/synapse" ) logging.info("Server hostname: %s", config.server.server_name) + logging.info("Public Base URL: %s", config.server.public_baseurl) logging.info("Instance name: %s", hs.get_instance_name()) logging.info("Twisted reactor: %s", type(reactor).__name__) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 27860154e1c..fc5a90c85a6 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -22,7 +22,7 @@ import logging import os from typing import Any, Dict, List, Tuple -from urllib.request import getproxies_environment # type: ignore +from urllib.request import getproxies_environment import attr diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index dd21a6136b1..0bca4c188bf 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -248,7 +248,7 @@ def state_group(self) -> Optional[int]: @tag_args async def get_current_state_ids( self, state_filter: Optional["StateFilter"] = None - ) -> Optional[StateMap[str]]: + ) -> StateMap[str]: """ Gets the room state map, including this event - ie, the state in ``state_group`` @@ -256,13 +256,12 @@ async def get_current_state_ids( not make it into the room state. This method will raise an exception if ``rejected`` is set. + It is also an error to access this for an outlier event. + Arg: state_filter: specifies the type of state event to fetch from DB, example: EventTypes.JoinRules Returns: - Returns None if state_group is None, which happens when the associated - event is an outlier. - Maps a (type, state_key) to the event ID of the state event matching this tuple. """ @@ -300,7 +299,8 @@ async def get_prev_state_ids( this tuple. 
""" - assert self.state_group_before_event is not None + if self.state_group_before_event is None: + return {} return await self._storage.state.get_state_ids_for_group( self.state_group_before_event, state_filter ) diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index a05e5d5319c..093ba30d315 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -509,6 +509,9 @@ async def on_PUT( event = content["event"] invite_room_state = content.get("invite_room_state", []) + if not isinstance(invite_room_state, list): + invite_room_state = [] + # Synapse expects invite_room_state to be in unsigned, as it is in v1 # API diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index d1194545aeb..f3e7790d435 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -473,7 +473,7 @@ async def _redact_all_events( "type": EventTypes.Redaction, "content": {"reason": reason} if reason else {}, "room_id": room, - "sender": user_id, + "sender": requester.user.to_string(), } if room_version.updated_redaction_rules: event_dict["content"]["redacts"] = event.event_id diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 4b33e1330d0..b7d10333510 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -896,10 +896,10 @@ async def claim_e2e_one_time_keys( results = await make_deferred_yieldable( defer.DeferredList( [ - run_in_background( + run_in_background( # type: ignore[call-overload] self.appservice_api.claim_client_keys, # We know this must be an app service. - self.store.get_app_service_by_id(service_id), # type: ignore[arg-type] + self.store.get_app_service_by_id(service_id), service_query, ) for service_id, service_query in query_by_appservice.items() @@ -952,10 +952,10 @@ async def query_keys( results = await make_deferred_yieldable( defer.DeferredList( [ - run_in_background( + run_in_background( # type: ignore[call-overload] self.appservice_api.query_keys, # We know this must be an app service. - self.store.get_app_service_by_id(service_id), # type: ignore[arg-type] + self.store.get_app_service_by_id(service_id), service_query, ) for service_id, service_query in query_by_appservice.items() diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d88660e273b..d9622080b4f 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -729,6 +729,40 @@ async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: await self.notify_device_update(user_id, device_ids) + async def upsert_device( + self, user_id: str, device_id: str, display_name: Optional[str] = None + ) -> bool: + """Create or update a device + + Args: + user_id: The user to update devices of. + device_id: The device to update. + display_name: The new display name for this device. + + Returns: + True if the device was created, False if it was updated. + + """ + + # Reject a new displayname which is too long. 
+ self._check_device_name_length(display_name) + + created = await self.store.store_device( + user_id, + device_id, + initial_device_display_name=display_name, + ) + + if not created: + await self.store.update_device( + user_id, + device_id, + new_display_name=display_name, + ) + + await self.notify_device_update(user_id, [device_id]) + return created + async def update_device(self, user_id: str, device_id: str, content: dict) -> None: """Update the given device diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2b7aad5b581..17dd4af13ed 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -880,6 +880,9 @@ async def do_knock( if stripped_room_state is None: raise KeyError("Missing 'knock_room_state' field in send_knock response") + if not isinstance(stripped_room_state, list): + raise TypeError("'knock_room_state' has wrong type") + event.unsigned["knock_room_state"] = stripped_room_state context = EventContext.for_outlier(self._storage_controllers) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index ac4544ca4c0..22eedcb54f6 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -22,6 +22,7 @@ import random from typing import TYPE_CHECKING, List, Optional, Union +from synapse.api.constants import ProfileFields from synapse.api.errors import ( AuthError, Codes, @@ -83,7 +84,7 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi Returns: A JSON dictionary. For local queries this will include the displayname and avatar_url - fields. For remote queries it may contain arbitrary information. + fields, if set. For remote queries it may contain arbitrary information. """ target_user = UserID.from_string(user_id) @@ -92,10 +93,13 @@ async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDi if profileinfo.display_name is None and profileinfo.avatar_url is None: raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) - return { - "displayname": profileinfo.display_name, - "avatar_url": profileinfo.avatar_url, - } + # Do not include display name or avatar if unset. 
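+ # Both fields are optional in the Client-Server API's GET /profile response, so it
+ # is valid to omit them entirely when they are unset.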
+ ret = {} + if profileinfo.display_name is not None: + ret[ProfileFields.DISPLAYNAME] = profileinfo.display_name + if profileinfo.avatar_url is not None: + ret[ProfileFields.AVATAR_URL] = profileinfo.avatar_url + return ret else: try: result = await self.federation.make_query( diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c200e295695..c49db83ce7a 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -630,7 +630,9 @@ async def post_consent_actions(self, user_id: str) -> None: """ await self._auto_join_rooms(user_id) - async def appservice_register(self, user_localpart: str, as_token: str) -> str: + async def appservice_register( + self, user_localpart: str, as_token: str + ) -> Tuple[str, ApplicationService]: user = UserID(user_localpart, self.hs.hostname) user_id = user.to_string() service = self.store.get_app_service_by_token(as_token) @@ -653,7 +655,7 @@ async def appservice_register(self, user_localpart: str, as_token: str) -> str: appservice_id=service_id, create_profile_with_displayname=user.localpart, ) - return user_id + return (user_id, service) def check_user_id_not_appservice_exclusive( self, user_id: str, allowed_appservice: Optional[ApplicationService] = None diff --git a/synapse/handlers/send_email.py b/synapse/handlers/send_email.py index 70cdb0721c9..8cf8d2badab 100644 --- a/synapse/handlers/send_email.py +++ b/synapse/handlers/send_email.py @@ -47,15 +47,45 @@ _is_old_twisted = parse_version(twisted.__version__) < parse_version("21") -class _NoTLSESMTPSender(ESMTPSender): - """Extend ESMTPSender to disable TLS +class _BackportESMTPSender(ESMTPSender): + """Extend old versions of ESMTPSender to configure TLS. - Unfortunately, before Twisted 21.2, ESMTPSender doesn't give an easy way to disable - TLS, so we override its internal method which it uses to generate a context factory. + Unfortunately, before Twisted 21.2, ESMTPSender doesn't give an easy way to + disable TLS, or to configure the hostname used for TLS certificate validation. + This backports the `hostname` parameter for that functionality. """ + __hostname: Optional[str] + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """""" + self.__hostname = kwargs.pop("hostname", None) + super().__init__(*args, **kwargs) + def _getContextFactory(self) -> Optional[IOpenSSLContextFactory]: - return None + if self.context is not None: + return self.context + elif self.__hostname is None: + return None # disable TLS if hostname is None + return optionsForClientTLS(self.__hostname) + + +class _BackportESMTPSenderFactory(ESMTPSenderFactory): + """An ESMTPSenderFactory for _BackportESMTPSender. + + This backports the `hostname` parameter, to disable or configure TLS. 
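+ This factory is only selected when `_is_old_twisted` is true, i.e. when running
+ against a Twisted release older than 21.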
+ """ + + __hostname: Optional[str] + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.__hostname = kwargs.pop("hostname", None) + super().__init__(*args, **kwargs) + + def protocol(self, *args: Any, **kwargs: Any) -> ESMTPSender: # type: ignore + # this overrides ESMTPSenderFactory's `protocol` attribute, with a Callable + # instantiating our _BackportESMTPSender, providing the hostname parameter + return _BackportESMTPSender(*args, **kwargs, hostname=self.__hostname) async def _sendmail( @@ -71,6 +101,7 @@ async def _sendmail( require_tls: bool = False, enable_tls: bool = True, force_tls: bool = False, + tlsname: Optional[str] = None, ) -> None: """A simple wrapper around ESMTPSenderFactory, to allow substitution in tests @@ -88,39 +119,33 @@ async def _sendmail( enable_tls: True to enable STARTTLS. If this is False and require_tls is True, the request will fail. force_tls: True to enable Implicit TLS. + tlsname: the domain name expected as the TLS certificate's commonname, + defaults to smtphost. """ msg = BytesIO(msg_bytes) d: "Deferred[object]" = Deferred() - - def build_sender_factory(**kwargs: Any) -> ESMTPSenderFactory: - return ESMTPSenderFactory( - username, - password, - from_addr, - to_addr, - msg, - d, - heloFallback=True, - requireAuthentication=require_auth, - requireTransportSecurity=require_tls, - **kwargs, - ) - - factory: IProtocolFactory - if _is_old_twisted: - # before twisted 21.2, we have to override the ESMTPSender protocol to disable - # TLS - factory = build_sender_factory() - - if not enable_tls: - factory.protocol = _NoTLSESMTPSender - else: - # for twisted 21.2 and later, there is a 'hostname' parameter which we should - # set to enable TLS. - factory = build_sender_factory(hostname=smtphost if enable_tls else None) + if not enable_tls: + tlsname = None + elif tlsname is None: + tlsname = smtphost + + factory: IProtocolFactory = ( + _BackportESMTPSenderFactory if _is_old_twisted else ESMTPSenderFactory + )( + username, + password, + from_addr, + to_addr, + msg, + d, + heloFallback=True, + requireAuthentication=require_auth, + requireTransportSecurity=require_tls, + hostname=tlsname, + ) if force_tls: - factory = TLSMemoryBIOFactory(optionsForClientTLS(smtphost), True, factory) + factory = TLSMemoryBIOFactory(optionsForClientTLS(tlsname), True, factory) endpoint = HostnameEndpoint( reactor, smtphost, smtpport, timeout=30, bindAddress=None @@ -148,6 +173,7 @@ def __init__(self, hs: "HomeServer"): self._require_transport_security = hs.config.email.require_transport_security self._enable_tls = hs.config.email.enable_smtp_tls self._force_tls = hs.config.email.force_tls + self._tlsname = hs.config.email.email_tlsname self._sendmail = _sendmail @@ -227,4 +253,5 @@ async def send_email( require_tls=self._require_transport_security, enable_tls=self._enable_tls, force_tls=self._force_tls, + tlsname=self._tlsname, ) diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index 85cfbc6dbf5..459d3c3e248 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -39,6 +39,7 @@ trace, ) from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary +from synapse.storage.databases.main.state_deltas import StateDelta from synapse.storage.databases.main.stream import PaginateFunction from synapse.storage.roommember import ( MemberSummary, @@ -48,6 +49,7 @@ MutableStateMap, PersistedEventPosition, Requester, + RoomStreamToken, SlidingSyncStreamToken, 
StateMap, StrCollection, @@ -470,6 +472,64 @@ async def get_current_state_at( return state_map + @trace + async def get_current_state_deltas_for_room( + self, + room_id: str, + room_membership_for_user_at_to_token: RoomsForUserType, + from_token: RoomStreamToken, + to_token: RoomStreamToken, + ) -> List[StateDelta]: + """ + Get the state deltas between two tokens taking into account the user's + membership. If the user is LEAVE/BAN, we will only get the state deltas up to + their LEAVE/BAN event (inclusive). + + (> `from_token` and <= `to_token`) + """ + membership = room_membership_for_user_at_to_token.membership + # We don't know how to handle `membership` values other than these. The + # code below would need to be updated. + assert membership in ( + Membership.JOIN, + Membership.INVITE, + Membership.KNOCK, + Membership.LEAVE, + Membership.BAN, + ) + + # People shouldn't see past their leave/ban event + if membership in ( + Membership.LEAVE, + Membership.BAN, + ): + to_bound = ( + room_membership_for_user_at_to_token.event_pos.to_room_stream_token() + ) + # If we are participating in the room, we can get the latest current state in + # the room + elif membership == Membership.JOIN: + to_bound = to_token + # We can only rely on the stripped state included in the invite/knock event + # itself so there will never be any state deltas to send down. + elif membership in (Membership.INVITE, Membership.KNOCK): + return [] + else: + # We don't know how to handle this type of membership yet + # + # FIXME: We should use `assert_never` here but for some reason + # the exhaustive matching doesn't recognize the `Never` here. + # assert_never(membership) + raise AssertionError( + f"Unexpected membership {membership} that we don't know how to handle yet" + ) + + return await self.store.get_current_state_deltas_for_room( + room_id=room_id, + from_token=from_token, + to_token=to_bound, + ) + @trace async def get_room_sync_data( self, @@ -755,13 +815,19 @@ async def get_room_sync_data( stripped_state = [] if invite_or_knock_event.membership == Membership.INVITE: - stripped_state.extend( - invite_or_knock_event.unsigned.get("invite_room_state", []) + invite_state = invite_or_knock_event.unsigned.get( + "invite_room_state", [] ) + if not isinstance(invite_state, list): + invite_state = [] + + stripped_state.extend(invite_state) elif invite_or_knock_event.membership == Membership.KNOCK: - stripped_state.extend( - invite_or_knock_event.unsigned.get("knock_room_state", []) - ) + knock_state = invite_or_knock_event.unsigned.get("knock_room_state", []) + if not isinstance(knock_state, list): + knock_state = [] + + stripped_state.extend(knock_state) stripped_state.append(strip_event(invite_or_knock_event)) @@ -790,8 +856,9 @@ async def get_room_sync_data( # TODO: Limit the number of state events we're about to send down # the room, if its too many we should change this to an # `initial=True`? - deltas = await self.store.get_current_state_deltas_for_room( + deltas = await self.get_current_state_deltas_for_room( room_id=room_id, + room_membership_for_user_at_to_token=room_membership_for_user_at_to_token, from_token=from_bound, to_token=to_token.room_key, ) @@ -955,15 +1022,21 @@ async def get_room_sync_data( and state_key == StateValues.LAZY ): lazy_load_room_members = True + # Everyone in the timeline is relevant - # - # FIXME: We probably also care about invite, ban, kick, targets, etc - # but the spec only mentions "senders". 
timeline_membership: Set[str] = set() if timeline_events is not None: for timeline_event in timeline_events: + # Anyone who sent a message is relevant timeline_membership.add(timeline_event.sender) + # We also care about invite, ban, kick, targets, + # etc. + if timeline_event.type == EventTypes.Member: + timeline_membership.add( + timeline_event.state_key + ) + # Update the required state filter so we pick up the new # membership for user_id in timeline_membership: diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index ee74289b6c4..cee2eefbb37 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -43,7 +43,7 @@ from twisted.web.iweb import IRequest from twisted.web.server import Request -from synapse.api.constants import LoginType +from synapse.api.constants import LoginType, ProfileFields from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError from synapse.config.sso import SsoAttributeRequirement from synapse.handlers.device import DeviceHandler @@ -813,9 +813,10 @@ def is_allowed_mime_type(content_type: str) -> bool: # bail if user already has the same avatar profile = await self._profile_handler.get_profile(user_id) - if profile["avatar_url"] is not None: - server_name = profile["avatar_url"].split("/")[-2] - media_id = profile["avatar_url"].split("/")[-1] + if ProfileFields.AVATAR_URL in profile: + avatar_url_parts = profile[ProfileFields.AVATAR_URL].split("/") + server_name = avatar_url_parts[-2] + media_id = avatar_url_parts[-1] if self._is_mine_server_name(server_name): media = await self._media_repo.store.get_local_media(media_id) # type: ignore[has-type] if media is not None and upload_name == media.upload_name: diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index a343637b82a..f88d39b38f0 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -26,7 +26,13 @@ from twisted.internet.interfaces import IDelayedCall import synapse.metrics -from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules, Membership +from synapse.api.constants import ( + EventTypes, + HistoryVisibility, + JoinRules, + Membership, + ProfileFields, +) from synapse.api.errors import Codes, SynapseError from synapse.handlers.state_deltas import MatchChange, StateDeltasHandler from synapse.metrics.background_process_metrics import run_as_background_process @@ -161,7 +167,7 @@ async def search_users( non_spammy_users = [] for user in results["results"]: if not await self._spam_checker_module_callbacks.check_username_for_spam( - user + user, user_id ): non_spammy_users.append(user) results["results"] = non_spammy_users @@ -756,6 +762,10 @@ async def _unsafe_refresh_remote_profiles_for_remote_server( await self.store.update_profile_in_user_dir( user_id, - display_name=non_null_str_or_none(profile.get("displayname")), - avatar_url=non_null_str_or_none(profile.get("avatar_url")), + display_name=non_null_str_or_none( + profile.get(ProfileFields.DISPLAYNAME) + ), + avatar_url=non_null_str_or_none( + profile.get(ProfileFields.AVATAR_URL) + ), ) diff --git a/synapse/http/client.py b/synapse/http/client.py index 85923d956bc..559b1febf09 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -41,7 +41,7 @@ from netaddr import AddrFormatError, IPAddress, IPSet from prometheus_client import Counter from typing_extensions import Protocol -from zope.interface import implementer, provider +from zope.interface import implementer from OpenSSL import SSL from OpenSSL.SSL 
import VERIFY_NONE @@ -225,7 +225,7 @@ def _callback() -> None: recv.addressResolved(address) recv.resolutionComplete() - @provider(IResolutionReceiver) + @implementer(IResolutionReceiver) class EndpointReceiver: @staticmethod def resolutionBegan(resolutionInProgress: IHostResolution) -> None: @@ -239,8 +239,9 @@ def addressResolved(address: IAddress) -> None: def resolutionComplete() -> None: _callback() + endpoint_receiver_wrapper = EndpointReceiver() self._reactor.nameResolver.resolveHostName( - EndpointReceiver, hostname, portNumber=portNumber + endpoint_receiver_wrapper, hostname, portNumber=portNumber ) return recv diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index f80f67acc68..c91cf30fd1b 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -21,7 +21,7 @@ import logging import random import re -from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple +from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union from urllib.parse import urlparse from urllib.request import ( # type: ignore[attr-defined] getproxies_environment, @@ -351,7 +351,9 @@ def http_proxy_endpoint( proxy: Optional[bytes], reactor: IReactorCore, tls_options_factory: Optional[IPolicyForHTTPS], - **kwargs: object, + timeout: float = 30, + bindAddress: Optional[Union[bytes, str, tuple[Union[bytes, str], int]]] = None, + attemptDelay: Optional[float] = None, ) -> Tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]: """Parses an http proxy setting and returns an endpoint for the proxy @@ -382,12 +384,15 @@ def http_proxy_endpoint( # 3.9+) on scheme-less proxies, e.g. host:port. scheme, host, port, credentials = parse_proxy(proxy) - proxy_endpoint = HostnameEndpoint(reactor, host, port, **kwargs) + proxy_endpoint = HostnameEndpoint( + reactor, host, port, timeout, bindAddress, attemptDelay + ) if scheme == b"https": if tls_options_factory: tls_options = tls_options_factory.creatorForNetloc(host, port) - proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint) + wrapped_proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint) + return wrapped_proxy_endpoint, credentials else: raise RuntimeError( f"No TLS options for a https connection via proxy {proxy!s}" diff --git a/synapse/http/replicationagent.py b/synapse/http/replicationagent.py index ee8c7070621..4eabbc8af95 100644 --- a/synapse/http/replicationagent.py +++ b/synapse/http/replicationagent.py @@ -89,7 +89,7 @@ def endpointForURI(self, uri: URI) -> IStreamClientEndpoint: location_config.port, ) if scheme == "https": - endpoint = wrapClientTLS( + wrapped_endpoint = wrapClientTLS( # The 'port' argument below isn't actually used by the function self.context_factory.creatorForNetloc( location_config.host.encode("utf-8"), @@ -97,6 +97,8 @@ def endpointForURI(self, uri: URI) -> IStreamClientEndpoint: ), endpoint, ) + return wrapped_endpoint + return endpoint elif isinstance(location_config, InstanceUnixLocationConfig): return UNIXClientEndpoint(self.reactor, location_config.path) diff --git a/synapse/http/site.py b/synapse/http/site.py index 1cd90cb9b72..e83a4447b2f 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -21,6 +21,7 @@ import contextlib import logging import time +from http import HTTPStatus from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union import attr @@ -139,6 +140,41 @@ def __repr__(self) -> str: self.synapse_site.site_tag, ) + # Twisted machinery: this method is called by the Channel once the full request has + # 
been received, to dispatch the request to a resource. + # + # We're patching Twisted to bail/abort early when we see someone trying to upload + # `multipart/form-data` so we can avoid Twisted parsing the entire request body into + # memory (a problem specific to this `Content-Type`). This protects us + # from an attacker uploading something bigger than the available RAM and crashing + # the server with a `MemoryError`, or carefully blocking just enough resources to cause + # all other requests to fail. + # + # FIXME: This can be removed once Twisted releases a fix and we update to a + # version that is patched. + def requestReceived(self, command: bytes, path: bytes, version: bytes) -> None: + if command == b"POST": + ctype = self.requestHeaders.getRawHeaders(b"content-type") + if ctype and b"multipart/form-data" in ctype[0]: + self.method, self.uri = command, path + self.clientproto = version + self.code = HTTPStatus.UNSUPPORTED_MEDIA_TYPE.value + self.code_message = bytes( + HTTPStatus.UNSUPPORTED_MEDIA_TYPE.phrase, "ascii" + ) + self.responseHeaders.setRawHeaders(b"content-length", [b"0"]) + + logger.warning( + "Aborting connection from %s because `content-type: multipart/form-data` is unsupported: %s %s", + self.client, + command, + path, + ) + self.write(b"") + self.loseConnection() + return + return super().requestReceived(command, path, version) + def handleContentChunk(self, data: bytes) -> None: # we should have a `content` by now. assert self.content, "handleContentChunk() called before gotLength()" diff --git a/synapse/logging/scopecontextmanager.py b/synapse/logging/scopecontextmanager.py index 581e6d6411f..feaadc4d87a 100644 --- a/synapse/logging/scopecontextmanager.py +++ b/synapse/logging/scopecontextmanager.py @@ -20,13 +20,10 @@ # import logging -from types import TracebackType -from typing import Optional, Type +from typing import Optional from opentracing import Scope, ScopeManager, Span -import twisted - from synapse.logging.context import ( LoggingContext, current_context, @@ -112,9 +109,6 @@ class _LogContextScope(Scope): """ A custom opentracing scope, associated with a LogContext - * filters out _DefGen_Return exceptions which arise from calling - `defer.returnValue` in Twisted code - * When the scope is closed, the logcontext's active scope is reset to None. and - if enter_logcontext was set - the logcontext is finished too. """ @@ -146,17 +140,6 @@ def __init__( self._finish_on_close = finish_on_close self._enter_logcontext = enter_logcontext - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - if exc_type == twisted.internet.defer._DefGen_Return: - # filter out defer.returnValue() calls - exc_type = value = traceback = None - super().__exit__(exc_type, value, traceback) - def __str__(self) -> str: return f"Scope<{self.span}>" diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index 3845067835a..d6b8ce4a09f 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -67,6 +67,11 @@ class ThumbnailError(Exception): class Thumbnailer: FORMATS = {"image/jpeg": "JPEG", "image/png": "PNG"} + # Which image formats we allow Pillow to open. + # This should intentionally be kept restrictive, because the decoder of any + # format in this list becomes part of our trusted computing base.
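+ # Passing `formats=` to `Image.open` (below) means Pillow never even tries the
+ # decoders for other formats; such files fail with `UnidentifiedImageError`.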
+ PILLOW_FORMATS = ("jpeg", "png", "webp", "gif") + @staticmethod def set_limits(max_image_pixels: int) -> None: Image.MAX_IMAGE_PIXELS = max_image_pixels @@ -76,7 +81,7 @@ def __init__(self, input_path: str): self._closed = False try: - self.image = Image.open(input_path) + self.image = Image.open(input_path, formats=self.PILLOW_FORMATS) except OSError as e: # If an error occurs opening the image, a thumbnail won't be able to # be generated. diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index f6bfd93d3ce..2a2f821427d 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -45,6 +45,7 @@ from twisted.web.resource import Resource from synapse.api import errors +from synapse.api.constants import ProfileFields from synapse.api.errors import SynapseError from synapse.api.presence import UserPresenceState from synapse.config import ConfigError @@ -1086,7 +1087,10 @@ async def update_room_membership( content = {} # Set the profile if not already done by the module. - if "avatar_url" not in content or "displayname" not in content: + if ( + ProfileFields.AVATAR_URL not in content + or ProfileFields.DISPLAYNAME not in content + ): try: # Try to fetch the user's profile. profile = await self._hs.get_profile_handler().get_profile( @@ -1095,8 +1099,8 @@ async def update_room_membership( except SynapseError as e: # If the profile couldn't be found, use default values. profile = { - "displayname": target_user_id.localpart, - "avatar_url": None, + ProfileFields.DISPLAYNAME: target_user_id.localpart, + ProfileFields.AVATAR_URL: None, } if e.code != 404: @@ -1109,11 +1113,9 @@ async def update_room_membership( ) # Set the profile where it needs to be set. - if "avatar_url" not in content: - content["avatar_url"] = profile["avatar_url"] - - if "displayname" not in content: - content["displayname"] = profile["displayname"] + for field_name in [ProfileFields.AVATAR_URL, ProfileFields.DISPLAYNAME]: + if field_name not in content and field_name in profile: + content[field_name] = profile[field_name] event_id, _ = await self._hs.get_room_member_handler().update_membership( requester=requester, diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 17079ff781c..a2f328cafed 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -31,6 +31,7 @@ Optional, Tuple, Union, + cast, ) # `Literal` appears with Python 3.8. @@ -168,7 +169,10 @@ ] ], ] -CHECK_USERNAME_FOR_SPAM_CALLBACK = Callable[[UserProfile], Awaitable[bool]] +CHECK_USERNAME_FOR_SPAM_CALLBACK = Union[ + Callable[[UserProfile], Awaitable[bool]], + Callable[[UserProfile, str], Awaitable[bool]], +] LEGACY_CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[ [ Optional[dict], @@ -716,7 +720,9 @@ async def user_may_publish_room( return self.NOT_SPAM - async def check_username_for_spam(self, user_profile: UserProfile) -> bool: + async def check_username_for_spam( + self, user_profile: UserProfile, requester_id: str + ) -> bool: """Checks if a user ID or display name are considered "spammy" by this server. If the server considers a username spammy, then it will not be included in @@ -727,15 +733,33 @@ async def check_username_for_spam(self, user_profile: UserProfile) -> bool: * user_id * display_name * avatar_url + requester_id: The user ID of the user making the user directory search request. Returns: True if the user is spammy. 
""" for callback in self._check_username_for_spam_callbacks: with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"): + checker_args = inspect.signature(callback) # Make a copy of the user profile object to ensure the spam checker cannot # modify it. - res = await delay_cancellation(callback(user_profile.copy())) + # Also ensure backwards compatibility with spam checker callbacks + # that don't expect the requester_id argument. + if len(checker_args.parameters) == 2: + callback_with_requester_id = cast( + Callable[[UserProfile, str], Awaitable[bool]], callback + ) + res = await delay_cancellation( + callback_with_requester_id(user_profile.copy(), requester_id) + ) + else: + callback_without_requester_id = cast( + Callable[[UserProfile], Awaitable[bool]], callback + ) + res = await delay_cancellation( + callback_without_requester_id(user_profile.copy()) + ) + if res: return True diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 9c0592a9026..8c106f9649b 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -371,7 +371,7 @@ async def _action_for_event_by_user( "Deferred[Tuple[int, Tuple[dict, Optional[int]], Dict[str, Dict[str, JsonValue]], Mapping[str, ProfileInfo]]]", gather_results( ( - run_in_background( # type: ignore[call-arg] + run_in_background( # type: ignore[call-overload] self.store.get_number_joined_users_in_room, event.room_id, # type: ignore[arg-type] ), @@ -382,10 +382,10 @@ async def _action_for_event_by_user( event_id_to_event, ), run_in_background(self._related_events, event), - run_in_background( # type: ignore[call-arg] + run_in_background( # type: ignore[call-overload] self.store.get_subset_users_in_room_with_profiles, - event.room_id, # type: ignore[arg-type] - rules_by_user.keys(), # type: ignore[arg-type] + event.room_id, + rules_by_user.keys(), ), ), consumeErrors=True, diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index dd9b64d6eff..69790ecab54 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -127,6 +127,11 @@ def __init__(self, hs: "HomeServer", pusher_config: PusherConfig): if self.data is None: raise PusherConfigException("'data' key can not be null for HTTP pusher") + # Check if badge counts should be disabled for this push gateway + self.disable_badge_count = self.hs.config.experimental.msc4076_enabled and bool( + self.data.get("org.matrix.msc4076.disable_badge_count", False) + ) + self.name = "%s/%s/%s" % ( pusher_config.user_name, pusher_config.app_id, @@ -461,9 +466,10 @@ async def dispatch_push_event( content: JsonDict = { "event_id": event.event_id, "room_id": event.room_id, - "counts": {"unread": badge}, "prio": priority, } + if not self.disable_badge_count: + content["counts"] = {"unread": badge} # event_id_only doesn't include the tweaks, so override them. 
tweaks = {} else: @@ -478,11 +484,11 @@ async def dispatch_push_event( "type": event.type, "sender": event.user_id, "prio": priority, - "counts": { - "unread": badge, - # 'missed_calls': 2 - }, } + if not self.disable_badge_count: + content["counts"] = { + "unread": badge, + } if event.type == "m.room.member" and event.is_state(): content["membership"] = event.content["membership"] content["user_is_target"] = event.state_key == self.user_id diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index 1ef881f7024..3f3e4a92343 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -74,9 +74,13 @@ async def get_context_for_event( room_state = [] if ev.content.get("membership") == Membership.INVITE: - room_state = ev.unsigned.get("invite_room_state", []) + invite_room_state = ev.unsigned.get("invite_room_state", []) + if isinstance(invite_room_state, list): + room_state = invite_room_state elif ev.content.get("membership") == Membership.KNOCK: - room_state = ev.unsigned.get("knock_room_state", []) + knock_room_state = ev.unsigned.get("knock_room_state", []) + if isinstance(knock_room_state, list): + room_state = knock_room_state # Ideally we'd reuse the logic in `calculate_room_name`, but that gets # complicated to handle partial events vs pulling events from the DB. diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 4db89756747..f3c99663e84 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -107,6 +107,8 @@ UserAdminServlet, UserByExternalId, UserByThreePid, + UserInvitesCount, + UserJoinedRoomCount, UserMembershipRestServlet, UserRegisterServlet, UserReplaceMasterCrossSigningKeyRestServlet, @@ -323,6 +325,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: UserByThreePid(hs).register(http_server) RedactUser(hs).register(http_server) RedactUserStatus(hs).register(http_server) + UserInvitesCount(hs).register(http_server) + UserJoinedRoomCount(hs).register(http_server) DeviceRestServlet(hs).register(http_server) DevicesRestServlet(hs).register(http_server) @@ -332,8 +336,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: BackgroundUpdateRestServlet(hs).register(http_server) BackgroundUpdateStartJobRestServlet(hs).register(http_server) ExperimentalFeaturesRestServlet(hs).register(http_server) - if hs.config.experimental.msc3823_account_suspension: - SuspendAccountRestServlet(hs).register(http_server) + SuspendAccountRestServlet(hs).register(http_server) def register_servlets_for_client_rest_resource( diff --git a/synapse/rest/admin/event_reports.py b/synapse/rest/admin/event_reports.py index 9fb68bfa462..ff1abc0697c 100644 --- a/synapse/rest/admin/event_reports.py +++ b/synapse/rest/admin/event_reports.py @@ -50,8 +50,10 @@ class EventReportsRestServlet(RestServlet): The parameters `from` and `limit` are required only for pagination. By default, a `limit` of 100 is used. The parameter `dir` can be used to define the order of results. - The parameter `user_id` can be used to filter by user id. - The parameter `room_id` can be used to filter by room id. + The `user_id` query parameter filters by the user ID of the reporter of the event. + The `room_id` query parameter filters by room id. + The `event_sender_user_id` query parameter can be used to filter by the user id + of the sender of the reported event. 
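+ For example: `GET /_synapse/admin/v1/event_reports?event_sender_user_id=@bob:example.com`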
Returns: A list of reported events and an integer representing the total number of reported events that exist given this query @@ -71,6 +73,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: direction = parse_enum(request, "dir", Direction, Direction.BACKWARDS) user_id = parse_string(request, "user_id") room_id = parse_string(request, "room_id") + event_sender_user_id = parse_string(request, "event_sender_user_id") if start < 0: raise SynapseError( @@ -87,7 +90,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: ) event_reports, total = await self._store.get_event_reports_paginate( - start, limit, direction, user_id, room_id + start, limit, direction, user_id, room_id, event_sender_user_id ) ret = {"event_reports": event_reports, "total": total} if (start + limit) < total: diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 01f9de9ffa5..3097cb1a9d1 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -23,6 +23,7 @@ from typing import TYPE_CHECKING, List, Optional, Tuple, cast import attr +from immutabledict import immutabledict from synapse.api.constants import Direction, EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError @@ -463,7 +464,18 @@ async def on_GET( if not room: raise NotFoundError("Room not found") - event_ids = await self._storage_controllers.state.get_current_state_ids(room_id) + state_filter = None + type = parse_string(request, "type") + + if type: + state_filter = StateFilter( + types=immutabledict({type: None}), + include_others=False, + ) + + event_ids = await self._storage_controllers.state.get_current_state_ids( + room_id, state_filter + ) events = await self.store.get_events(event_ids.values()) now = self.clock.time_msec() room_state = await self._event_serializer.serialize_events(events.values(), now) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index b146c2754d6..7b8f1d1b2a9 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -983,7 +983,7 @@ async def on_PUT( class UserMembershipRestServlet(RestServlet): """ - Get room list of an user. + Get list of joined room ID's for a user. 
""" PATTERNS = admin_patterns("/users/(?P[^/]*)/joined_rooms$") @@ -999,8 +999,9 @@ async def on_GET( await assert_requester_is_admin(self.auth, request) room_ids = await self.store.get_rooms_for_user(user_id) - ret = {"joined_rooms": list(room_ids), "total": len(room_ids)} - return HTTPStatus.OK, ret + rooms_response = {"joined_rooms": list(room_ids), "total": len(room_ids)} + + return HTTPStatus.OK, rooms_response class PushersRestServlet(RestServlet): @@ -1501,3 +1502,50 @@ async def on_GET( } else: raise NotFoundError("redact id '%s' not found" % redact_id) + + +class UserInvitesCount(RestServlet): + """ + Return the count of invites that the user has sent after the given timestamp + """ + + PATTERNS = admin_patterns("/users/(?P[^/]*)/sent_invite_count") + + def __init__(self, hs: "HomeServer"): + self._auth = hs.get_auth() + self.store = hs.get_datastores().main + + async def on_GET( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self._auth, request) + from_ts = parse_integer(request, "from_ts", required=True) + + sent_invite_count = await self.store.get_sent_invite_count_by_user( + user_id, from_ts + ) + + return HTTPStatus.OK, {"invite_count": sent_invite_count} + + +class UserJoinedRoomCount(RestServlet): + """ + Return the count of rooms that the user has joined at or after the given timestamp, even + if they have subsequently left/been banned from those rooms. + """ + + PATTERNS = admin_patterns("/users/(?P[^/]*)/cumulative_joined_room_count") + + def __init__(self, hs: "HomeServer"): + self._auth = hs.get_auth() + self.store = hs.get_datastores().main + + async def on_GET( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self._auth, request) + from_ts = parse_integer(request, "from_ts", required=True) + + joined_rooms = await self.store.get_rooms_for_user_by_date(user_id, from_ts) + + return HTTPStatus.OK, {"cumulative_joined_room_count": len(joined_rooms)} diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 6a45a5d1307..4607b234945 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -114,15 +114,19 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: else: raise e - await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body.dict(exclude_unset=True), - "remove device(s) from your account", - # Users might call this multiple times in a row while cleaning up - # devices, allow a single UI auth session to be re-used. - can_skip_ui_auth=True, - ) + if requester.app_service and requester.app_service.msc4190_device_management: + # MSC4190 can skip UIA for this endpoint + pass + else: + await self.auth_handler.validate_user_via_ui_auth( + requester, + request, + body.dict(exclude_unset=True), + "remove device(s) from your account", + # Users might call this multiple times in a row while cleaning up + # devices, allow a single UI auth session to be re-used. 
+ can_skip_ui_auth=True, + ) await self.device_handler.delete_devices( requester.user.to_string(), body.devices @@ -175,9 +179,6 @@ class DeleteBody(RequestBodyModel): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: - if self._msc3861_oauth_delegation_enabled: - raise UnrecognizedRequestError(code=404) - requester = await self.auth.get_user_by_req(request) try: @@ -192,15 +193,24 @@ async def on_DELETE( else: raise - await self.auth_handler.validate_user_via_ui_auth( - requester, - request, - body.dict(exclude_unset=True), - "remove a device from your account", - # Users might call this multiple times in a row while cleaning up - # devices, allow a single UI auth session to be re-used. - can_skip_ui_auth=True, - ) + if requester.app_service and requester.app_service.msc4190_device_management: + # MSC4190 allows appservices to delete devices through this endpoint without UIA + # It's also allowed with MSC3861 enabled + pass + + else: + if self._msc3861_oauth_delegation_enabled: + raise UnrecognizedRequestError(code=404) + + await self.auth_handler.validate_user_via_ui_auth( + requester, + request, + body.dict(exclude_unset=True), + "remove a device from your account", + # Users might call this multiple times in a row while cleaning up + # devices, allow a single UI auth session to be re-used. + can_skip_ui_auth=True, + ) await self.device_handler.delete_devices( requester.user.to_string(), [device_id] @@ -216,6 +226,16 @@ async def on_PUT( requester = await self.auth.get_user_by_req(request, allow_guest=True) body = parse_and_validate_json_object_from_request(request, self.PutBody) + + # MSC4190 allows appservices to create devices through this endpoint + if requester.app_service and requester.app_service.msc4190_device_management: + created = await self.device_handler.upsert_device( + user_id=requester.user.to_string(), + device_id=device_id, + display_name=body.display_name, + ) + return 201 if created else 200, {} + await self.device_handler.update_device( requester.user.to_string(), device_id, body.dict() ) diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index 7a95b9445d1..ef59582865f 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -227,14 +227,7 @@ async def on_GET( user = UserID.from_string(user_id) await self.profile_handler.check_profile_query_allowed(user, requester_user) - displayname = await self.profile_handler.get_displayname(user) - avatar_url = await self.profile_handler.get_avatar_url(user) - - ret = {} - if displayname is not None: - ret["displayname"] = displayname - if avatar_url is not None: - ret["avatar_url"] = avatar_url + ret = await self.profile_handler.get_profile(user_id) return 200, ret diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 61e14368418..ad76f188ab4 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -771,9 +771,12 @@ async def _do_appservice_registration( body: JsonDict, should_issue_refresh_token: bool = False, ) -> JsonDict: - user_id = await self.registration_handler.appservice_register( + user_id, appservice = await self.registration_handler.appservice_register( username, as_token ) + if appservice.msc4190_device_management: + body["inhibit_login"] = True + return await self._create_registration_details( user_id, body, @@ -937,7 +940,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: as_token = self.auth.get_access_token_from_request(request) 
- user_id = await self.registration_handler.appservice_register( + user_id, _ = await self.registration_handler.appservice_register( desired_username, as_token ) return 200, {"user_id": user_id} diff --git a/synapse/rest/client/reporting.py b/synapse/rest/client/reporting.py index 949f0770351..c5037be8b75 100644 --- a/synapse/rest/client/reporting.py +++ b/synapse/rest/client/reporting.py @@ -20,13 +20,11 @@ # import logging -import re from http import HTTPStatus from typing import TYPE_CHECKING, Tuple from synapse._pydantic_compat import StrictStr from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError -from synapse.api.urls import CLIENT_API_PREFIX from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, @@ -127,16 +125,6 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.store = hs.get_datastores().main - # TODO: Remove the unstable variant after 2-3 releases - # https://github.com/element-hq/synapse/issues/17373 - if hs.config.experimental.msc4151_enabled: - self.PATTERNS.append( - re.compile( - f"^{CLIENT_API_PREFIX}/unstable/org.matrix.msc4151" - "/rooms/(?P[^/]*)/report$" - ) - ) - class PostBody(RequestBodyModel): reason: StrictStr diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 8883cd6bc0c..03e7bc0a24d 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -783,9 +783,9 @@ async def on_GET( # decorator on `get_number_joined_users_in_room` doesn't play well with # the type system. Maybe in the future, it can use some ParamSpec # wizardry to fix it up. - room_member_count_deferred = run_in_background( # type: ignore[call-arg] + room_member_count_deferred = run_in_background( # type: ignore[call-overload] self.store.get_number_joined_users_in_room, - room_id, # type: ignore[arg-type] + room_id, ) requester = await self.auth.get_user_by_req(request, allow_guest=True) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 5c62a74f41c..f4ef84a038d 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -436,7 +436,12 @@ async def encode_invited( ) unsigned = dict(invite.get("unsigned", {})) invite["unsigned"] = unsigned - invited_state = list(unsigned.pop("invite_room_state", [])) + + invited_state = unsigned.pop("invite_room_state", []) + if not isinstance(invited_state, list): + invited_state = [] + + invited_state = list(invited_state) invited_state.append(invite) invited[room.room_id] = {"invite_state": {"events": invited_state}} @@ -476,7 +481,10 @@ async def encode_knocked( # Extract the stripped room state from the unsigned dict # This is for clients to get a little bit of information about # the room they've knocked on, without revealing any sensitive information - knocked_state = list(unsigned.pop("knock_room_state", [])) + knocked_state = unsigned.pop("knock_room_state", []) + if not isinstance(knocked_state, list): + knocked_state = [] + knocked_state = list(knocked_state) # Append the actual knock membership event itself as well. 
This provides # the client with: diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index ba1141bbe59..a1d089ebac8 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -170,8 +170,6 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: ), # MSC4140: Delayed events "org.matrix.msc4140": bool(self.config.server.max_event_delay_ms), - # MSC4151: Report room API (Client-Server API) - "org.matrix.msc4151": self.config.experimental.msc4151_enabled, # Simplified sliding sync "org.matrix.simplified_msc3575": msc3575_enabled, }, diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index e14d711c764..7251e72e3ad 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -86,7 +86,9 @@ def process_replication_position( # noqa: B027 (no-op by design) """ def _invalidate_state_caches( - self, room_id: str, members_changed: Collection[str] + self, + room_id: str, + members_changed: Collection[str], ) -> None: """Invalidates caches that are based on the current state, but does not stream invalidations down replication. diff --git a/synapse/storage/controllers/purge_events.py b/synapse/storage/controllers/purge_events.py index e794b370c25..15c04ffef89 100644 --- a/synapse/storage/controllers/purge_events.py +++ b/synapse/storage/controllers/purge_events.py @@ -42,8 +42,8 @@ async def purge_room(self, room_id: str) -> None: """Deletes all record of a room""" with nested_logging_context(room_id): - state_groups_to_delete = await self.stores.main.purge_room(room_id) - await self.stores.state.purge_room_state(room_id, state_groups_to_delete) + await self.stores.main.purge_room(room_id) + await self.stores.state.purge_room_state(room_id) async def purge_history( self, room_id: str, token: str, delete_local_events: bool diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 707d18de78a..f364464c23a 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -219,6 +219,11 @@ def process_replication_rows( room_id = row.keys[0] members_changed = set(row.keys[1:]) self._invalidate_state_caches(room_id, members_changed) + self._curr_state_delta_stream_cache.entity_has_changed( # type: ignore[attr-defined] + room_id, token + ) + for user_id in members_changed: + self._membership_stream_cache.entity_has_changed(user_id, token) # type: ignore[attr-defined] elif row.cache_func == PURGE_HISTORY_CACHE_NAME: if row.keys is None: raise Exception( @@ -236,6 +241,35 @@ def process_replication_rows( room_id = row.keys[0] self._invalidate_caches_for_room_events(room_id) self._invalidate_caches_for_room(room_id) + self._curr_state_delta_stream_cache.entity_has_changed( # type: ignore[attr-defined] + room_id, token + ) + # Note: This code is commented out to improve cache performance. + # While uncommenting would provide complete correctness, our + # automatic forgotten room purge logic (see + # `forgotten_room_retention_period`) means this would frequently + # clear the entire cache (effectively) and probably have a noticable + # impact on the cache hit ratio. + # + # Not updating the cache here is safe because: + # + # 1. `_membership_stream_cache` is only used to indicate the + # *absence* of changes, i.e. "nothing has changed between tokens + # X and Y and so return early and don't query the database". + # 2. 
`_membership_stream_cache` is used when we query data from + # `current_state_delta_stream` and `room_memberships` but since + # nothing new is written to the database for those tables when + # purging/deleting a room (only deleting rows), there is nothing + # changed to care about. + # + # At worst, the cache might indicate a change at token X, at which + # point, we will query the database and discover nothing is there. + # + # Ideally, we would make it so that we could clear the cache on a + # more granular level but that's a bit complex and fiddly to do with + # room membership. + # + # self._membership_stream_cache.all_entities_changed(token) # type: ignore[attr-defined] else: self._attempt_to_invalidate_cache(row.cache_func, row.keys) @@ -275,6 +309,7 @@ def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None: self._attempt_to_invalidate_cache( "get_sliding_sync_rooms_for_user", None ) + self._membership_stream_cache.entity_has_changed(data.state_key, token) # type: ignore[attr-defined] elif data.type == EventTypes.RoomEncryption: self._attempt_to_invalidate_cache( "get_room_encryption", (data.room_id,) @@ -291,6 +326,7 @@ def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None: # Similar to the above, but the entire caches are invalidated. This is # unfortunate for the membership caches, but should recover quickly. self._curr_state_delta_stream_cache.entity_has_changed(data.room_id, token) # type: ignore[attr-defined] + self._membership_stream_cache.all_entities_changed(token) # type: ignore[attr-defined] self._attempt_to_invalidate_cache("get_rooms_for_user", None) self._attempt_to_invalidate_cache("get_room_type", (data.room_id,)) self._attempt_to_invalidate_cache("get_room_encryption", (data.room_id,)) diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index dd6ac909e9f..a23aaf50962 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1605,7 +1605,13 @@ def _update_current_state_txn( room_id delta_state: Deltas that are going to be used to update the `current_state_events` table. Changes to the current state of the room. - stream_id: TODO + stream_id: This is expected to be the minimum `stream_ordering` for the + batch of events that we are persisting; which means we do not end up in a + situation where workers see events before the `current_state_delta` updates. + FIXME: However, this function also gets called with next upcoming + `stream_ordering` when we re-sync the state of a partial stated room (see + `update_current_state(...)`) which may be "correct" but it would be good to + nail down what exactly is the expected value here. 
sliding_sync_table_changes: Changes to the `sliding_sync_membership_snapshots` and `sliding_sync_joined_rooms` tables derived from the given `delta_state` (see @@ -1908,6 +1914,13 @@ def _update_current_state_txn( stream_id, ) + for user_id in members_to_cache_bust: + txn.call_after( + self.store._membership_stream_cache.entity_has_changed, + user_id, + stream_id, + ) + # Invalidate the various caches self.store._invalidate_state_caches_and_stream( txn, room_id, members_to_cache_bust diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 825fd00993a..222df8757ac 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -339,6 +339,16 @@ def get_chain_id_txn(txn: Cursor) -> int: writers=["master"], ) + # Added to accommodate some queries for the admin API in order to fetch/filter + # membership events by when it was received + self.db_pool.updates.register_background_index_update( + update_name="events_received_ts_index", + index_name="received_ts_idx", + table="events", + columns=("received_ts",), + where_clause="type = 'm.room.member'", + ) + def get_un_partial_stated_events_token(self, instance_name: str) -> int: return ( self._un_partial_stated_events_stream_id_gen.get_current_token_for_writer( @@ -2589,6 +2599,44 @@ async def have_finished_sliding_sync_background_jobs(self) -> bool: ) ) + async def get_sent_invite_count_by_user(self, user_id: str, from_ts: int) -> int: + """ + Get the number of invites sent by the given user at or after the provided timestamp. + + Args: + user_id: user ID to search against + from_ts: a timestamp in milliseconds from the unix epoch. Filters against + `events.received_ts` + + """ + + def _get_sent_invite_count_by_user_txn( + txn: LoggingTransaction, user_id: str, from_ts: int + ) -> int: + sql = """ + SELECT COUNT(rm.event_id) + FROM room_memberships AS rm + INNER JOIN events AS e USING(event_id) + WHERE rm.sender = ? + AND rm.membership = 'invite' + AND e.type = 'm.room.member' + AND e.received_ts >= ? + """ + + txn.execute(sql, (user_id, from_ts)) + res = txn.fetchone() + + if res is None: + return 0 + return int(res[0]) + + return await self.db_pool.runInteraction( + "_get_sent_invite_count_by_user_txn", + _get_sent_invite_count_by_user_txn, + user_id, + from_ts, + ) + @cached(tree=True) async def get_metadata_for_event( self, room_id: str, event_id: str diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index 08244153a39..ebdeb8fbd70 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Set, Tuple, cast +from typing import Any, Set, Tuple, cast from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -332,7 +332,7 @@ def _purge_history_txn( return referenced_state_groups - async def purge_room(self, room_id: str) -> List[int]: + async def purge_room(self, room_id: str) -> None: """Deletes all record of a room Args: @@ -348,7 +348,7 @@ async def purge_room(self, room_id: str) -> List[int]: # purge any of those rows which were added during the first. 
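Illustrative aside (not part of the patch itself), before the purge changes continue below: a quick sketch of the consumer of `get_sent_invite_count_by_user` above, the new sent-invite-count Admin API. The path, `from_ts` parameter and `invite_count` response key match the tests later in this patch, and the query is served by the partial `received_ts_idx` index registered above; the base URL and admin token here are placeholders.

import time
import requests

HS = "https://synapse.example.org"        # placeholder homeserver
ADMIN_TOKEN = "admin_token_placeholder"   # placeholder admin access token
USER_ID = "@suspect:example.org"
from_ts = int(time.time() * 1000) - 24 * 60 * 60 * 1000  # last 24h, in ms

resp = requests.get(
    f"{HS}/_synapse/admin/v1/users/{USER_ID}/sent_invite_count",
    params={"from_ts": from_ts},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
print(resp.json().get("invite_count"))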
logger.info("[purge] Starting initial main purge of [1/2]") - state_groups_to_delete = await self.db_pool.runInteraction( + await self.db_pool.runInteraction( "purge_room", self._purge_room_txn, room_id=room_id, @@ -356,18 +356,15 @@ async def purge_room(self, room_id: str) -> List[int]: ) logger.info("[purge] Starting secondary main purge of [2/2]") - state_groups_to_delete.extend( - await self.db_pool.runInteraction( - "purge_room", - self._purge_room_txn, - room_id=room_id, - ), + await self.db_pool.runInteraction( + "purge_room", + self._purge_room_txn, + room_id=room_id, ) - logger.info("[purge] Done with main purge") - return state_groups_to_delete + logger.info("[purge] Done with main purge") - def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: + def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> None: # This collides with event persistence so we cannot write new events and metadata into # a room while deleting it or this transaction will fail. if isinstance(self.database_engine, PostgresEngine): @@ -376,18 +373,10 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: (room_id,), ) - # First, fetch all the state groups that should be deleted, before - # we delete that information. - txn.execute( - """ - SELECT DISTINCT state_group FROM events - INNER JOIN event_to_state_groups USING(event_id) - WHERE events.room_id = ? - """, - (room_id,), - ) - - state_groups = [row[0] for row in txn] + if isinstance(self.database_engine, PostgresEngine): + # Disable statement timeouts for this transaction; purging rooms can + # take a while! + txn.execute("SET LOCAL statement_timeout = 0") # Get all the auth chains that are referenced by events that are to be # deleted. @@ -508,5 +497,3 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: # periodically anyway (https://github.com/matrix-org/synapse/issues/5888) self._invalidate_caches_for_room_and_stream(txn, room_id) - - return state_groups diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index cc3ce0951e7..2522bebd728 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -1586,6 +1586,7 @@ async def get_event_reports_paginate( direction: Direction = Direction.BACKWARDS, user_id: Optional[str] = None, room_id: Optional[str] = None, + event_sender_user_id: Optional[str] = None, ) -> Tuple[List[Dict[str, Any]], int]: """Retrieve a paginated list of event reports @@ -1596,6 +1597,8 @@ async def get_event_reports_paginate( oldest first (forwards) user_id: search for user_id. Ignored if user_id is None room_id: search for room_id. Ignored if room_id is None + event_sender_user_id: search for the sender of the reported event. 
Ignored if + event_sender_user_id is None Returns: Tuple of: json list of event reports @@ -1615,6 +1618,10 @@ def _get_event_reports_paginate_txn( filters.append("er.room_id LIKE ?") args.extend(["%" + room_id + "%"]) + if event_sender_user_id: + filters.append("events.sender = ?") + args.extend([event_sender_user_id]) + if direction == Direction.BACKWARDS: order = "DESC" else: @@ -1630,6 +1637,7 @@ def _get_event_reports_paginate_txn( sql = """ SELECT COUNT(*) as total_event_reports FROM event_reports AS er + LEFT JOIN events USING(event_id) JOIN room_stats_state ON room_stats_state.room_id = er.room_id {} """.format(where_clause) @@ -1648,8 +1656,7 @@ def _get_event_reports_paginate_txn( room_stats_state.canonical_alias, room_stats_state.name FROM event_reports AS er - LEFT JOIN events - ON events.event_id = er.event_id + LEFT JOIN events USING(event_id) JOIN room_stats_state ON room_stats_state.room_id = er.room_id {where_clause} diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 4249cf77e55..50ed6a28bf0 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1572,6 +1572,40 @@ def get_sliding_sync_room_for_user_batch_txn( get_sliding_sync_room_for_user_batch_txn, ) + async def get_rooms_for_user_by_date( + self, user_id: str, from_ts: int + ) -> FrozenSet[str]: + """ + Fetch a list of rooms that the user has joined at or after the given timestamp, including + those they subsequently have left/been banned from. + + Args: + user_id: user ID of the user to search for + from_ts: a timestamp in ms from the unix epoch at which to begin the search at + """ + + def _get_rooms_for_user_by_join_date_txn( + txn: LoggingTransaction, user_id: str, timestamp: int + ) -> frozenset: + sql = """ + SELECT rm.room_id + FROM room_memberships AS rm + INNER JOIN events AS e USING (event_id) + WHERE rm.user_id = ? + AND rm.membership = 'join' + AND e.type = 'm.room.member' + AND e.received_ts >= ? 
+ """ + txn.execute(sql, (user_id, timestamp)) + return frozenset([r[0] for r in txn]) + + return await self.db_pool.runInteraction( + "_get_rooms_for_user_by_join_date_txn", + _get_rooms_for_user_by_join_date_txn, + user_id, + from_ts, + ) + class RoomMemberBackgroundUpdateStore(SQLBaseStore): def __init__( diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 117ee89d0a0..b90f667da86 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -243,6 +243,13 @@ async def get_current_state_deltas_for_room( (> `from_token` and <= `to_token`) """ + # We can bail early if the `from_token` is after the `to_token` + if ( + to_token is not None + and from_token is not None + and to_token.is_before_or_eq(from_token) + ): + return [] if ( from_token is not None diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index f7a59c8992d..9944f90015c 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -840,60 +840,42 @@ async def get_previous_state_groups( return dict(rows) - async def purge_room_state( - self, room_id: str, state_groups_to_delete: Collection[int] - ) -> None: - """Deletes all record of a room from state tables - - Args: - room_id: - state_groups_to_delete: State groups to delete - """ - - logger.info("[purge] Starting state purge") - await self.db_pool.runInteraction( + async def purge_room_state(self, room_id: str) -> None: + return await self.db_pool.runInteraction( "purge_room_state", self._purge_room_state_txn, room_id, - state_groups_to_delete, ) - logger.info("[purge] Done with state purge") def _purge_room_state_txn( self, txn: LoggingTransaction, room_id: str, - state_groups_to_delete: Collection[int], ) -> None: - # first we have to delete the state groups states - logger.info("[purge] removing %s from state_groups_state", room_id) - - self.db_pool.simple_delete_many_txn( - txn, - table="state_groups_state", - column="state_group", - values=state_groups_to_delete, - keyvalues={}, - ) - - # ... and the state group edges + # Delete all edges that reference a state group linked to room_id logger.info("[purge] removing %s from state_group_edges", room_id) + txn.execute( + """ + DELETE FROM state_group_edges AS sge WHERE sge.state_group IN ( + SELECT id FROM state_groups AS sg WHERE sg.room_id = ? + )""", + (room_id,), + ) - self.db_pool.simple_delete_many_txn( - txn, - table="state_group_edges", - column="state_group", - values=state_groups_to_delete, - keyvalues={}, + # state_groups_state table has a room_id column but no index on it, unlike state_groups, + # so we delete them by matching the room_id through the state_groups table. + logger.info("[purge] removing %s from state_groups_state", room_id) + txn.execute( + """ + DELETE FROM state_groups_state AS sgs WHERE sgs.state_group IN ( + SELECT id FROM state_groups AS sg WHERE sg.room_id = ? + )""", + (room_id,), ) - # ... 
and the state groups logger.info("[purge] removing %s from state_groups", room_id) - - self.db_pool.simple_delete_many_txn( + self.db_pool.simple_delete_txn( txn, table="state_groups", - column="id", - values=state_groups_to_delete, - keyvalues={}, + keyvalues={"room_id": room_id}, ) diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index 8c8c6d04144..e4cd359201b 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -99,8 +99,8 @@ def check_database( allow_unsafe_locale = self.config.get("allow_unsafe_locale", False) # Are we on a supported PostgreSQL version? - if not allow_outdated_version and self._version < 110000: - raise RuntimeError("Synapse requires PostgreSQL 11 or above.") + if not allow_outdated_version and self._version < 130000: + raise RuntimeError("Synapse requires PostgreSQL 13 or above.") with db_conn.cursor() as txn: txn.execute("SHOW SERVER_ENCODING") diff --git a/synapse/storage/schema/main/delta/88/06_events_received_ts_index.sql b/synapse/storage/schema/main/delta/88/06_events_received_ts_index.sql new file mode 100644 index 00000000000..d70a4a8dbcb --- /dev/null +++ b/synapse/storage/schema/main/delta/88/06_events_received_ts_index.sql @@ -0,0 +1,17 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2024 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + +-- Add an index on `events.received_ts` for `m.room.member` events to allow for +-- efficient lookup of events by timestamp in some Admin API's +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (8806, 'events_received_ts_index', '{}'); diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index aae60fddeab..3ebd334a6d5 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -407,8 +407,8 @@ class StateValues: # Include all state events of the given type WILDCARD: Final = "*" # Lazy-load room membership events (include room membership events for any event - # `sender` in the timeline). We only give special meaning to this value when it's a - # `state_key`. + # `sender` or membership change target in the timeline). We only give special + # meaning to this value when it's a `state_key`. LAZY: Final = "$LAZY" # Subsitute with the requester's user ID. Typically used by clients to get # the user's membership. @@ -641,9 +641,10 @@ def must_await_full_state( if user_id == StateValues.ME: continue # We're lazy-loading membership so we can just return the state we have. - # Lazy-loading means we include membership for any event `sender` in the - # timeline but since we had to auth those timeline events, we will have the - # membership state for them (including from remote senders). + # Lazy-loading means we include membership for any event `sender` or + # membership change target in the timeline but since we had to auth those + # timeline events, we will have the membership state for them (including + # from remote senders). 
elif user_id == StateValues.LAZY: continue elif user_id == StateValues.WILDCARD: diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 03503abe0f6..5ac8643eefc 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -314,6 +314,15 @@ def entity_has_changed(self, entity: EntityType, stream_pos: int) -> None: self._entity_to_key[entity] = stream_pos self._evict() + def all_entities_changed(self, stream_pos: int) -> None: + """ + Mark all entities as changed. This is useful when the cache is invalidated and + there may be some potential change for all of the entities. + """ + self._cache.clear() + self._entity_to_key.clear() + self._earliest_known_stream_pos = stream_pos + def _evict(self) -> None: """ Ensure the cache has not exceeded the maximum size. diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py index 56bdf451dad..beea4d28881 100644 --- a/synapse/util/patch_inline_callbacks.py +++ b/synapse/util/patch_inline_callbacks.py @@ -162,7 +162,7 @@ def check_yield_points_inner( d = result.throwExceptionIntoGenerator(gen) else: d = gen.send(result) - except (StopIteration, defer._DefGen_Return) as e: + except StopIteration as e: if current_context() != expected_context: # This happens when the context is lost sometime *after* the # final yield and returning. E.g. we forgot to yield on a @@ -183,7 +183,7 @@ def check_yield_points_inner( ) ) changes.append(err) - # The `StopIteration` or `_DefGen_Return` contains the return value from the + # The `StopIteration` contains the return value from the # generator. return cast(T, e.value) diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py index 3ed457bd307..4683d09cd75 100644 --- a/synapse/util/task_scheduler.py +++ b/synapse/util/task_scheduler.py @@ -46,33 +46,43 @@ class TaskScheduler: """ - This is a simple task sheduler aimed at resumable tasks: usually we use `run_in_background` - to launch a background task, or Twisted `deferLater` if we want to do so later on. - - The problem with that is that the tasks will just stop and never be resumed if synapse - is stopped for whatever reason. - - How this works: - - A function mapped to a named action should first be registered with `register_action`. - This function will be called when trying to resuming tasks after a synapse shutdown, - so this registration should happen when synapse is initialised, NOT right before scheduling - a task. - - A task can then be launched using this named action with `schedule_task`. A `params` dict - can be passed, and it will be available to the registered function when launched. This task - can be launch either now-ish, or later on by giving a `timestamp` parameter. - - The function may call `update_task` at any time to update the `result` of the task, - and this can be used to resume the task at a specific point and/or to convey a result to - the code launching the task. - You can also specify the `result` (and/or an `error`) when returning from the function. - - The reconciliation loop runs every minute, so this is not a precise scheduler. - There is a limit of 10 concurrent tasks, so tasks may be delayed if the pool is already - full. In this regard, please take great care that scheduled tasks can actually finished. - For now there is no mechanism to stop a running task if it is stuck. 
- - Tasks will be run on the worker specified with `run_background_tasks_on` config, - or the main one by default. + This is a simple task scheduler designed for resumable tasks. Normally, + you'd use `run_in_background` to start a background task or Twisted's + `deferLater` if you want to run it later. + + The issue is that these tasks stop completely and won't resume if Synapse is + shut down for any reason. + + Here's how it works: + + - Register an Action: First, you need to register a function to a named + action using `register_action`. This function will be called to resume tasks + after a Synapse shutdown. Make sure to register it when Synapse initializes, + not right before scheduling the task. + + - Schedule a Task: You can launch a task linked to the named action + using `schedule_task`. You can pass a `params` dictionary, which will be + passed to the registered function when it's executed. Tasks can be scheduled + to run either immediately or later by specifying a `timestamp`. + + - Update Task: The function handling the task can call `update_task` at + any point to update the task's `result`. This lets you resume the task from + a specific point or pass results back to the code that scheduled it. When + the function completes, you can also return a `result` or an `error`. + + Things to keep in mind: + + - The reconciliation loop runs every minute, so this is not a high-precision + scheduler. + + - Only 10 tasks can run at the same time. If the pool is full, tasks may be + delayed. Make sure your scheduled tasks can actually finish. + + - Currently, there's no way to stop a task if it gets stuck. + + - Tasks will run on the worker defined by the `run_background_tasks_on` + setting in your configuration. If no worker is specified, they'll run on + the main one by default. """ # Precision of the scheduler, evaluation of tasks to run will only happen @@ -157,7 +167,7 @@ async def schedule_task( params: Optional[JsonMapping] = None, ) -> str: """Schedule a new potentially resumable task. A function matching the specified - `action` should have be registered with `register_action` before the task is run. + `action` should've been registered with `register_action` before the task is run. Args: action: the name of a previously registered action @@ -210,15 +220,15 @@ async def update_task( result: Optional[JsonMapping] = None, error: Optional[str] = None, ) -> bool: - """Update some task associated values. This is exposed publicly so it can - be used inside task functions, mainly to update the result and be able to - resume a task at a specific step after a restart of synapse. + """Update some task-associated values. This is exposed publicly so it can + be used inside task functions, mainly to update the result or resume + a task at a specific step after a restart of synapse. It can also be used to stage a task, by setting the `status` to `SCHEDULED` with a new timestamp. - The `status` can only be set to `ACTIVE` or `SCHEDULED`, `COMPLETE` and `FAILED` - are terminal status and can only be set by returning it in the function. + The `status` can only be set to `ACTIVE` or `SCHEDULED`. `COMPLETE` and `FAILED` + are terminal statuses and can only be set by returning them from the function. Args: id: the id of the task to update @@ -226,6 +236,12 @@ async def update_task( status: the new `TaskStatus` of the task result: the new result of the task error: the new error of the task + + Returns: + True if the update was successful, False otherwise. 
+ + Raises: + Exception: If a status other than `ACTIVE` or `SCHEDULED` was passed. """ if status == TaskStatus.COMPLETE or status == TaskStatus.FAILED: raise Exception( @@ -263,9 +279,9 @@ async def get_tasks( max_timestamp: Optional[int] = None, limit: Optional[int] = None, ) -> List[ScheduledTask]: - """Get a list of tasks. Returns all the tasks if no args is provided. + """Get a list of tasks. Returns all the tasks if no args are provided. - If an arg is `None` all tasks matching the other args will be selected. + If an arg is `None`, all tasks matching the other args will be selected. If an arg is an empty list, the corresponding value of the task needs to be `None` to be selected. @@ -277,8 +293,8 @@ async def get_tasks( a timestamp inferior to the specified one limit: Only return `limit` number of rows if set. - Returns - A list of `ScheduledTask`, ordered by increasing timestamps + Returns: + A list of `ScheduledTask`, ordered by increasing timestamps. """ return await self._store.get_scheduled_tasks( actions=actions, diff --git a/tests/config/test_load.py b/tests/config/test_load.py index 82b63b61c69..f0c4058e9f2 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -39,7 +39,7 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase): def test_load_fails_if_server_name_missing(self) -> None: - self.generate_config_and_remove_lines_containing("server_name") + self.generate_config_and_remove_lines_containing(["server_name"]) with self.assertRaises(ConfigError): HomeServerConfig.load_config("", ["-c", self.config_file]) with self.assertRaises(ConfigError): @@ -76,7 +76,7 @@ def test_generates_and_loads_macaroon_secret_key(self) -> None: ) def test_load_succeeds_if_macaroon_secret_key_missing(self) -> None: - self.generate_config_and_remove_lines_containing("macaroon") + self.generate_config_and_remove_lines_containing(["macaroon"]) config1 = HomeServerConfig.load_config("", ["-c", self.config_file]) config2 = HomeServerConfig.load_config("", ["-c", self.config_file]) config3 = HomeServerConfig.load_or_generate_config("", ["-c", self.config_file]) @@ -111,7 +111,7 @@ def test_disable_registration(self) -> None: self.assertTrue(config3.registration.enable_registration) def test_stats_enabled(self) -> None: - self.generate_config_and_remove_lines_containing("enable_metrics") + self.generate_config_and_remove_lines_containing(["enable_metrics"]) self.add_lines_to_config(["enable_metrics: true"]) # The default Metrics Flags are off by default. 
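Illustrative aside (not part of the patch itself): the rewritten `TaskScheduler` docstring above describes the register/schedule/update workflow in prose, so a short sketch of that pattern follows. The callback shape (a `ScheduledTask` in, a `(status, result, error)` tuple out) and `hs.get_task_scheduler()` are recalled from the surrounding codebase and should be read as assumptions rather than a verified API reference.

from typing import Optional, Tuple

from synapse.types import JsonMapping, ScheduledTask, TaskStatus


async def do_slow_cleanup(
    task: ScheduledTask,
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
    # `task.params` carries whatever dict was passed to schedule_task().
    before_ts = (task.params or {}).get("before_ts", 0)
    # ... process a chunk of work, checkpointing via update_task() as needed so
    # the task can resume from that point after a restart ...
    # Returning a terminal status (plus optional result/error) ends the task.
    return TaskStatus.COMPLETE, {"processed_up_to": before_ts}, None


def register_at_startup(hs) -> None:
    # Register the action when Synapse initialises, NOT right before scheduling.
    hs.get_task_scheduler().register_action(do_slow_cleanup, "do_slow_cleanup")


async def kick_off(hs) -> str:
    # Returns the task id, usable with update_task()/get_tasks().
    return await hs.get_task_scheduler().schedule_task(
        "do_slow_cleanup", params={"before_ts": 1_700_000_000_000}
    )

Since the reconciliation loop only runs once a minute and at most ten tasks run concurrently, this pattern suits coarse, resumable background work rather than anything latency-sensitive.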
@@ -131,6 +131,7 @@ def test_depreciated_identity_server_flag_throws_error(self) -> None: [ "turn_shared_secret_path: /does/not/exist", "registration_shared_secret_path: /does/not/exist", + "macaroon_secret_key_path: /does/not/exist", "recaptcha_private_key_path: /does/not/exist", "recaptcha_public_key_path: /does/not/exist", *["redis:\n enabled: true\n password_path: /does/not/exist"] @@ -148,24 +149,28 @@ def test_secret_files_missing(self, config_str: str) -> None: [ ( "turn_shared_secret_path: {}", - lambda c: c.voip.turn_shared_secret, + lambda c: c.voip.turn_shared_secret.encode("utf-8"), ), ( "registration_shared_secret_path: {}", - lambda c: c.registration.registration_shared_secret, + lambda c: c.registration.registration_shared_secret.encode("utf-8"), + ), + ( + "macaroon_secret_key_path: {}", + lambda c: c.key.macaroon_secret_key, ), ( "recaptcha_private_key_path: {}", - lambda c: c.captcha.recaptcha_private_key, + lambda c: c.captcha.recaptcha_private_key.encode("utf-8"), ), ( "recaptcha_public_key_path: {}", - lambda c: c.captcha.recaptcha_public_key, + lambda c: c.captcha.recaptcha_public_key.encode("utf-8"), ), *[ ( "redis:\n enabled: true\n password_path: {}", - lambda c: c.redis.redis_password, + lambda c: c.redis.redis_password.encode("utf-8"), ) ] * (hiredis is not None), @@ -174,11 +179,13 @@ def test_secret_files_missing(self, config_str: str) -> None: def test_secret_files_existing( self, config_line: str, get_secret: Callable[[RootConfig], str] ) -> None: - self.generate_config_and_remove_lines_containing("registration_shared_secret") + self.generate_config_and_remove_lines_containing( + ["registration_shared_secret", "macaroon_secret_key"] + ) with tempfile.NamedTemporaryFile(buffering=0) as secret_file: secret_file.write(b"53C237") self.add_lines_to_config(["", config_line.format(secret_file.name)]) config = HomeServerConfig.load_config("", ["-c", self.config_file]) - self.assertEqual(get_secret(config), "53C237") + self.assertEqual(get_secret(config), b"53C237") diff --git a/tests/config/utils.py b/tests/config/utils.py index 11140ff9790..3cba4ac5889 100644 --- a/tests/config/utils.py +++ b/tests/config/utils.py @@ -51,12 +51,13 @@ def generate_config(self) -> None: ], ) - def generate_config_and_remove_lines_containing(self, needle: str) -> None: + def generate_config_and_remove_lines_containing(self, needles: list[str]) -> None: self.generate_config() with open(self.config_file) as f: contents = f.readlines() - contents = [line for line in contents if needle not in line] + for needle in needles: + contents = [line for line in contents if needle not in line] with open(self.config_file, "w") as f: f.write("".join(contents)) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 1eec0d43b7e..1db630e9e47 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -1165,12 +1165,23 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs.get_datastores().main.services_cache = [self._service] # Register some appservice users - self._sender_user, self._sender_device = self.register_appservice_user( + user_id, device_id = self.register_appservice_user( "as.sender", self._service_token ) - self._namespaced_user, self._namespaced_device = self.register_appservice_user( + # With MSC4190 enabled, there will not be a device created + # during AS registration. However MSC4190 is not enabled + # in this test. 
It may become the default behaviour in the + # future, in which case this test will need to be updated. + assert device_id is not None + self._sender_user = user_id + self._sender_device = device_id + + user_id, device_id = self.register_appservice_user( "_as_user1", self._service_token ) + assert device_id is not None + self._namespaced_user = user_id + self._namespaced_device = device_id # Register a real user as well. self._real_user = self.register_user("real.user", "meow") diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 5b5dc713d17..5f73469daa4 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -560,9 +560,15 @@ def expect_unauthorized( self.assertEqual(channel.code, 401, channel.json_body) def expect_unrecognized( - self, method: str, path: str, content: Union[bytes, str, JsonDict] = "" + self, + method: str, + path: str, + content: Union[bytes, str, JsonDict] = "", + auth: bool = False, ) -> None: - channel = self.make_request(method, path, content) + channel = self.make_request( + method, path, content, access_token="token" if auth else None + ) self.assertEqual(channel.code, 404, channel.json_body) self.assertEqual( @@ -648,8 +654,25 @@ def test_session_management_endpoints_removed(self) -> None: def test_device_management_endpoints_removed(self) -> None: """Test that device management endpoints that were removed in MSC2964 are no longer available.""" - self.expect_unrecognized("POST", "/_matrix/client/v3/delete_devices") - self.expect_unrecognized("DELETE", "/_matrix/client/v3/devices/{DEVICE}") + + # Because we still support those endpoints with ASes, it checks the + # access token before returning 404 + self.http_client.request = AsyncMock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join([MATRIX_USER_SCOPE, MATRIX_DEVICE_SCOPE]), + "username": USERNAME, + }, + ) + ) + + self.expect_unrecognized("POST", "/_matrix/client/v3/delete_devices", auth=True) + self.expect_unrecognized( + "DELETE", "/_matrix/client/v3/devices/{DEVICE}", auth=True + ) def test_openid_endpoints_removed(self) -> None: """Test that OpenID id_token endpoints that were removed in MSC2964 are no longer available.""" diff --git a/tests/handlers/test_send_email.py b/tests/handlers/test_send_email.py index cedcea27d93..5f7839c82c1 100644 --- a/tests/handlers/test_send_email.py +++ b/tests/handlers/test_send_email.py @@ -163,6 +163,7 @@ def test_send_email(self) -> None: "email": { "notif_from": "noreply@test", "force_tls": True, + "tlsname": "example.org", }, } ) @@ -186,10 +187,9 @@ def test_send_email_force_tls(self) -> None: self.assertEqual(host, self.reactor.lookups["localhost"]) self.assertEqual(port, 465) # We need to make sure that TLS is happenning - self.assertIsInstance( - client_factory._wrappedFactory._testingContextFactory, - ClientTLSOptions, - ) + context_factory = client_factory._wrappedFactory._testingContextFactory + self.assertIsInstance(context_factory, ClientTLSOptions) + self.assertEqual(context_factory._hostname, "example.org") # tlsname # And since we use endpoints, they go through reactor.connectTCP # which works differently to connectSSL on the testing reactor diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 878d9683b6a..a75095a79f1 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -796,6 +796,7 @@ def test_spam_checker(self) 
-> None: s = self.get_success(self.handler.search_users(u1, "user2", 10)) self.assertEqual(len(s["results"]), 1) + # Kept old spam checker without `requester_id` tests for backwards compatibility. async def allow_all(user_profile: UserProfile) -> bool: # Allow all users. return False @@ -809,6 +810,7 @@ async def allow_all(user_profile: UserProfile) -> bool: s = self.get_success(self.handler.search_users(u1, "user2", 10)) self.assertEqual(len(s["results"]), 1) + # Kept old spam checker without `requester_id` tests for backwards compatibility. # Configure a spam checker that filters all users. async def block_all(user_profile: UserProfile) -> bool: # All users are spammy. @@ -820,6 +822,40 @@ async def block_all(user_profile: UserProfile) -> bool: s = self.get_success(self.handler.search_users(u1, "user2", 10)) self.assertEqual(len(s["results"]), 0) + async def allow_all_expects_requester_id( + user_profile: UserProfile, requester_id: str + ) -> bool: + self.assertEqual(requester_id, u1) + # Allow all users. + return False + + # Configure a spam checker that does not filter any users. + spam_checker = self.hs.get_module_api_callbacks().spam_checker + spam_checker._check_username_for_spam_callbacks = [ + allow_all_expects_requester_id + ] + + # The results do not change: + # We get one search result when searching for user2 by user1. + s = self.get_success(self.handler.search_users(u1, "user2", 10)) + self.assertEqual(len(s["results"]), 1) + + # Configure a spam checker that filters all users. + async def block_all_expects_requester_id( + user_profile: UserProfile, requester_id: str + ) -> bool: + self.assertEqual(requester_id, u1) + # All users are spammy. + return True + + spam_checker._check_username_for_spam_callbacks = [ + block_all_expects_requester_id + ] + + # User1 now gets no search results for any of the other users. 
+ s = self.get_success(self.handler.search_users(u1, "user2", 10)) + self.assertEqual(len(s["results"]), 0) + @override_config( { "spam_checker": { diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py index f71e4c2b8ff..80b0856a56e 100644 --- a/tests/http/test_proxyagent.py +++ b/tests/http/test_proxyagent.py @@ -854,7 +854,7 @@ def test_https_request_via_uppercase_proxy_with_blocklist(self) -> None: def test_proxy_with_no_scheme(self) -> None: http_proxy_agent = ProxyAgent(self.reactor, use_proxy=True) proxy_ep = checked_cast(HostnameEndpoint, http_proxy_agent.http_proxy_endpoint) - self.assertEqual(proxy_ep._hostStr, "proxy.com") + self.assertEqual(proxy_ep._hostText, "proxy.com") self.assertEqual(proxy_ep._port, 8888) @patch.dict(os.environ, {"http_proxy": "socks://proxy.com:8888"}) @@ -866,14 +866,14 @@ def test_proxy_with_unsupported_scheme(self) -> None: def test_proxy_with_http_scheme(self) -> None: http_proxy_agent = ProxyAgent(self.reactor, use_proxy=True) proxy_ep = checked_cast(HostnameEndpoint, http_proxy_agent.http_proxy_endpoint) - self.assertEqual(proxy_ep._hostStr, "proxy.com") + self.assertEqual(proxy_ep._hostText, "proxy.com") self.assertEqual(proxy_ep._port, 8888) @patch.dict(os.environ, {"http_proxy": "https://proxy.com:8888"}) def test_proxy_with_https_scheme(self) -> None: https_proxy_agent = ProxyAgent(self.reactor, use_proxy=True) proxy_ep = checked_cast(_WrapperEndpoint, https_proxy_agent.http_proxy_endpoint) - self.assertEqual(proxy_ep._wrappedEndpoint._hostStr, "proxy.com") + self.assertEqual(proxy_ep._wrappedEndpoint._hostText, "proxy.com") self.assertEqual(proxy_ep._wrappedEndpoint._port, 8888) diff --git a/tests/http/test_site.py b/tests/http/test_site.py index bfa26a329c5..fc620c705a5 100644 --- a/tests/http/test_site.py +++ b/tests/http/test_site.py @@ -90,3 +90,56 @@ def test_large_request(self) -> None: # default max upload size is 50M, so it should drop on the next buffer after # that. self.assertEqual(sent, 50 * 1024 * 1024 + 1024) + + def test_content_type_multipart(self) -> None: + """HTTP POST requests with `content-type: multipart/form-data` should be rejected""" + self.hs.start_listening() + + # find the HTTP server which is configured to listen on port 0 + (port, factory, _backlog, interface) = self.reactor.tcpServers[0] + self.assertEqual(interface, "::") + self.assertEqual(port, 0) + + # as a control case, first send a regular request. 
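Illustrative aside (not part of the patch itself): the user-directory tests above exercise `check_username_for_spam` callbacks that now receive the searching user's ID as a second argument. Below is a minimal module sketch of that two-argument form, assuming the standard `register_spam_checker_callbacks` hook; the module name and the `:private.example.org` domain are made up for illustration.

from synapse.module_api import ModuleApi
from synapse.types import UserProfile


class HideFromStrangers:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_username_for_spam=self.check_username_for_spam,
        )

    async def check_username_for_spam(
        self, user_profile: UserProfile, requester_id: str
    ) -> bool:
        # Returning True hides this profile from the requesting user's
        # directory search results.
        is_private = user_profile["user_id"].endswith(":private.example.org")
        requester_is_private = requester_id.endswith(":private.example.org")
        return is_private and not requester_is_private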
+ + # complete the connection and wire it up to a fake transport + client_address = IPv6Address("TCP", "::1", 2345) + protocol = factory.buildProtocol(client_address) + transport = StringTransport() + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + b"0\r\n" + b"\r\n" + ) + + while not transport.disconnecting: + self.reactor.advance(1) + + # we should get a 404 + self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 404 ") + + # now send request with content-type header + protocol = factory.buildProtocol(client_address) + transport = StringTransport() + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"Content-Type: multipart/form-data\r\n" + b"\r\n" + b"0\r\n" + b"\r\n" + ) + + while not transport.disconnecting: + self.reactor.advance(1) + + # we should get a 415 + self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 415 ") diff --git a/tests/push/test_http.py b/tests/push/test_http.py index bcca472617e..5c235bbe536 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -17,9 +17,11 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, List, Tuple +from typing import Any, Dict, List, Tuple from unittest.mock import Mock +from parameterized import parameterized + from twisted.internet.defer import Deferred from twisted.test.proto_helpers import MemoryReactor @@ -1085,3 +1087,83 @@ def test_jitter(self) -> None: self.pump() self.assertEqual(len(self.push_attempts), 11) + + @parameterized.expand( + [ + # Badge count disabled + (True, True), + (True, False), + # Badge count enabled + (False, True), + (False, False), + ] + ) + @override_config({"experimental_features": {"msc4076_enabled": True}}) + def test_msc4076_badge_count( + self, disable_badge_count: bool, event_id_only: bool + ) -> None: + # Register the user who gets notified + user_id = self.register_user("user", "pass") + access_token = self.login("user", "pass") + + # Register the user who sends the message + other_user_id = self.register_user("otheruser", "pass") + other_access_token = self.login("otheruser", "pass") + + # Register the pusher with disable_badge_count set to True + user_tuple = self.get_success( + self.hs.get_datastores().main.get_user_by_access_token(access_token) + ) + assert user_tuple is not None + device_id = user_tuple.device_id + + # Set the push data dict based on test input parameters + push_data: Dict[str, Any] = { + "url": "http://example.com/_matrix/push/v1/notify", + } + if disable_badge_count: + push_data["org.matrix.msc4076.disable_badge_count"] = True + if event_id_only: + push_data["format"] = "event_id_only" + + self.get_success( + self.hs.get_pusherpool().add_or_update_pusher( + user_id=user_id, + device_id=device_id, + kind="http", + app_id="m.http", + app_display_name="HTTP Push Notifications", + device_display_name="pushy push", + pushkey="a@example.com", + lang=None, + data=push_data, + ) + ) + + # Create a room + room = self.helper.create_room_as(user_id, tok=access_token) + + # The other user joins + self.helper.join(room=room, user=other_user_id, tok=other_access_token) + + # The other user sends a message + self.helper.send(room, body="Hi!", tok=other_access_token) + + # Advance time a bit, so the pusher will register something has happened + self.pump() + + # One push was attempted to be sent + 
self.assertEqual(len(self.push_attempts), 1) + self.assertEqual( + self.push_attempts[0][1], "http://example.com/_matrix/push/v1/notify" + ) + + if disable_badge_count: + # Verify that the notification DOESN'T contain a counts field + self.assertNotIn("counts", self.push_attempts[0][2]["notification"]) + else: + # Ensure that the notification DOES contain a counts field + self.assertIn("counts", self.push_attempts[0][2]["notification"]) + self.assertEqual( + self.push_attempts[0][2]["notification"]["counts"]["unread"], 1 + ) diff --git a/tests/replication/http/test__base.py b/tests/replication/http/test__base.py index 2eaad3707a3..31d3163c010 100644 --- a/tests/replication/http/test__base.py +++ b/tests/replication/http/test__base.py @@ -46,7 +46,7 @@ def __init__(self, hs: HomeServer): self.clock = hs.get_clock() @staticmethod - async def _serialize_payload() -> JsonDict: + async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: return {} @cancellable @@ -68,7 +68,7 @@ def __init__(self, hs: HomeServer): self.clock = hs.get_clock() @staticmethod - async def _serialize_payload() -> JsonDict: + async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: return {} async def _handle_request( # type: ignore[override] diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py index feb410a11d6..6047ce1f4af 100644 --- a/tests/rest/admin/test_event_reports.py +++ b/tests/rest/admin/test_event_reports.py @@ -378,6 +378,41 @@ def test_next_token(self) -> None: self.assertEqual(len(channel.json_body["event_reports"]), 1) self.assertNotIn("next_token", channel.json_body) + def test_filter_against_event_sender(self) -> None: + """ + Tests filtering by the sender of the reported event + """ + # first grab all the reports + channel = self.make_request( + "GET", + self.url, + access_token=self.admin_user_tok, + ) + self.assertEqual(channel.code, 200) + + # filter out set of report ids of events sent by one of the users + locally_filtered_report_ids = set() + for event_report in channel.json_body["event_reports"]: + if event_report["sender"] == self.other_user: + locally_filtered_report_ids.add(event_report["id"]) + + # grab the report ids by sender and compare to filtered report ids + channel = self.make_request( + "GET", + f"{self.url}?event_sender_user_id={self.other_user}", + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code) + self.assertEqual(channel.json_body["total"], len(locally_filtered_report_ids)) + + event_reports = channel.json_body["event_reports"] + server_filtered_report_ids = set() + for event_report in event_reports: + server_filtered_report_ids.add(event_report["id"]) + self.assertIncludes( + locally_filtered_report_ids, server_filtered_report_ids, exact=True + ) + def _create_event_and_report(self, room_id: str, user_tok: str) -> None: """Create and report events""" resp = self.helper.send(room_id, tok=user_tok) diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 95ed7364510..1817d67a00b 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -2035,6 +2035,52 @@ def test_room_state(self) -> None: # the create_room already does the right thing, so no need to verify that we got # the state events it created. 
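Illustrative aside (not part of the patch itself): the tests added just below exercise a `type` filter on the admin room-state endpoint. Here is a small sketch of the same request from an admin script, using the `/_synapse/admin/v1/rooms/{room_id}/state` path that the tests hit; the base URL, token and room ID are placeholders.

import requests

HS = "https://synapse.example.org"        # placeholder homeserver
ADMIN_TOKEN = "admin_token_placeholder"   # placeholder admin access token
ROOM_ID = "!abc123:example.org"           # placeholder room ID

resp = requests.get(
    f"{HS}/_synapse/admin/v1/rooms/{ROOM_ID}/state",
    params={"type": "m.room.member"},     # only membership state events
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
for event in resp.json()["state"]:
    print(event["type"], event["state_key"])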
+ def test_room_state_param(self) -> None: + """Test that filtering by state event type works when requesting state""" + room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok) + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{room_id}/state?type=m.room.member", + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code) + state = channel.json_body["state"] + # only one member has joined so there should be one membership event + self.assertEqual(1, len(state)) + event = state[0] + self.assertEqual(event["type"], "m.room.member") + self.assertEqual(event["state_key"], self.admin_user) + + def test_room_state_param_empty(self) -> None: + """Test that passing an empty string as state filter param returns no state events""" + room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok) + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{room_id}/state?type=", + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code) + state = channel.json_body["state"] + self.assertEqual(5, len(state)) + + def test_room_state_param_not_in_room(self) -> None: + """ + Test that passing a state filter param for a state event not in the room + returns no state events + """ + room_id = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok) + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{room_id}/state?type=m.room.custom", + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code) + state = channel.json_body["state"] + self.assertEqual(0, len(state)) + def _set_canonical_alias( self, room_id: str, test_alias: str, admin_user_tok: str ) -> None: @@ -3050,7 +3096,7 @@ def _block_room(self, room_id: str) -> None: "pusher_throttle", "room_account_data", "room_tags", - # "state_groups", # Current impl leaves orphaned state groups around. + "state_groups", "state_groups_state", "federation_inbound_events_staging", ] diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index fdb8fafa0e8..a35a250975f 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -60,6 +60,7 @@ from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase from tests.test_utils import SMALL_PNG +from tests.test_utils.event_injection import inject_event from tests.unittest import override_config @@ -5031,7 +5032,6 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main - @override_config({"experimental_features": {"msc3823_account_suspension": True}}) def test_suspend_user(self) -> None: # test that suspending user works channel = self.make_request( @@ -5409,6 +5409,64 @@ def test_admin_redact_works_if_user_kicked_or_banned(self) -> None: # we redacted 6 messages self.assertEqual(len(matches), 6) + def test_redactions_for_remote_user_succeed_with_admin_priv_in_room(self) -> None: + """ + Test that if the admin requester has privileges in a room, redaction requests + succeed for a remote user + """ + + # inject some messages from remote user and collect event ids + original_message_ids = [] + for i in range(5): + event = self.get_success( + inject_event( + self.hs, + room_id=self.rm1, + type="m.room.message", + sender="@remote:remote_server", + content={"msgtype": "m.text", "body": f"nefarious_chatter{i}"}, + ) + ) + original_message_ids.append(event.event_id) + + # send a request to redact a remote user's messages in a room. 
+ # the server admin created this room and has admin privilege in room + channel = self.make_request( + "POST", + "/_synapse/admin/v1/user/@remote:remote_server/redact", + content={"rooms": [self.rm1]}, + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + id = channel.json_body.get("redact_id") + + # check that there were no failed redactions + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/user/redact_status/{id}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body.get("status"), "complete") + failed_redactions = channel.json_body.get("failed_redactions") + self.assertEqual(failed_redactions, {}) + + filter = json.dumps({"types": [EventTypes.Redaction]}) + channel = self.make_request( + "GET", + f"rooms/{self.rm1}/messages?filter={filter}&limit=50", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + + for event in channel.json_body["chunk"]: + for event_id in original_message_ids: + if event["type"] == "m.room.redaction" and event["redacts"] == event_id: + original_message_ids.remove(event_id) + break + # we originally sent 5 messages so 5 should be redacted + self.assertEqual(len(original_message_ids), 0) + class UserRedactionBackgroundTaskTestCase(BaseMultiWorkerStreamTestCase): servlets = [ @@ -5503,3 +5561,254 @@ def test_redact_messages_all_rooms(self) -> None: redaction_ids.add(event["redacts"]) self.assertIncludes(redaction_ids, original_event_ids, exact=True) + + +class GetInvitesFromUserTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + admin.register_servlets, + room.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.admin = self.register_user("thomas", "pass", True) + self.admin_tok = self.login("thomas", "pass") + + self.bad_user = self.register_user("teresa", "pass") + self.bad_user_tok = self.login("teresa", "pass") + + self.random_users = [] + for i in range(4): + self.random_users.append(self.register_user(f"user{i}", f"pass{i}")) + + self.room1 = self.helper.create_room_as(self.bad_user, tok=self.bad_user_tok) + self.room2 = self.helper.create_room_as(self.bad_user, tok=self.bad_user_tok) + self.room3 = self.helper.create_room_as(self.bad_user, tok=self.bad_user_tok) + + @unittest.override_config( + {"rc_invites": {"per_issuer": {"per_second": 1000, "burst_count": 1000}}} + ) + def test_get_user_invite_count_new_invites_test_case(self) -> None: + """ + Test that new invites that arrive after a provided timestamp are counted + """ + # grab a current timestamp + before_invites_sent_ts = self.hs.get_clock().time_msec() + + # bad user sends some invites + for room_id in [self.room1, self.room2]: + for user in self.random_users: + self.helper.invite(room_id, self.bad_user, user, tok=self.bad_user_tok) + + # fetch using timestamp, all should be returned + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/sent_invite_count?from_ts={before_invites_sent_ts}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["invite_count"], 8) + + # send some more invites, they should show up in addition to original 8 using same timestamp + for user in self.random_users: + self.helper.invite( + self.room3, src=self.bad_user, targ=user, tok=self.bad_user_tok + ) + + channel = self.make_request( + "GET", + 
f"/_synapse/admin/v1/users/{self.bad_user}/sent_invite_count?from_ts={before_invites_sent_ts}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["invite_count"], 12) + + def test_get_user_invite_count_invites_before_ts_test_case(self) -> None: + """ + Test that invites sent before provided ts are not counted + """ + # bad user sends some invites + for room_id in [self.room1, self.room2]: + for user in self.random_users: + self.helper.invite(room_id, self.bad_user, user, tok=self.bad_user_tok) + + # add a msec between last invite and ts + after_invites_sent_ts = self.hs.get_clock().time_msec() + 1 + + # fetch invites with timestamp, none should be returned + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/sent_invite_count?from_ts={after_invites_sent_ts}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["invite_count"], 0) + + def test_user_invite_count_kick_ban_not_counted(self) -> None: + """ + Test that kicks and bans are not counted in invite count + """ + to_kick_user_id = self.register_user("kick_me", "pass") + to_kick_tok = self.login("kick_me", "pass") + + self.helper.join(self.room1, to_kick_user_id, tok=to_kick_tok) + + # grab a current timestamp + before_invites_sent_ts = self.hs.get_clock().time_msec() + + # bad user sends some invites (8) + for room_id in [self.room1, self.room2]: + for user in self.random_users: + self.helper.invite( + room_id, src=self.bad_user, targ=user, tok=self.bad_user_tok + ) + + # fetch using timestamp, all invites sent should be counted + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/sent_invite_count?from_ts={before_invites_sent_ts}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["invite_count"], 8) + + # send a kick and some bans and make sure these aren't counted against invite total + for user in self.random_users: + self.helper.ban( + self.room1, src=self.bad_user, targ=user, tok=self.bad_user_tok + ) + + channel = self.make_request( + "POST", + f"/_matrix/client/v3/rooms/{self.room1}/kick", + content={"user_id": to_kick_user_id}, + access_token=self.bad_user_tok, + ) + self.assertEqual(channel.code, 200) + + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/sent_invite_count?from_ts={before_invites_sent_ts}", + access_token=self.admin_tok, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body["invite_count"], 8) + + +class GetCumulativeJoinedRoomCountForUserTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + admin.register_servlets, + room.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.admin = self.register_user("thomas", "pass", True) + self.admin_tok = self.login("thomas", "pass") + + self.bad_user = self.register_user("teresa", "pass") + self.bad_user_tok = self.login("teresa", "pass") + + def test_user_cumulative_joined_room_count(self) -> None: + """ + Tests proper count returned from /cumulative_joined_room_count endpoint + """ + # Create rooms and join, grab timestamp before room creation + before_room_creation_timestamp = self.hs.get_clock().time_msec() + + joined_rooms = [] + for _ in range(3): + room = self.helper.create_room_as(self.admin, tok=self.admin_tok) + self.helper.join( + room, 
user=self.bad_user, expect_code=200, tok=self.bad_user_tok + ) + joined_rooms.append(room) + + # get a timestamp after room creation and join, add a msec between last join and ts + after_room_creation = self.hs.get_clock().time_msec() + 1 + + # Get rooms using this timestamp, there should be none since all rooms were created and joined + # before provided timestamp + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/cumulative_joined_room_count?from_ts={int(after_room_creation)}", + access_token=self.admin_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertEqual(0, channel.json_body["cumulative_joined_room_count"]) + + # fetch rooms with the older timestamp before they were created and joined, this should + # return the rooms + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/cumulative_joined_room_count?from_ts={int(before_room_creation_timestamp)}", + access_token=self.admin_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertEqual( + len(joined_rooms), channel.json_body["cumulative_joined_room_count"] + ) + + def test_user_joined_room_count_includes_left_and_banned_rooms(self) -> None: + """ + Tests proper count returned from /joined_room_count endpoint when user has left + or been banned from joined rooms + """ + # Create rooms and join, grab timestamp before room creation + before_room_creation_timestamp = self.hs.get_clock().time_msec() + + joined_rooms = [] + for _ in range(3): + room = self.helper.create_room_as(self.admin, tok=self.admin_tok) + self.helper.join( + room, user=self.bad_user, expect_code=200, tok=self.bad_user_tok + ) + joined_rooms.append(room) + + # fetch rooms with the older timestamp before they were created and joined + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/cumulative_joined_room_count?from_ts={int(before_room_creation_timestamp)}", + access_token=self.admin_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertEqual( + len(joined_rooms), channel.json_body["cumulative_joined_room_count"] + ) + + # have the user banned from/leave the joined rooms + self.helper.ban( + joined_rooms[0], + src=self.admin, + targ=self.bad_user, + expect_code=200, + tok=self.admin_tok, + ) + self.helper.change_membership( + joined_rooms[1], + src=self.bad_user, + targ=self.bad_user, + membership="leave", + expect_code=200, + tok=self.bad_user_tok, + ) + self.helper.ban( + joined_rooms[2], + src=self.admin, + targ=self.bad_user, + expect_code=200, + tok=self.admin_tok, + ) + + # fetch the joined room count again, the number should remain the same as the collected joined rooms + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/users/{self.bad_user}/cumulative_joined_room_count?from_ts={int(before_room_creation_timestamp)}", + access_token=self.admin_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertEqual( + len(joined_rooms), channel.json_body["cumulative_joined_room_count"] + ) diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py index ecea5f2d5b3..ba46c5a93cb 100644 --- a/tests/rest/client/sliding_sync/test_rooms_required_state.py +++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py @@ -11,6 +11,7 @@ # See the GNU Affero General Public License for more details: # . 
# +import enum import logging from parameterized import parameterized, parameterized_class @@ -18,9 +19,9 @@ from twisted.test.proto_helpers import MemoryReactor import synapse.rest.admin -from synapse.api.constants import EventTypes, Membership +from synapse.api.constants import EventContentFields, EventTypes, JoinRules, Membership from synapse.handlers.sliding_sync import StateValues -from synapse.rest.client import login, room, sync +from synapse.rest.client import knock, login, room, sync from synapse.server import HomeServer from synapse.util import Clock @@ -30,6 +31,17 @@ logger = logging.getLogger(__name__) +# Inherit from `str` so that they show up in the test description when we +# `@parameterized.expand(...)` the first parameter +class MembershipAction(str, enum.Enum): + INVITE = "invite" + JOIN = "join" + KNOCK = "knock" + LEAVE = "leave" + BAN = "ban" + KICK = "kick" + + # FIXME: This can be removed once we bump `SCHEMA_COMPAT_VERSION` and run the # foreground update for # `sliding_sync_joined_rooms`/`sliding_sync_membership_snapshots` (tracked by @@ -52,6 +64,7 @@ class SlidingSyncRoomsRequiredStateTestCase(SlidingSyncBase): servlets = [ synapse.rest.admin.register_servlets, login.register_servlets, + knock.register_servlets, room.register_servlets, sync.register_servlets, ] @@ -496,6 +509,153 @@ def test_rooms_required_state_lazy_loading_room_members_incremental_sync( ) self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + @parameterized.expand( + [ + (MembershipAction.LEAVE,), + (MembershipAction.INVITE,), + (MembershipAction.KNOCK,), + (MembershipAction.JOIN,), + (MembershipAction.BAN,), + (MembershipAction.KICK,), + ] + ) + def test_rooms_required_state_changed_membership_in_timeline_lazy_loading_room_members_incremental_sync( + self, + room_membership_action: str, + ) -> None: + """ + On incremental sync, test `rooms.required_state` returns people relevant to the + timeline when lazy-loading room members, `["m.room.member","$LAZY"]` **including + changes to membership**. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + user4_id = self.register_user("user4", "pass") + user4_tok = self.login(user4_id, "pass") + user5_id = self.register_user("user5", "pass") + user5_tok = self.login(user5_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok, is_public=True) + # If we're testing knocks, set the room to knock + if room_membership_action == MembershipAction.KNOCK: + self.helper.send_state( + room_id1, + EventTypes.JoinRules, + {"join_rule": JoinRules.KNOCK}, + tok=user2_tok, + ) + + # Join the test users to the room + self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.invite(room_id1, src=user2_id, targ=user3_id, tok=user2_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + self.helper.invite(room_id1, src=user2_id, targ=user4_id, tok=user2_tok) + self.helper.join(room_id1, user4_id, tok=user4_tok) + if room_membership_action in ( + MembershipAction.LEAVE, + MembershipAction.BAN, + MembershipAction.JOIN, + ): + self.helper.invite(room_id1, src=user2_id, targ=user5_id, tok=user2_tok) + self.helper.join(room_id1, user5_id, tok=user5_tok) + + # Send some messages to fill up the space + self.helper.send(room_id1, "1", tok=user2_tok) + self.helper.send(room_id1, "2", tok=user2_tok) + self.helper.send(room_id1, "3", tok=user2_tok) + + # Make the Sliding Sync request with lazy loading for the room members + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 3, + } + } + } + response_body, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Send more timeline events into the room + self.helper.send(room_id1, "4", tok=user2_tok) + self.helper.send(room_id1, "5", tok=user4_tok) + # The third event will be our membership event concerning user5 + if room_membership_action == MembershipAction.LEAVE: + # User 5 leaves + self.helper.leave(room_id1, user5_id, tok=user5_tok) + elif room_membership_action == MembershipAction.INVITE: + # User 5 is invited + self.helper.invite(room_id1, src=user2_id, targ=user5_id, tok=user2_tok) + elif room_membership_action == MembershipAction.KNOCK: + # User 5 knocks + self.helper.knock(room_id1, user5_id, tok=user5_tok) + # The admin of the room accepts the knock + self.helper.invite(room_id1, src=user2_id, targ=user5_id, tok=user2_tok) + elif room_membership_action == MembershipAction.JOIN: + # Update the display name of user5 (causing a membership change) + self.helper.send_state( + room_id1, + event_type=EventTypes.Member, + state_key=user5_id, + body={ + EventContentFields.MEMBERSHIP: Membership.JOIN, + EventContentFields.MEMBERSHIP_DISPLAYNAME: "quick changer", + }, + tok=user5_tok, + ) + elif room_membership_action == MembershipAction.BAN: + self.helper.ban(room_id1, src=user2_id, targ=user5_id, tok=user2_tok) + elif room_membership_action == MembershipAction.KICK: + # Kick user5 from the room + self.helper.change_membership( + room=room_id1, + src=user2_id, + targ=user5_id, + tok=user2_tok, + membership=Membership.LEAVE, + extra_data={ + "reason": "Bad manners", + }, + ) + else: + raise AssertionError( + f"Unknown room_membership_action: 
{room_membership_action}" + ) + + # Make an incremental Sliding Sync request + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # Only user2, user4, and user5 sent events in the last 3 events we see in the + # `timeline`. + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + # This appears because *some* membership in the room changed and the + # heroes are recalculated and is thrown in because we have it. But this + # is technically optional and not needed because we've already seen user2 + # in the last sync (and their membership hasn't changed). + state_map[(EventTypes.Member, user2_id)], + # Appears because there is a message in the timeline from this user + state_map[(EventTypes.Member, user4_id)], + # Appears because there is a membership event in the timeline from this user + state_map[(EventTypes.Member, user5_id)], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + def test_rooms_required_state_expand_lazy_loading_room_members_incremental_sync( self, ) -> None: @@ -751,9 +911,10 @@ def test_rooms_required_state_me(self) -> None: self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) @parameterized.expand([(Membership.LEAVE,), (Membership.BAN,)]) - def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: + def test_rooms_required_state_leave_ban_initial(self, stop_membership: str) -> None: """ - Test `rooms.required_state` should not return state past a leave/ban event. + Test `rooms.required_state` should not return state past a leave/ban event when + it's the first "initial" time the room is being sent down the connection. """ user1_id = self.register_user("user1", "pass") user1_tok = self.login(user1_id, "pass") @@ -788,6 +949,13 @@ def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: body={"foo": "bar"}, tok=user2_tok, ) + self.helper.send_state( + room_id1, + event_type="org.matrix.bar_state", + state_key="", + body={"bar": "bar"}, + tok=user2_tok, + ) if stop_membership == Membership.LEAVE: # User 1 leaves @@ -796,6 +964,8 @@ def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: # User 1 is banned self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + # Get the state_map before we change the state as this is the final state we + # expect User1 to be able to see state_map = self.get_success( self.storage_controllers.state.get_current_state(room_id1) ) @@ -808,12 +978,36 @@ def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: body={"foo": "qux"}, tok=user2_tok, ) + self.helper.send_state( + room_id1, + event_type="org.matrix.bar_state", + state_key="", + body={"bar": "qux"}, + tok=user2_tok, + ) self.helper.leave(room_id1, user3_id, tok=user3_tok) # Make an incremental Sliding Sync request + # + # Also expand the required state to include the `org.matrix.bar_state` event. + # This is just an extra complication of the test. 
+ sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, "*"], + ["org.matrix.foo_state", ""], + ["org.matrix.bar_state", ""], + ], + "timeline_limit": 3, + } + } + } response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) - # Only user2 and user3 sent events in the 3 events we see in the `timeline` + # We should only see the state up to the leave/ban event self._assertRequiredStateIncludes( response_body["rooms"][room_id1]["required_state"], { @@ -822,6 +1016,126 @@ def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: state_map[(EventTypes.Member, user2_id)], state_map[(EventTypes.Member, user3_id)], state_map[("org.matrix.foo_state", "")], + state_map[("org.matrix.bar_state", "")], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + @parameterized.expand([(Membership.LEAVE,), (Membership.BAN,)]) + def test_rooms_required_state_leave_ban_incremental( + self, stop_membership: str + ) -> None: + """ + Test `rooms.required_state` should not return state past a leave/ban event on + incremental sync. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "bar"}, + tok=user2_tok, + ) + self.helper.send_state( + room_id1, + event_type="org.matrix.bar_state", + state_key="", + body={"bar": "bar"}, + tok=user2_tok, + ) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, "*"], + ["org.matrix.foo_state", ""], + ], + "timeline_limit": 3, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + if stop_membership == Membership.LEAVE: + # User 1 leaves + self.helper.leave(room_id1, user1_id, tok=user1_tok) + elif stop_membership == Membership.BAN: + # User 1 is banned + self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + + # Get the state_map before we change the state as this is the final state we + # expect User1 to be able to see + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # Change the state after user 1 leaves + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "qux"}, + tok=user2_tok, + ) + self.helper.send_state( + room_id1, + event_type="org.matrix.bar_state", + state_key="", + body={"bar": "qux"}, + tok=user2_tok, + ) + self.helper.leave(room_id1, user3_id, tok=user3_tok) + + # Make an incremental Sliding Sync request + # + # Also expand the required state to include the `org.matrix.bar_state` event. + # This is just an extra complication of the test. 
+ sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, "*"], + ["org.matrix.foo_state", ""], + ["org.matrix.bar_state", ""], + ], + "timeline_limit": 3, + } + } + } + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # User1 should only see the state up to the leave/ban event + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + # User1 should see their leave/ban membership + state_map[(EventTypes.Member, user1_id)], + state_map[("org.matrix.bar_state", "")], + # The commented out state events were already returned in the initial + # sync so we shouldn't see them again on the incremental sync. And we + # shouldn't see the state events that changed after the leave/ban event. + # + # state_map[(EventTypes.Create, "")], + # state_map[(EventTypes.Member, user2_id)], + # state_map[(EventTypes.Member, user3_id)], + # state_map[("org.matrix.foo_state", "")], }, exact=True, ) @@ -1243,7 +1557,7 @@ def test_rooms_required_state_expand_retract_expand(self) -> None: # Update the room name self.helper.send_state( - room_id1, "m.room.name", {"name": "Bar"}, state_key="", tok=user1_tok + room_id1, EventTypes.Name, {"name": "Bar"}, state_key="", tok=user1_tok ) # Update the sliding sync requests to exclude the room name again diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py index 578cb384cd7..f3cf2111ec1 100644 --- a/tests/rest/client/sliding_sync/test_sliding_sync.py +++ b/tests/rest/client/sliding_sync/test_sliding_sync.py @@ -1169,12 +1169,6 @@ def test_state_reset_room_comes_down_incremental_sync(self) -> None: self.persistence.persist_event(join_rule_event, join_rule_context) ) - # FIXME: We're manually busting the cache since - # https://github.com/element-hq/synapse/issues/17368 is not solved yet - self.store._membership_stream_cache.entity_has_changed( - user1_id, join_rule_event_pos.stream - ) - # Ensure that the state reset worked and only user2 is in the room now users_in_room = self.get_success(self.store.get_users_in_room(room_id1)) self.assertIncludes(set(users_in_room), {user2_id}, exact=True) @@ -1322,12 +1316,6 @@ def test_state_reset_previously_room_comes_down_incremental_sync_with_filters( self.persistence.persist_event(join_rule_event, join_rule_context) ) - # FIXME: We're manually busting the cache since - # https://github.com/element-hq/synapse/issues/17368 is not solved yet - self.store._membership_stream_cache.entity_has_changed( - user1_id, join_rule_event_pos.stream - ) - # Ensure that the state reset worked and only user2 is in the room now users_in_room = self.get_success(self.store.get_users_in_room(space_room_id)) self.assertIncludes(set(users_in_room), {user2_id}, exact=True) @@ -1506,12 +1494,6 @@ def test_state_reset_never_room_incremental_sync_with_filters( self.persistence.persist_event(join_rule_event, join_rule_context) ) - # FIXME: We're manually busting the cache since - # https://github.com/element-hq/synapse/issues/17368 is not solved yet - self.store._membership_stream_cache.entity_has_changed( - user1_id, join_rule_event_pos.stream - ) - # Ensure that the state reset worked and only user2 is in the room now users_in_room = self.get_success(self.store.get_users_in_room(space_room_id)) self.assertIncludes(set(users_in_room), {user2_id}, exact=True) diff --git a/tests/rest/client/test_devices.py b/tests/rest/client/test_devices.py index 
a3ed12a38fe..dd3abdebac0 100644 --- a/tests/rest/client/test_devices.py +++ b/tests/rest/client/test_devices.py @@ -24,6 +24,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.errors import NotFoundError +from synapse.appservice import ApplicationService from synapse.rest import admin, devices, sync from synapse.rest.client import keys, login, register from synapse.server import HomeServer @@ -455,3 +456,183 @@ def test_msc3814_dehydrated_device_delete_works(self) -> None: token, ) self.assertEqual(channel.json_body["device_keys"], {"@mikey:test": {}}) + + +class MSC4190AppserviceDevicesTestCase(unittest.HomeserverTestCase): + servlets = [ + register.register_servlets, + devices.register_servlets, + ] + + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: + self.hs = self.setup_test_homeserver() + + # This application service uses the new MSC4190 behaviours + self.msc4190_service = ApplicationService( + id="msc4190", + token="some_token", + hs_token="some_token", + sender="@as:example.com", + namespaces={ + ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] + }, + msc4190_device_management=True, + ) + # This application service doesn't use the new MSC4190 behaviours + self.pre_msc_service = ApplicationService( + id="regular", + token="other_token", + hs_token="other_token", + sender="@as2:example.com", + namespaces={ + ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] + }, + msc4190_device_management=False, + ) + self.hs.get_datastores().main.services_cache.append(self.msc4190_service) + self.hs.get_datastores().main.services_cache.append(self.pre_msc_service) + return self.hs + + def test_PUT_device(self) -> None: + self.register_appservice_user("alice", self.msc4190_service.token) + self.register_appservice_user("bob", self.pre_msc_service.token) + + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(channel.json_body, {"devices": []}) + + channel = self.make_request( + "PUT", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@alice:test", + content={"display_name": "Alice's device"}, + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 201, channel.json_body) + + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(len(channel.json_body["devices"]), 1) + self.assertEqual(channel.json_body["devices"][0]["device_id"], "AABBCCDD") + + # Doing a second time should return a 200 instead of a 201 + channel = self.make_request( + "PUT", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@alice:test", + content={"display_name": "Alice's device"}, + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # On the regular service, that API should not allow for the + # creation of new devices. 
+ channel = self.make_request( + "PUT", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@bob:test", + content={"display_name": "Bob's device"}, + access_token=self.pre_msc_service.token, + ) + self.assertEqual(channel.code, 404, channel.json_body) + + def test_DELETE_device(self) -> None: + self.register_appservice_user("alice", self.msc4190_service.token) + + # There should be no device + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(channel.json_body, {"devices": []}) + + # Create a device + channel = self.make_request( + "PUT", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@alice:test", + content={}, + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 201, channel.json_body) + + # There should be one device + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(len(channel.json_body["devices"]), 1) + + # Delete the device. UIA should not be required. + channel = self.make_request( + "DELETE", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # There should be no device again + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(channel.json_body, {"devices": []}) + + def test_POST_delete_devices(self) -> None: + self.register_appservice_user("alice", self.msc4190_service.token) + + # There should be no device + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(channel.json_body, {"devices": []}) + + # Create a device + channel = self.make_request( + "PUT", + "/_matrix/client/v3/devices/AABBCCDD?user_id=@alice:test", + content={}, + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 201, channel.json_body) + + # There should be one device + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(len(channel.json_body["devices"]), 1) + + # Delete the device with delete_devices + # UIA should not be required. 
+ channel = self.make_request( + "POST", + "/_matrix/client/v3/delete_devices?user_id=@alice:test", + content={"devices": ["AABBCCDD"]}, + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # There should be no device again + channel = self.make_request( + "GET", + "/_matrix/client/v3/devices?user_id=@alice:test", + access_token=self.msc4190_service.token, + ) + self.assertEqual(channel.code, 200, channel.json_body) + self.assertEqual(channel.json_body, {"devices": []}) diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index c091f403cc0..b697bf6f675 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -120,6 +120,34 @@ def test_POST_appservice_registration_invalid(self) -> None: self.assertEqual(channel.code, 401, msg=channel.result) + def test_POST_appservice_msc4190_enabled(self) -> None: + # With MSC4190 enabled, the registration should *not* return an access token + user_id = "@as_user_kermit:test" + as_token = "i_am_an_app_service" + + appservice = ApplicationService( + as_token, + id="1234", + namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, + sender="@as:test", + msc4190_device_management=True, + ) + + self.hs.get_datastores().main.services_cache.append(appservice) + request_data = { + "username": "as_user_kermit", + "type": APP_SERVICE_REGISTRATION_TYPE, + } + + channel = self.make_request( + b"POST", self.url + b"?access_token=i_am_an_app_service", request_data + ) + + self.assertEqual(channel.code, 200, msg=channel.result) + det_data = {"user_id": user_id, "home_server": self.hs.hostname} + self.assertLessEqual(det_data.items(), channel.json_body.items()) + self.assertNotIn("access_token", channel.json_body) + def test_POST_bad_password(self) -> None: request_data = {"username": "kermit", "password": 666} channel = self.make_request(b"POST", self.url, request_data) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 07600418ed8..4cf1a3dc519 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1337,17 +1337,13 @@ def test_suspended_user_cannot_join_room(self) -> None: "POST", f"/join/{self.room1}", access_token=self.tok2 ) self.assertEqual(channel.code, 403) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") channel = self.make_request( "POST", f"/rooms/{self.room1}/join", access_token=self.tok2 ) self.assertEqual(channel.code, 403) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") def test_suspended_user_cannot_knock_on_room(self) -> None: # set the user as suspended @@ -1361,9 +1357,7 @@ def test_suspended_user_cannot_knock_on_room(self) -> None: shorthand=False, ) self.assertEqual(channel.code, 403) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") def test_suspended_user_cannot_invite_to_room(self) -> None: # set the user as suspended @@ -1376,9 +1370,7 @@ def test_suspended_user_cannot_invite_to_room(self) -> None: access_token=self.tok1, content={"user_id": self.user2}, ) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + 
self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") class RoomAppserviceTsParamTestCase(unittest.HomeserverTestCase): @@ -4011,9 +4003,7 @@ def test_suspended_user_cannot_send_message_to_room(self) -> None: access_token=self.tok1, content={"body": "hello", "msgtype": "m.text"}, ) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") def test_suspended_user_cannot_change_profile_data(self) -> None: # set the user as suspended @@ -4026,9 +4016,7 @@ def test_suspended_user_cannot_change_profile_data(self) -> None: content={"avatar_url": "mxc://matrix.org/wefh34uihSDRGhw34"}, shorthand=False, ) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") channel2 = self.make_request( "PUT", @@ -4037,9 +4025,7 @@ def test_suspended_user_cannot_change_profile_data(self) -> None: content={"displayname": "something offensive"}, shorthand=False, ) - self.assertEqual( - channel2.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel2.json_body["errcode"], "M_USER_SUSPENDED") def test_suspended_user_cannot_redact_messages_other_than_their_own(self) -> None: # first user sends message @@ -4073,9 +4059,7 @@ def test_suspended_user_cannot_redact_messages_other_than_their_own(self) -> Non content={"reason": "bogus"}, shorthand=False, ) - self.assertEqual( - channel.json_body["errcode"], "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED" - ) + self.assertEqual(channel.json_body["errcode"], "M_USER_SUSPENDED") # but can redact their own channel = self.make_request( diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py index 38a56419f37..0f58dc8a0a6 100644 --- a/tests/storage/test_stream.py +++ b/tests/storage/test_stream.py @@ -1209,12 +1209,6 @@ def test_state_reset2(self) -> None: self.persistence.persist_event(join_rule_event, join_rule_context) ) - # FIXME: We're manually busting the cache since - # https://github.com/element-hq/synapse/issues/17368 is not solved yet - self.store._membership_stream_cache.entity_has_changed( - user1_id, join_rule_event_pos.stream - ) - after_reset_token = self.event_sources.get_current_token() membership_changes = self.get_success( diff --git a/tests/unittest.py b/tests/unittest.py index 614e805abd5..6a32861a3e2 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -781,7 +781,7 @@ def register_appservice_user( self, username: str, appservice_token: str, - ) -> Tuple[str, str]: + ) -> Tuple[str, Optional[str]]: """Register an appservice user as an application service. Requires the client-facing registration API be registered. 
@@ -805,7 +805,7 @@ def register_appservice_user(
             access_token=appservice_token,
         )
         self.assertEqual(channel.code, 200, channel.json_body)
-        return channel.json_body["user_id"], channel.json_body["device_id"]
+        return channel.json_body["user_id"], channel.json_body.get("device_id")
 
     def login(
         self,
diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py
index c41f5706af5..9254bff79b5 100644
--- a/tests/util/test_stream_change_cache.py
+++ b/tests/util/test_stream_change_cache.py
@@ -255,3 +255,28 @@ def test_max_pos(self) -> None:
 
         # Unknown entities will return None
         self.assertEqual(cache.get_max_pos_of_last_change("not@here.website"), None)
+
+    def test_all_entities_changed(self) -> None:
+        """
+        `StreamChangeCache.all_entities_changed(...)` will mark all entities as changed.
+        """
+        cache = StreamChangeCache("#test", 1, max_size=10)
+
+        cache.entity_has_changed("user@foo.com", 2)
+        cache.entity_has_changed("bar@baz.net", 3)
+        cache.entity_has_changed("user@elsewhere.org", 4)
+
+        cache.all_entities_changed(5)
+
+        # Everything should be marked as changed before the stream position where the
+        # change occurred.
+        self.assertTrue(cache.has_entity_changed("user@foo.com", 4))
+        self.assertTrue(cache.has_entity_changed("bar@baz.net", 4))
+        self.assertTrue(cache.has_entity_changed("user@elsewhere.org", 4))
+
+        # Nothing should be marked as changed at/after the stream position where the
+        # change occurred. In other words, nothing has changed since stream position 5.
+        self.assertFalse(cache.has_entity_changed("user@foo.com", 5))
+        self.assertFalse(cache.has_entity_changed("bar@baz.net", 5))
+        self.assertFalse(cache.has_entity_changed("user@elsewhere.org", 5))
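
As a brief illustration of the contract the new `test_all_entities_changed` case pins down, here is a minimal usage sketch built only from the `StreamChangeCache` calls exercised in the test above. This is a sketch, not Synapse code: the `#example` cache name and the `maybe_reload` helper are illustrative assumptions.

from synapse.util.caches.stream_change_cache import StreamChangeCache

# A small cache whose earliest known stream position is 1, mirroring the test.
cache = StreamChangeCache("#example", 1, max_size=10)
cache.entity_has_changed("user@foo.com", 2)
cache.entity_has_changed("bar@baz.net", 3)


def maybe_reload(entity: str, last_seen_pos: int) -> bool:
    # A caller typically only redoes an expensive lookup if the entity may have
    # changed since the stream position it last processed.
    return cache.has_entity_changed(entity, last_seen_pos)


# After all_entities_changed(5), every entity reads as changed for positions
# before 5 and as unchanged at or after 5, which is the property the test asserts.
cache.all_entities_changed(5)
assert maybe_reload("user@foo.com", 4)
assert not maybe_reload("bar@baz.net", 5)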