From 9d7880c0c617a1a042f0430de3753af8467e98a5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 12 Mar 2024 15:03:45 +0000 Subject: [PATCH 01/45] 1.103.0rc1 --- CHANGES.md | 28 ++++++++++++++++++++++++++++ changelog.d/16768.doc | 1 - changelog.d/16874.feature | 1 - changelog.d/16946.doc | 1 - changelog.d/16947.feature | 1 - changelog.d/16951.doc | 1 - changelog.d/16973.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 9 files changed, 35 insertions(+), 7 deletions(-) delete mode 100644 changelog.d/16768.doc delete mode 100644 changelog.d/16874.feature delete mode 100644 changelog.d/16946.doc delete mode 100644 changelog.d/16947.feature delete mode 100644 changelog.d/16951.doc delete mode 100644 changelog.d/16973.bugfix diff --git a/CHANGES.md b/CHANGES.md index 2e7e4be5ac..3d027ae253 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,31 @@ +# Synapse 1.103.0rc1 (2024-03-12) + +### Features + +- Add a new [List Accounts v3](https://element-hq.github.io/synapse/v1.103/admin_api/user_admin_api.html#list-accounts-v3) Admin API with improved deactivated user filtering capabilities. ([\#16874](https://github.com/element-hq/synapse/issues/16874)) +- Include `Retry-After` header by default per [MSC4041](https://github.com/matrix-org/matrix-spec-proposals/pull/4041). Contributed by @clokep. ([\#16947](https://github.com/element-hq/synapse/issues/16947)) + +### Bugfixes + +- Fix joining remote rooms when a module uses the `on_new_event` callback. This callback may now pass partial state events instead of the full state for remote rooms. Introduced in v1.76.0. ([\#16973](https://github.com/element-hq/synapse/issues/16973)) + +### Improved Documentation + +- Add HAProxy example for single port operation to reverse proxy documentation. Contributed by Georg Pfuetzenreuter (@tacerus). ([\#16768](https://github.com/element-hq/synapse/issues/16768)) +- Improve the documentation around running Complement tests with new configuration parameters. ([\#16946](https://github.com/element-hq/synapse/issues/16946)) +- Add docs on upgrading from a very old version. ([\#16951](https://github.com/element-hq/synapse/issues/16951)) + + +### Updates to locked dependencies + +* Bump JasonEtco/create-an-issue from 2.9.1 to 2.9.2. ([\#16934](https://github.com/element-hq/synapse/issues/16934)) +* Bump anyhow from 1.0.79 to 1.0.80. ([\#16935](https://github.com/element-hq/synapse/issues/16935)) +* Bump dawidd6/action-download-artifact from 3.0.0 to 3.1.1. ([\#16933](https://github.com/element-hq/synapse/issues/16933)) +* Bump furo from 2023.9.10 to 2024.1.29. ([\#16939](https://github.com/element-hq/synapse/issues/16939)) +* Bump pyopenssl from 23.3.0 to 24.0.0. ([\#16937](https://github.com/element-hq/synapse/issues/16937)) +* Bump types-netaddr from 0.10.0.20240106 to 1.2.0.20240219. ([\#16938](https://github.com/element-hq/synapse/issues/16938)) + + # Synapse 1.102.0 (2024-03-05) ### Bugfixes diff --git a/changelog.d/16768.doc b/changelog.d/16768.doc deleted file mode 100644 index 4f574c2ac6..0000000000 --- a/changelog.d/16768.doc +++ /dev/null @@ -1 +0,0 @@ -Add HAProxy example for single port operation to reverse proxy documentation. Contributed by Georg Pfuetzenreuter (@tacerus). 
diff --git a/changelog.d/16874.feature b/changelog.d/16874.feature deleted file mode 100644 index 6e5afdde3b..0000000000 --- a/changelog.d/16874.feature +++ /dev/null @@ -1 +0,0 @@ -Add a new [List Accounts v3](https://element-hq.github.io/synapse/v1.103/admin_api/user_admin_api.html#list-accounts-v3) Admin API with improved deactivated user filtering capabilities. \ No newline at end of file diff --git a/changelog.d/16946.doc b/changelog.d/16946.doc deleted file mode 100644 index 8037a65450..0000000000 --- a/changelog.d/16946.doc +++ /dev/null @@ -1 +0,0 @@ -Improve the documentation around running Complement tests with new configuration parameters. \ No newline at end of file diff --git a/changelog.d/16947.feature b/changelog.d/16947.feature deleted file mode 100644 index efd0dbddb2..0000000000 --- a/changelog.d/16947.feature +++ /dev/null @@ -1 +0,0 @@ -Include `Retry-After` header by default per [MSC4041](https://github.com/matrix-org/matrix-spec-proposals/pull/4041). Contributed by @clokep. diff --git a/changelog.d/16951.doc b/changelog.d/16951.doc deleted file mode 100644 index a7fb33979d..0000000000 --- a/changelog.d/16951.doc +++ /dev/null @@ -1 +0,0 @@ -Add docs on upgrading from a very old version. \ No newline at end of file diff --git a/changelog.d/16973.bugfix b/changelog.d/16973.bugfix deleted file mode 100644 index a0e3a12304..0000000000 --- a/changelog.d/16973.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix joining remote rooms when a module uses the `on_new_event` callback. This callback may now pass partial state events instead of the full state for remote rooms. diff --git a/debian/changelog b/debian/changelog index df0ab0dcd8..ac128e535d 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.103.0~rc1) stable; urgency=medium + + * New Synapse release 1.103.0rc1. + + -- Synapse Packaging team Tue, 12 Mar 2024 15:02:56 +0000 + matrix-synapse-py3 (1.102.0) stable; urgency=medium * New Synapse release 1.102.0. diff --git a/pyproject.toml b/pyproject.toml index c360d60ec6..2bda9596f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.102.0" +version = "1.103.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 1f887907649c068993d63d32cde5e18b391dc15e Mon Sep 17 00:00:00 2001 From: Gerrit Gogel Date: Tue, 12 Mar 2024 16:07:36 +0100 Subject: [PATCH 02/45] Prevent locking up while processing batched_auth_events (#16968) This PR aims to fix #16895, caused by a regression in #7 and not fixed by #16903. The PR #16903 only fixes a starvation issue, where the CPU isn't released. There is a second issue, where the execution is blocked. This theory is supported by the flame graphs provided in #16895 and the fact that I see the CPU usage dropping far below the limit. Since the changes in #7, the method `check_state_independent_auth_rules` is called with the additional parameter `batched_auth_events`: https://github.com/element-hq/synapse/blob/6fa13b4f927c10b5f4e9495be746ec28849f5cb6/synapse/handlers/federation_event.py#L1741-L1743 This makes the execution enter this `if` clause, introduced with #15195 https://github.com/element-hq/synapse/blob/6fa13b4f927c10b5f4e9495be746ec28849f5cb6/synapse/event_auth.py#L178-L189 There are two issues in the above code snippet. First, there is the blocking issue.
I'm not entirely sure if this is a deadlock, starvation, or something different. In the beginning, I thought the copy operation was responsible. It wasn't. Then I investigated the nested `store.get_events` inside the function `update`. This was also not causing the blocking issue. Only when I replaced the set difference operation (`-`) with a list comprehension was the blocking resolved. Creating and comparing sets with a very large number of events seems to be problematic. This is how the flamegraph looks now while persisting outliers. As you can see, the execution no longer locks up in the above function. ![output_2024-02-28_13-59-40](https://github.com/element-hq/synapse/assets/13143850/6db9c9ac-484f-47d0-bdde-70abfbd773ec) Second, the copying here doesn't serve any purpose, because only a shallow copy is created. This means the same objects from the original dict are referenced. This defeats the intention of protecting these objects from mutation. The review of the original PR https://github.com/matrix-org/synapse/pull/15195 had an extensive discussion about this matter. Various approaches to copying the auth_events were attempted: 1) Implementing a deepcopy caused issues due to builtins.EventInternalMetadata not being pickleable. 2) Creating a dict with new objects akin to a deepcopy. 3) Creating a dict with new objects containing only necessary attributes. In conclusion, there is no easy way to create an actual copy of the objects. Opting for a deepcopy can significantly strain memory and CPU resources, making it an inefficient choice. I don't see why the copy is necessary in the first place. Therefore I'm proposing to remove it altogether. After these changes, I was able to successfully join these rooms without the main worker locking up: - #synapse:matrix.org - #element-android:matrix.org - #element-web:matrix.org - #ecips:matrix.org - #ipfs-chatter:ipfs.io - #python:matrix.org - #matrix:matrix.org --- changelog.d/16968.bugfix | 1 + synapse/event_auth.py | 43 +++++++++++++++++++++++++++++++--------- 2 files changed, 35 insertions(+), 9 deletions(-) create mode 100644 changelog.d/16968.bugfix diff --git a/changelog.d/16968.bugfix b/changelog.d/16968.bugfix new file mode 100644 index 0000000000..57ed851178 --- /dev/null +++ b/changelog.d/16968.bugfix @@ -0,0 +1 @@ +Prevent locking up when checking auth rules that are independent of room state for batched auth events. Contributed by @ggogel. \ No newline at end of file diff --git a/synapse/event_auth.py b/synapse/event_auth.py index d922c8dc35..c8b06f760e 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -23,7 +23,20 @@ import collections.abc import logging import typing -from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple, Union +from typing import ( + Any, + ChainMap, + Dict, + Iterable, + List, + Mapping, + MutableMapping, + Optional, + Set, + Tuple, + Union, + cast, +) from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes @@ -175,12 +188,22 @@ async def check_state_independent_auth_rules( return # 2. Reject if event has auth_events that: ... + auth_events: ChainMap[str, EventBase] = ChainMap() if batched_auth_events: - # Copy the batched auth events to avoid mutating them. - auth_events = dict(batched_auth_events) - needed_auth_event_ids = set(event.auth_event_ids()) - batched_auth_events.keys() + # batched_auth_events can become very large. To avoid repeatedly copying it, which + # would significantly impact performance, we use a ChainMap.
+ # batched_auth_events must be cast to MutableMapping because .new_child() requires + # this type. This casting is safe as the mapping is never mutated. + auth_events = auth_events.new_child( + cast(MutableMapping[str, "EventBase"], batched_auth_events) + ) + needed_auth_event_ids = [ + event_id + for event_id in event.auth_event_ids() + if event_id not in batched_auth_events + ] if needed_auth_event_ids: - auth_events.update( + auth_events = auth_events.new_child( await store.get_events( needed_auth_event_ids, redact_behaviour=EventRedactBehaviour.as_is, @@ -188,10 +211,12 @@ async def check_state_independent_auth_rules( ) ) else: - auth_events = await store.get_events( - event.auth_event_ids(), - redact_behaviour=EventRedactBehaviour.as_is, - allow_rejected=True, + auth_events = auth_events.new_child( + await store.get_events( + event.auth_event_ids(), + redact_behaviour=EventRedactBehaviour.as_is, + allow_rejected=True, + ) ) room_id = event.room_id From 5c0b87ff959dfa424f36f67df12a9e9e2c2e9763 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 12 Mar 2024 15:12:19 +0000 Subject: [PATCH 03/45] Update changelog --- CHANGES.md | 1 + changelog.d/16968.bugfix | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 changelog.d/16968.bugfix diff --git a/CHANGES.md b/CHANGES.md index 3d027ae253..f0c23e693e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,6 +8,7 @@ ### Bugfixes - Fix joining remote rooms when a module uses the `on_new_event` callback. This callback may now pass partial state events instead of the full state for remote rooms. Introduced in v1.76.0. ([\#16973](https://github.com/element-hq/synapse/issues/16973)) +- Fix performance issue when joining very large rooms that can cause the server to lock up. Introduced in v1.100.0. Contributed by @ggogel. ([\#16968](https://github.com/element-hq/synapse/issues/16968)) ### Improved Documentation diff --git a/changelog.d/16968.bugfix b/changelog.d/16968.bugfix deleted file mode 100644 index 57ed851178..0000000000 --- a/changelog.d/16968.bugfix +++ /dev/null @@ -1 +0,0 @@ -Prevent locking up when checking auth rules that are independent of room state for batched auth events. Contributed by @ggogel. \ No newline at end of file From 2bdf6280f632946fb77d5f81f99db31bc0b62e83 Mon Sep 17 00:00:00 2001 From: V02460 Date: Wed, 13 Mar 2024 17:40:08 +0100 Subject: [PATCH 04/45] Raise poetry-core version cap to 1.9.0 (#16986) A new poetry-core version was released. See if CI is happy. Required for the latest Fedora Synapse package. --- changelog.d/16986.misc | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/16986.misc diff --git a/changelog.d/16986.misc b/changelog.d/16986.misc new file mode 100644 index 0000000000..0a556ba8b4 --- /dev/null +++ b/changelog.d/16986.misc @@ -0,0 +1 @@ +Raise poetry-core version cap to 1.9.0. diff --git a/pyproject.toml b/pyproject.toml index 2bda9596f6..f05546b1c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -382,7 +382,7 @@ furo = ">=2022.12.7,<2025.0.0" # runtime errors caused by build system changes. # We are happy to raise these upper bounds upon request, # provided we check that it's safe to do so (i.e. that CI passes). 
-requires = ["poetry-core>=1.1.0,<=1.8.1", "setuptools_rust>=1.3,<=1.8.1"] +requires = ["poetry-core>=1.1.0,<=1.9.0", "setuptools_rust>=1.3,<=1.8.1"] build-backend = "poetry.core.masonry.api" From 1e68b56a623a0d7f98106482bfed223c22193050 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:46:44 +0000 Subject: [PATCH 05/45] Bump black from 23.10.1 to 24.2.0 (#16936) --- poetry.lock | 44 +++++---- synapse/_scripts/synapse_port_db.py | 22 ++--- synapse/api/constants.py | 1 - synapse/api/room_versions.py | 8 +- synapse/app/homeserver.py | 6 +- synapse/appservice/api.py | 18 ++-- synapse/config/registration.py | 6 +- synapse/config/repository.py | 6 +- synapse/event_auth.py | 3 +- synapse/events/__init__.py | 12 +-- synapse/events/utils.py | 6 +- synapse/federation/federation_server.py | 6 +- synapse/federation/send_queue.py | 6 +- synapse/handlers/account.py | 8 +- synapse/handlers/directory.py | 6 +- synapse/handlers/federation.py | 10 +- synapse/handlers/federation_event.py | 6 +- synapse/handlers/message.py | 6 +- synapse/handlers/presence.py | 12 +-- synapse/handlers/profile.py | 6 +- synapse/handlers/relations.py | 14 +-- synapse/handlers/room.py | 16 ++-- synapse/handlers/room_member.py | 10 +- synapse/handlers/sync.py | 6 +- synapse/http/matrixfederationclient.py | 18 ++-- synapse/http/servlet.py | 91 ++++++------------- synapse/logging/context.py | 6 +- synapse/logging/opentracing.py | 6 +- synapse/media/media_repository.py | 6 +- synapse/metrics/jemalloc.py | 6 +- synapse/notifier.py | 9 +- synapse/push/mailer.py | 14 +-- synapse/replication/http/_base.py | 6 +- synapse/replication/tcp/external_cache.py | 6 +- synapse/rest/admin/__init__.py | 10 +- synapse/rest/admin/users.py | 14 +-- synapse/rest/client/account_data.py | 6 +- synapse/rest/client/sync.py | 18 ++-- synapse/rest/key/v2/remote_key_resource.py | 8 +- synapse/state/__init__.py | 18 ++-- synapse/state/v2.py | 15 +-- synapse/storage/background_updates.py | 6 +- synapse/storage/controllers/persist_events.py | 6 +- synapse/storage/database.py | 24 ++--- synapse/storage/databases/main/devices.py | 6 +- .../storage/databases/main/end_to_end_keys.py | 9 +- synapse/storage/databases/main/events.py | 6 +- .../storage/databases/main/events_worker.py | 32 +++---- synapse/storage/databases/main/lock.py | 6 +- .../databases/main/media_repository.py | 6 +- synapse/storage/databases/main/receipts.py | 12 +-- synapse/storage/databases/main/state.py | 6 +- synapse/storage/databases/main/stream.py | 18 ++-- .../storage/databases/main/task_scheduler.py | 12 +-- .../storage/databases/main/user_directory.py | 8 +- synapse/storage/databases/state/store.py | 10 +- synapse/storage/engines/_base.py | 21 ++--- synapse/storage/types.py | 74 ++++++--------- synapse/synapse_rust/events.pyi | 6 ++ synapse/types/__init__.py | 14 ++- synapse/util/async_helpers.py | 24 ++--- synapse/util/caches/expiringcache.py | 6 +- synapse/util/caches/lrucache.py | 18 ++-- synapse/util/iterutils.py | 3 +- synapse/util/ratelimitutils.py | 6 +- synapse/visibility.py | 6 +- tests/replication/_base.py | 6 +- tests/rest/client/test_rooms.py | 12 +-- tests/rest/client/utils.py | 6 +- tests/storage/test_cleanup_extrems.py | 18 ++-- tests/storage/test_room_search.py | 16 ++-- tests/unittest.py | 3 +- tests/util/test_linearizer.py | 3 +- tests/utils.py | 6 +- 74 files changed, 407 insertions(+), 509 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9257d2ccfa..6212961fb1 100644 --- a/poetry.lock 
+++ b/poetry.lock @@ -169,29 +169,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.10.1" +version = "24.2.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, ] [package.dependencies] @@ -205,7 +209,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 1dcc289df3..a533cad5ae 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -1040,10 +1040,10 @@ class Porter: return done, remaining + done async def _setup_state_group_id_seq(self) -> None: - curr_id: Optional[ - int - ] = await self.sqlite_store.db_pool.simple_select_one_onecol( - table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True + curr_id: Optional[int] = ( + await self.sqlite_store.db_pool.simple_select_one_onecol( + table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True + ) ) if not curr_id: @@ -1132,13 
+1132,13 @@ class Porter: ) async def _setup_auth_chain_sequence(self) -> None: - curr_chain_id: Optional[ - int - ] = await self.sqlite_store.db_pool.simple_select_one_onecol( - table="event_auth_chains", - keyvalues={}, - retcol="MAX(chain_id)", - allow_none=True, + curr_chain_id: Optional[int] = ( + await self.sqlite_store.db_pool.simple_select_one_onecol( + table="event_auth_chains", + keyvalues={}, + retcol="MAX(chain_id)", + allow_none=True, + ) ) def r(txn: LoggingTransaction) -> None: diff --git a/synapse/api/constants.py b/synapse/api/constants.py index f3d2c8073d..d25aff98ff 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -43,7 +43,6 @@ MAIN_TIMELINE: Final = "main" class Membership: - """Represents the membership states of a user in a room.""" INVITE: Final = "invite" diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 7ff8ad2d55..fbc1d58ecb 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -370,9 +370,11 @@ class RoomVersionCapability: MSC3244_CAPABILITIES = { cap.identifier: { - "preferred": cap.preferred_version.identifier - if cap.preferred_version is not None - else None, + "preferred": ( + cap.preferred_version.identifier + if cap.preferred_version is not None + else None + ), "support": [ v.identifier for v in KNOWN_ROOM_VERSIONS.values() diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b241dbf627..8a545a86c1 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -188,9 +188,9 @@ class SynapseHomeServer(HomeServer): PasswordResetSubmitTokenResource, ) - resources[ - "/_synapse/client/password_reset/email/submit_token" - ] = PasswordResetSubmitTokenResource(self) + resources["/_synapse/client/password_reset/email/submit_token"] = ( + PasswordResetSubmitTokenResource(self) + ) if name == "consent": from synapse.rest.consent.consent_resource import ConsentResource diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 34fa2bb655..19322471dc 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -362,16 +362,16 @@ class ApplicationServiceApi(SimpleHttpClient): # TODO: Update to stable prefixes once MSC3202 completes FCP merge if service.msc3202_transaction_extensions: if one_time_keys_count: - body[ - "org.matrix.msc3202.device_one_time_key_counts" - ] = one_time_keys_count - body[ - "org.matrix.msc3202.device_one_time_keys_count" - ] = one_time_keys_count + body["org.matrix.msc3202.device_one_time_key_counts"] = ( + one_time_keys_count + ) + body["org.matrix.msc3202.device_one_time_keys_count"] = ( + one_time_keys_count + ) if unused_fallback_keys: - body[ - "org.matrix.msc3202.device_unused_fallback_key_types" - ] = unused_fallback_keys + body["org.matrix.msc3202.device_unused_fallback_key_types"] = ( + unused_fallback_keys + ) if device_list_summary: body["org.matrix.msc3202.device_lists"] = { "changed": list(device_list_summary.changed), diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 3fe0f050cd..c7f3e6d35e 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -171,9 +171,9 @@ class RegistrationConfig(Config): refreshable_access_token_lifetime = self.parse_duration( refreshable_access_token_lifetime ) - self.refreshable_access_token_lifetime: Optional[ - int - ] = refreshable_access_token_lifetime + self.refreshable_access_token_lifetime: Optional[int] = ( + refreshable_access_token_lifetime + ) if ( self.session_lifetime is not None diff --git 
a/synapse/config/repository.py b/synapse/config/repository.py index 4655882b4b..1645470499 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -199,9 +199,9 @@ class ContentRepositoryConfig(Config): provider_config["module"] == "file_system" or provider_config["module"] == "synapse.rest.media.v1.storage_provider" ): - provider_config[ - "module" - ] = "synapse.media.storage_provider.FileStorageProviderBackend" + provider_config["module"] = ( + "synapse.media.storage_provider.FileStorageProviderBackend" + ) provider_class, parsed_config = load_module( provider_config, ("media_storage_providers", "" % i) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index c8b06f760e..f5abcde2db 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -88,8 +88,7 @@ class _EventSourceStore(Protocol): redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, "EventBase"]: - ... + ) -> Dict[str, "EventBase"]: ... def validate_event_for_room_version(event: "EventBase") -> None: diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 7ec696c6c0..36e0f47e51 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -93,16 +93,14 @@ class DictProperty(Generic[T]): self, instance: Literal[None], owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> "DictProperty": - ... + ) -> "DictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> T: - ... + ) -> T: ... def __get__( self, @@ -161,16 +159,14 @@ class DefaultDictProperty(DictProperty, Generic[T]): self, instance: Literal[None], owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> "DefaultDictProperty": - ... + ) -> "DefaultDictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> T: - ... + ) -> T: ... def __get__( self, diff --git a/synapse/events/utils.py b/synapse/events/utils.py index cc52d0d1e9..e0613d0dbc 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -612,9 +612,9 @@ class EventClientSerializer: serialized_aggregations = {} if event_aggregations.references: - serialized_aggregations[ - RelationTypes.REFERENCE - ] = event_aggregations.references + serialized_aggregations[RelationTypes.REFERENCE] = ( + event_aggregations.references + ) if event_aggregations.replace: # Include information about it in the relations dict. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index dc8cd5ec9a..65d3a661fe 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -169,9 +169,9 @@ class FederationServer(FederationBase): # We cache responses to state queries, as they take a while and often # come in waves. 
- self._state_resp_cache: ResponseCache[ - Tuple[str, Optional[str]] - ] = ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( + ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + ) self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( hs.get_clock(), "state_ids_resp", timeout_ms=30000 ) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index e9a2386a5c..b5c9fcff7c 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -88,9 +88,9 @@ class FederationRemoteSendQueue(AbstractFederationSender): # Stores the destinations we need to explicitly send presence to about a # given user. # Stream position -> (user_id, destinations) - self.presence_destinations: SortedDict[ - int, Tuple[str, Iterable[str]] - ] = SortedDict() + self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = ( + SortedDict() + ) # (destination, key) -> EDU self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {} diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py index 37cc3d3ff5..89e944bc17 100644 --- a/synapse/handlers/account.py +++ b/synapse/handlers/account.py @@ -118,10 +118,10 @@ class AccountHandler: } if self._use_account_validity_in_account_status: - status[ - "org.matrix.expired" - ] = await self._account_validity_handler.is_user_expired( - user_id.to_string() + status["org.matrix.expired"] = ( + await self._account_validity_handler.is_user_expired( + user_id.to_string() + ) ) return status diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 5f3dc30b63..ad2b0f5fcc 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -265,9 +265,9 @@ class DirectoryHandler: async def get_association(self, room_alias: RoomAlias) -> JsonDict: room_id = None if self.hs.is_mine(room_alias): - result: Optional[ - RoomAliasMapping - ] = await self.get_association_from_room_alias(room_alias) + result: Optional[RoomAliasMapping] = ( + await self.get_association_from_room_alias(room_alias) + ) if result: room_id = result.room_id diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2b7aad5b58..299588e476 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1001,11 +1001,11 @@ class FederationHandler: ) if include_auth_user_id: - event_content[ - EventContentFields.AUTHORISING_USER - ] = await self._event_auth_handler.get_user_which_could_invite( - room_id, - state_ids, + event_content[EventContentFields.AUTHORISING_USER] = ( + await self._event_auth_handler.get_user_which_could_invite( + room_id, + state_ids, + ) ) builder = self.event_builder_factory.for_room_version( diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 83f6a25981..c85deaed56 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -1367,9 +1367,9 @@ class FederationEventHandler: ) if remote_event.is_state() and remote_event.rejected_reason is None: - state_map[ - (remote_event.type, remote_event.state_key) - ] = remote_event.event_id + state_map[(remote_event.type, remote_event.state_key)] = ( + remote_event.event_id + ) return state_map diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7e5bb97f2a..0ce6eeee15 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1654,9 +1654,9 @@ class EventCreationHandler: expiry_ms=60 * 
60 * 1000, ) - self._external_cache_joined_hosts_updates[ - state_entry.state_group - ] = None + self._external_cache_joined_hosts_updates[state_entry.state_group] = ( + None + ) async def _validate_canonical_alias( self, diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 21d3c71d8e..37ee625f71 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -493,9 +493,9 @@ class WorkerPresenceHandler(BasePresenceHandler): # The number of ongoing syncs on this process, by (user ID, device ID). # Empty if _presence_enabled is false. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int - ] = {} + self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = ( + {} + ) self.notifier = hs.get_notifier() self.instance_id = hs.get_instance_id() @@ -818,9 +818,9 @@ class PresenceHandler(BasePresenceHandler): # Keeps track of the number of *ongoing* syncs on this process. While # this is non zero a user will never go offline. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int - ] = {} + self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = ( + {} + ) # Keeps track of the number of *ongoing* syncs on other processes. # diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 279d393a5a..e51e282a9f 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -320,9 +320,9 @@ class ProfileHandler: server_name = host if self._is_mine_server_name(server_name): - media_info: Optional[ - Union[LocalMedia, RemoteMedia] - ] = await self.store.get_local_media(media_id) + media_info: Optional[Union[LocalMedia, RemoteMedia]] = ( + await self.store.get_local_media(media_id) + ) else: media_info = await self.store.get_cached_remote_media(server_name, media_id) diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index 828a4b4cbd..931ac0c813 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -188,13 +188,13 @@ class RelationsHandler: if include_original_event: # Do not bundle aggregations when retrieving the original event because # we want the content before relations are applied to it. - return_value[ - "original_event" - ] = await self._event_serializer.serialize_event( - event, - now, - bundle_aggregations=None, - config=serialize_options, + return_value["original_event"] = ( + await self._event_serializer.serialize_event( + event, + now, + bundle_aggregations=None, + config=serialize_options, + ) ) if next_token: diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 6b116dce8c..3278426ca3 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -538,10 +538,10 @@ class RoomCreationHandler: # deep-copy the power-levels event before we start modifying it # note that if frozen_dicts are enabled, `power_levels` will be a frozen # dict so we can't just copy.deepcopy it. 
- initial_state[ - (EventTypes.PowerLevels, "") - ] = power_levels = copy_and_fixup_power_levels_contents( - initial_state[(EventTypes.PowerLevels, "")] + initial_state[(EventTypes.PowerLevels, "")] = power_levels = ( + copy_and_fixup_power_levels_contents( + initial_state[(EventTypes.PowerLevels, "")] + ) ) # Resolve the minimum power level required to send any state event @@ -1362,9 +1362,11 @@ class RoomCreationHandler: visibility = room_config.get("visibility", "private") preset_name = room_config.get( "preset", - RoomCreationPreset.PRIVATE_CHAT - if visibility == "private" - else RoomCreationPreset.PUBLIC_CHAT, + ( + RoomCreationPreset.PRIVATE_CHAT + if visibility == "private" + else RoomCreationPreset.PUBLIC_CHAT + ), ) try: preset_config = self._presets_dict[preset_name] diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index d238c40bcf..9e9f6cd062 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -1236,11 +1236,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): # If this is going to be a local join, additional information must # be included in the event content in order to efficiently validate # the event. - content[ - EventContentFields.AUTHORISING_USER - ] = await self.event_auth_handler.get_user_which_could_invite( - room_id, - state_before_join, + content[EventContentFields.AUTHORISING_USER] = ( + await self.event_auth_handler.get_user_which_could_invite( + room_id, + state_before_join, + ) ) return False, [] diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 9122a79b4c..5bb8a1439d 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1333,9 +1333,9 @@ class SyncHandler: and auth_event.state_key == member ): missing_members.discard(member) - additional_state_ids[ - (EventTypes.Member, member) - ] = auth_event.event_id + additional_state_ids[(EventTypes.Member, member)] = ( + auth_event.event_id + ) break if missing_members: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 884ecdacdd..c73a589e6c 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -931,8 +931,7 @@ class MatrixFederationHttpClient: try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, backoff_on_all_error_codes: bool = False, - ) -> JsonDict: - ... + ) -> JsonDict: ... @overload async def put_json( @@ -949,8 +948,7 @@ class MatrixFederationHttpClient: try_trailing_slash_on_400: bool = False, parser: Optional[ByteParser[T]] = None, backoff_on_all_error_codes: bool = False, - ) -> T: - ... + ) -> T: ... async def put_json( self, @@ -1140,8 +1138,7 @@ class MatrixFederationHttpClient: ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> JsonDict: - ... + ) -> JsonDict: ... @overload async def get_json( @@ -1154,8 +1151,7 @@ class MatrixFederationHttpClient: ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> T: - ... + ) -> T: ... async def get_json( self, @@ -1236,8 +1232,7 @@ class MatrixFederationHttpClient: ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: - ... + ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: ... 
@overload async def get_json_with_headers( @@ -1250,8 +1245,7 @@ class MatrixFederationHttpClient: ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> Tuple[T, Dict[bytes, List[bytes]]]: - ... + ) -> Tuple[T, Dict[bytes, List[bytes]]]: ... async def get_json_with_headers( self, diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index b22eb727b1..b73d06f1d3 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -61,20 +61,17 @@ logger = logging.getLogger(__name__) @overload -def parse_integer(request: Request, name: str, default: int) -> int: - ... +def parse_integer(request: Request, name: str, default: int) -> int: ... @overload -def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: - ... +def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ... @overload def parse_integer( request: Request, name: str, default: Optional[int] = None, required: bool = False -) -> Optional[int]: - ... +) -> Optional[int]: ... def parse_integer( @@ -105,8 +102,7 @@ def parse_integer_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: Optional[int] = None, -) -> Optional[int]: - ... +) -> Optional[int]: ... @overload @@ -115,8 +111,7 @@ def parse_integer_from_args( name: str, *, required: Literal[True], -) -> int: - ... +) -> int: ... @overload @@ -125,8 +120,7 @@ def parse_integer_from_args( name: str, default: Optional[int] = None, required: bool = False, -) -> Optional[int]: - ... +) -> Optional[int]: ... def parse_integer_from_args( @@ -172,20 +166,17 @@ def parse_integer_from_args( @overload -def parse_boolean(request: Request, name: str, default: bool) -> bool: - ... +def parse_boolean(request: Request, name: str, default: bool) -> bool: ... @overload -def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: - ... +def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: ... @overload def parse_boolean( request: Request, name: str, default: Optional[bool] = None, required: bool = False -) -> Optional[bool]: - ... +) -> Optional[bool]: ... def parse_boolean( @@ -216,8 +207,7 @@ def parse_boolean_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: bool, -) -> bool: - ... +) -> bool: ... @overload @@ -226,8 +216,7 @@ def parse_boolean_from_args( name: str, *, required: Literal[True], -) -> bool: - ... +) -> bool: ... @overload @@ -236,8 +225,7 @@ def parse_boolean_from_args( name: str, default: Optional[bool] = None, required: bool = False, -) -> Optional[bool]: - ... +) -> Optional[bool]: ... def parse_boolean_from_args( @@ -289,8 +277,7 @@ def parse_bytes_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: Optional[bytes] = None, -) -> Optional[bytes]: - ... +) -> Optional[bytes]: ... @overload @@ -300,8 +287,7 @@ def parse_bytes_from_args( default: Literal[None] = None, *, required: Literal[True], -) -> bytes: - ... +) -> bytes: ... @overload @@ -310,8 +296,7 @@ def parse_bytes_from_args( name: str, default: Optional[bytes] = None, required: bool = False, -) -> Optional[bytes]: - ... +) -> Optional[bytes]: ... def parse_bytes_from_args( @@ -355,8 +340,7 @@ def parse_string( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... @overload @@ -367,8 +351,7 @@ def parse_string( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... 
@overload @@ -380,8 +363,7 @@ def parse_string( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... def parse_string( @@ -437,8 +419,7 @@ def parse_enum( name: str, E: Type[EnumT], default: EnumT, -) -> EnumT: - ... +) -> EnumT: ... @overload @@ -448,8 +429,7 @@ def parse_enum( E: Type[EnumT], *, required: Literal[True], -) -> EnumT: - ... +) -> EnumT: ... def parse_enum( @@ -526,8 +506,7 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: - ... +) -> Optional[List[str]]: ... @overload @@ -538,8 +517,7 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: - ... +) -> List[str]: ... @overload @@ -550,8 +528,7 @@ def parse_strings_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: - ... +) -> List[str]: ... @overload @@ -563,8 +540,7 @@ def parse_strings_from_args( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: - ... +) -> Optional[List[str]]: ... def parse_strings_from_args( @@ -625,8 +601,7 @@ def parse_string_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... @overload @@ -638,8 +613,7 @@ def parse_string_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... @overload @@ -650,8 +624,7 @@ def parse_string_from_args( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... def parse_string_from_args( @@ -704,22 +677,19 @@ def parse_string_from_args( @overload -def parse_json_value_from_request(request: Request) -> JsonDict: - ... +def parse_json_value_from_request(request: Request) -> JsonDict: ... @overload def parse_json_value_from_request( request: Request, allow_empty_body: Literal[False] -) -> JsonDict: - ... +) -> JsonDict: ... @overload def parse_json_value_from_request( request: Request, allow_empty_body: bool = False -) -> Optional[JsonDict]: - ... +) -> Optional[JsonDict]: ... def parse_json_value_from_request( @@ -847,7 +817,6 @@ def assert_params_in_dict(body: JsonDict, required: StrCollection) -> None: class RestServlet: - """A Synapse REST Servlet. An implementing class can either provide its own custom 'register' method, diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 548d255b69..b36f397574 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -744,8 +744,7 @@ def preserve_fn( @overload -def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: - ... +def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: ... def preserve_fn( @@ -774,8 +773,7 @@ def run_in_background( @overload def run_in_background( f: Callable[P, R], *args: P.args, **kwargs: P.kwargs -) -> "defer.Deferred[R]": - ... +) -> "defer.Deferred[R]": ... 
def run_in_background( # type: ignore[misc] diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 78b9fffbfb..7a3c805cc5 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -388,15 +388,13 @@ def only_if_tracing(func: Callable[P, R]) -> Callable[P, Optional[R]]: @overload def ensure_active_span( message: str, -) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: - ... +) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: ... @overload def ensure_active_span( message: str, ret: T -) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: - ... +) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: ... def ensure_active_span( diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index 52859ed490..0e875132f6 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -1002,9 +1002,9 @@ class MediaRepository: ) t_width = min(m_width, t_width) t_height = min(m_height, t_height) - thumbnails[ - (t_width, t_height, requirement.media_type) - ] = requirement.method + thumbnails[(t_width, t_height, requirement.media_type)] = ( + requirement.method + ) # Now we generate the thumbnails for each dimension, store it for (t_width, t_height, t_type), t_method in thumbnails.items(): diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py index 6b4c64f7a5..bd25985686 100644 --- a/synapse/metrics/jemalloc.py +++ b/synapse/metrics/jemalloc.py @@ -42,14 +42,12 @@ class JemallocStats: @overload def _mallctl( self, name: str, read: Literal[True] = True, write: Optional[int] = None - ) -> int: - ... + ) -> int: ... @overload def _mallctl( self, name: str, read: Literal[False], write: Optional[int] = None - ) -> None: - ... + ) -> None: ... def _mallctl( self, name: str, read: bool = True, write: Optional[int] = None diff --git a/synapse/notifier.py b/synapse/notifier.py index 62d954298c..e87333a80a 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -469,8 +469,7 @@ class Notifier: new_token: RoomStreamToken, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... @overload def on_new_event( @@ -479,8 +478,7 @@ class Notifier: new_token: MultiWriterStreamToken, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... @overload def on_new_event( @@ -497,8 +495,7 @@ class Notifier: new_token: int, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... def on_new_event( self, diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index b4bd88f308..f1ffc8115f 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -377,12 +377,14 @@ class Mailer: # # Note that many email clients will not render the unsubscribe link # unless DKIM, etc. is properly setup. 
- additional_headers={ - "List-Unsubscribe-Post": "List-Unsubscribe=One-Click", - "List-Unsubscribe": f"<{unsubscribe_link}>", - } - if unsubscribe_link - else None, + additional_headers=( + { + "List-Unsubscribe-Post": "List-Unsubscribe=One-Click", + "List-Unsubscribe": f"<{unsubscribe_link}>", + } + if unsubscribe_link + else None + ), ) async def _get_room_vars( diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index a82ad49e01..9aa8d90bfe 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -259,9 +259,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): url_args.append(txn_id) if cls.METHOD == "POST": - request_func: Callable[ - ..., Awaitable[Any] - ] = client.post_json_get_json + request_func: Callable[..., Awaitable[Any]] = ( + client.post_json_get_json + ) elif cls.METHOD == "PUT": request_func = client.put_json elif cls.METHOD == "GET": diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py index ce47d8035c..a95771b5f6 100644 --- a/synapse/replication/tcp/external_cache.py +++ b/synapse/replication/tcp/external_cache.py @@ -70,9 +70,9 @@ class ExternalCache: def __init__(self, hs: "HomeServer"): if hs.config.redis.redis_enabled: - self._redis_connection: Optional[ - "ConnectionHandler" - ] = hs.get_outbound_redis_connection() + self._redis_connection: Optional["ConnectionHandler"] = ( + hs.get_outbound_redis_connection() + ) else: self._redis_connection = None diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 07e0fb71f2..6da1d79168 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -237,10 +237,12 @@ class PurgeHistoryStatusRestServlet(RestServlet): raise NotFoundError("purge id '%s' not found" % purge_id) result: JsonDict = { - "status": purge_task.status - if purge_task.status == TaskStatus.COMPLETE - or purge_task.status == TaskStatus.FAILED - else "active", + "status": ( + purge_task.status + if purge_task.status == TaskStatus.COMPLETE + or purge_task.status == TaskStatus.FAILED + else "active" + ), } if purge_task.error: result["error"] = purge_task.error diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index a9645e4af7..4e34e46512 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -1184,12 +1184,14 @@ class RateLimitRestServlet(RestServlet): # convert `null` to `0` for consistency # both values do the same in retelimit handler ret = { - "messages_per_second": 0 - if ratelimit.messages_per_second is None - else ratelimit.messages_per_second, - "burst_count": 0 - if ratelimit.burst_count is None - else ratelimit.burst_count, + "messages_per_second": ( + 0 + if ratelimit.messages_per_second is None + else ratelimit.messages_per_second + ), + "burst_count": ( + 0 if ratelimit.burst_count is None else ratelimit.burst_count + ), } else: ret = {} diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 0cdc4cc4f7..12ffca984f 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -112,9 +112,9 @@ class AccountDataServlet(RestServlet): self._hs.config.experimental.msc4010_push_rules_account_data and account_data_type == AccountDataTypes.PUSH_RULES ): - account_data: Optional[ - JsonMapping - ] = await self._push_rules_handler.push_rules_for_user(requester.user) + account_data: Optional[JsonMapping] = ( + await self._push_rules_handler.push_rules_for_user(requester.user) + ) else: 
account_data = await self.store.get_global_account_data_by_type_for_user( user_id, account_data_type diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 3af2b7dfd9..2b103ca6a8 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -313,12 +313,12 @@ class SyncRestServlet(RestServlet): # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md # states that this field should always be included, as long as the server supports the feature. - response[ - "org.matrix.msc2732.device_unused_fallback_key_types" - ] = sync_result.device_unused_fallback_key_types - response[ - "device_unused_fallback_key_types" - ] = sync_result.device_unused_fallback_key_types + response["org.matrix.msc2732.device_unused_fallback_key_types"] = ( + sync_result.device_unused_fallback_key_types + ) + response["device_unused_fallback_key_types"] = ( + sync_result.device_unused_fallback_key_types + ) if joined: response["rooms"][Membership.JOIN] = joined @@ -543,9 +543,9 @@ class SyncRestServlet(RestServlet): if room.unread_thread_notifications: result["unread_thread_notifications"] = room.unread_thread_notifications if self._msc3773_enabled: - result[ - "org.matrix.msc3773.unread_thread_notifications" - ] = room.unread_thread_notifications + result["org.matrix.msc3773.unread_thread_notifications"] = ( + room.unread_thread_notifications + ) result["summary"] = room.summary if self._msc2654_enabled: result["org.matrix.msc2654.unread_count"] = room.unread_count diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 6afe4a7bcc..dc7325fc57 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -191,10 +191,10 @@ class RemoteKey(RestServlet): server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {} for server_name, key_ids in query.items(): if key_ids: - results: Mapping[ - str, Optional[FetchKeyResultForRemote] - ] = await self.store.get_server_keys_json_for_remote( - server_name, key_ids + results: Mapping[str, Optional[FetchKeyResultForRemote]] = ( + await self.store.get_server_keys_json_for_remote( + server_name, key_ids + ) ) else: results = await self.store.get_all_server_keys_json_for_remote( diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 015e49ab81..72b291889b 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -603,15 +603,15 @@ class StateResolutionHandler: self.resolve_linearizer = Linearizer(name="state_resolve_lock") # dict of set of event_ids -> _StateCacheEntry. - self._state_cache: ExpiringCache[ - FrozenSet[int], _StateCacheEntry - ] = ExpiringCache( - cache_name="state_cache", - clock=self.clock, - max_len=100000, - expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, - iterable=True, - reset_expiry_on_get=True, + self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( + ExpiringCache( + cache_name="state_cache", + clock=self.clock, + max_len=100000, + expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, + iterable=True, + reset_expiry_on_get=True, + ) ) # diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 8de16db1d0..da926ad146 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -52,8 +52,7 @@ class Clock(Protocol): # This is usually synapse.util.Clock, but it's replaced with a FakeClock in tests. 
# We only ever sleep(0) though, so that other async functions can make forward # progress without waiting for stateres to complete. - def sleep(self, duration_ms: float) -> Awaitable[None]: - ... + def sleep(self, duration_ms: float) -> Awaitable[None]: ... class StateResolutionStore(Protocol): @@ -61,13 +60,11 @@ class StateResolutionStore(Protocol): # TestStateResolutionStore in tests. def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: - ... + ) -> Awaitable[Dict[str, EventBase]]: ... def get_auth_chain_difference( self, room_id: str, state_sets: List[Set[str]] - ) -> Awaitable[Set[str]]: - ... + ) -> Awaitable[Set[str]]: ... # We want to await to the reactor occasionally during state res when dealing @@ -742,8 +739,7 @@ async def _get_event( event_map: Dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[False] = False, -) -> EventBase: - ... +) -> EventBase: ... @overload @@ -753,8 +749,7 @@ async def _get_event( event_map: Dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[True], -) -> Optional[EventBase]: - ... +) -> Optional[EventBase]: ... async def _get_event( diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 9df4edee38..f473294070 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -836,9 +836,9 @@ class BackgroundUpdater: c.execute(sql) if isinstance(self.db_pool.engine, engines.PostgresEngine): - runner: Optional[ - Callable[[LoggingDatabaseConnection], None] - ] = create_index_psql + runner: Optional[Callable[[LoggingDatabaseConnection], None]] = ( + create_index_psql + ) elif psql_only: runner = None else: diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py index 69d5999c0a..84699a2ee1 100644 --- a/synapse/storage/controllers/persist_events.py +++ b/synapse/storage/controllers/persist_events.py @@ -773,9 +773,9 @@ class EventsPersistenceStorageController: ) # Remove any events which are prev_events of any existing events. - existing_prevs: Collection[ - str - ] = await self.persist_events_store._get_events_which_are_prevs(result) + existing_prevs: Collection[str] = ( + await self.persist_events_store._get_events_which_are_prevs(result) + ) result.difference_update(existing_prevs) # Finally handle the case where the new events have soft-failed prev diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 8dc9080842..d7d202f028 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -111,8 +111,7 @@ class _PoolConnection(Connection): A Connection from twisted.enterprise.adbapi.Connection. """ - def reconnect(self) -> None: - ... + def reconnect(self) -> None: ... def make_pool( @@ -1603,8 +1602,7 @@ class DatabasePool: retcols: Collection[str], allow_none: Literal[False] = False, desc: str = "simple_select_one", - ) -> Tuple[Any, ...]: - ... + ) -> Tuple[Any, ...]: ... @overload async def simple_select_one( @@ -1614,8 +1612,7 @@ class DatabasePool: retcols: Collection[str], allow_none: Literal[True] = True, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: - ... + ) -> Optional[Tuple[Any, ...]]: ... async def simple_select_one( self, @@ -1654,8 +1651,7 @@ class DatabasePool: retcol: str, allow_none: Literal[False] = False, desc: str = "simple_select_one_onecol", - ) -> Any: - ... + ) -> Any: ... 
@overload async def simple_select_one_onecol( @@ -1665,8 +1661,7 @@ class DatabasePool: retcol: str, allow_none: Literal[True] = True, desc: str = "simple_select_one_onecol", - ) -> Optional[Any]: - ... + ) -> Optional[Any]: ... async def simple_select_one_onecol( self, @@ -1706,8 +1701,7 @@ class DatabasePool: keyvalues: Dict[str, Any], retcol: str, allow_none: Literal[False] = False, - ) -> Any: - ... + ) -> Any: ... @overload @classmethod @@ -1718,8 +1712,7 @@ class DatabasePool: keyvalues: Dict[str, Any], retcol: str, allow_none: Literal[True] = True, - ) -> Optional[Any]: - ... + ) -> Optional[Any]: ... @classmethod def simple_select_one_onecol_txn( @@ -2501,8 +2494,7 @@ def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, columns: Tuple[str, str], iterable: Collection[Tuple[Any, Any]], -) -> Tuple[str, list]: - ... +) -> Tuple[str, list]: ... def make_tuple_in_list_sql_clause( diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 3e011f3340..8dbcb3f5a0 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1701,9 +1701,9 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): # Map of (user_id, device_id) -> bool. If there is an entry that implies # the device exists. - self.device_id_exists_cache: LruCache[ - Tuple[str, str], Literal[True] - ] = LruCache(cache_name="device_id_exists", max_size=10000) + self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = ( + LruCache(cache_name="device_id_exists", max_size=10000) + ) async def store_device( self, diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index c96371a0d3..b219ea70ee 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -256,8 +256,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker self, query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: - ... + ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( @@ -265,8 +264,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: - ... + ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( @@ -274,8 +272,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: Literal[True], include_deleted_devices: Literal[True], - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: - ... + ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ... 
@trace @cancellable diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index d5942a10b2..a6fda3f43c 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1292,9 +1292,9 @@ class PersistEventsStore: Returns: filtered list """ - new_events_and_contexts: OrderedDict[ - str, Tuple[EventBase, EventContext] - ] = OrderedDict() + new_events_and_contexts: OrderedDict[str, Tuple[EventBase, EventContext]] = ( + OrderedDict() + ) for event, context in events_and_contexts: prev_event_context = new_events_and_contexts.get(event.event_id) if prev_event_context: diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 9c3775bb7c..4c09567a7a 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -263,13 +263,13 @@ class EventsWorkerStore(SQLBaseStore): 5 * 60 * 1000, ) - self._get_event_cache: AsyncLruCache[ - Tuple[str], EventCacheEntry - ] = AsyncLruCache( - cache_name="*getEvent*", - max_size=hs.config.caches.event_cache_size, - # `extra_index_cb` Returns a tuple as that is the key type - extra_index_cb=lambda _, v: (v.event.room_id,), + self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( + AsyncLruCache( + cache_name="*getEvent*", + max_size=hs.config.caches.event_cache_size, + # `extra_index_cb` Returns a tuple as that is the key type + extra_index_cb=lambda _, v: (v.event.room_id,), + ) ) # Map from event ID to a deferred that will result in a map from event @@ -459,8 +459,7 @@ class EventsWorkerStore(SQLBaseStore): allow_rejected: bool = ..., allow_none: Literal[False] = ..., check_room_id: Optional[str] = ..., - ) -> EventBase: - ... + ) -> EventBase: ... @overload async def get_event( @@ -471,8 +470,7 @@ class EventsWorkerStore(SQLBaseStore): allow_rejected: bool = ..., allow_none: Literal[True] = ..., check_room_id: Optional[str] = ..., - ) -> Optional[EventBase]: - ... + ) -> Optional[EventBase]: ... @cancellable async def get_event( @@ -800,9 +798,9 @@ class EventsWorkerStore(SQLBaseStore): # to all the events we pulled from the DB (this will result in this # function returning more events than requested, but that can happen # already due to `_get_events_from_db`). - fetching_deferred: ObservableDeferred[ - Dict[str, EventCacheEntry] - ] = ObservableDeferred(defer.Deferred(), consumeErrors=True) + fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = ( + ObservableDeferred(defer.Deferred(), consumeErrors=True) + ) for event_id in missing_events_ids: self._current_event_fetches[event_id] = fetching_deferred @@ -1871,9 +1869,9 @@ class EventsWorkerStore(SQLBaseStore): " LIMIT ?" ) txn.execute(sql, (-last_id, -current_id, instance_name, limit)) - new_event_updates: List[ - Tuple[int, Tuple[str, str, str, str, str, str]] - ] = [] + new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = ( + [] + ) row: Tuple[int, str, str, str, str, str, str] # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index 0794cc6d25..8277ad8c33 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -79,9 +79,9 @@ class LockStore(SQLBaseStore): # A map from `(lock_name, lock_key)` to lock that we think we # currently hold. 
- self._live_lock_tokens: WeakValueDictionary[ - Tuple[str, str], Lock - ] = WeakValueDictionary() + self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = ( + WeakValueDictionary() + ) # A map from `(lock_name, lock_key, token)` to read/write lock that we # think we currently hold. For a given lock_name/lock_key, there can be diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index b5ed1bf9c8..6128332af8 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -158,9 +158,9 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore): ) if hs.config.media.can_load_media_repo: - self.unused_expiration_time: Optional[ - int - ] = hs.config.media.unused_expiration_time + self.unused_expiration_time: Optional[int] = ( + hs.config.media.unused_expiration_time + ) else: self.unused_expiration_time = None diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 8a426d2875..d513c42530 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -394,9 +394,9 @@ class ReceiptsWorkerStore(SQLBaseStore): content: JsonDict = {} for receipt_type, user_id, event_id, data in rows: - content.setdefault(event_id, {}).setdefault(receipt_type, {})[ - user_id - ] = db_to_json(data) + content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = ( + db_to_json(data) + ) return [{"type": EduTypes.RECEIPT, "room_id": room_id, "content": content}] @@ -483,9 +483,9 @@ class ReceiptsWorkerStore(SQLBaseStore): if user_id in receipt_type_dict: # existing receipt # is the existing receipt threaded and we are currently processing an unthreaded one? if "thread_id" in receipt_type_dict[user_id] and not thread_id: - receipt_type_dict[ - user_id - ] = receipt_data # replace with unthreaded one + receipt_type_dict[user_id] = ( + receipt_data # replace with unthreaded one + ) else: # receipt does not exist, just set it receipt_type_dict[user_id] = receipt_data if thread_id: diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index 3220d515d9..b2a67aff89 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -768,12 +768,10 @@ class StateMapWrapper(Dict[StateKey, str]): return super().__getitem__(key) @overload - def get(self, key: Tuple[str, str]) -> Optional[str]: - ... + def get(self, key: Tuple[str, str]) -> Optional[str]: ... @overload - def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: - ... + def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: ... def get( self, key: StateKey, default: Union[str, _T, None] = None diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 19041cc35b..7ab6003f61 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -988,8 +988,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): txn: LoggingTransaction, event_id: str, allow_none: Literal[False] = False, - ) -> int: - ... + ) -> int: ... @overload def get_stream_id_for_event_txn( @@ -997,8 +996,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): txn: LoggingTransaction, event_id: str, allow_none: bool = False, - ) -> Optional[int]: - ... + ) -> Optional[int]: ... 
def get_stream_id_for_event_txn( self, @@ -1476,12 +1474,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): _EventDictReturn(event_id, topological_ordering, stream_ordering) for event_id, instance_name, topological_ordering, stream_ordering in txn if _filter_results( - lower_token=to_token - if direction == Direction.BACKWARDS - else from_token, - upper_token=from_token - if direction == Direction.BACKWARDS - else to_token, + lower_token=( + to_token if direction == Direction.BACKWARDS else from_token + ), + upper_token=( + from_token if direction == Direction.BACKWARDS else to_token + ), instance_name=instance_name, topological_ordering=topological_ordering, stream_ordering=stream_ordering, diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py index 7b95616432..4956870b1a 100644 --- a/synapse/storage/databases/main/task_scheduler.py +++ b/synapse/storage/databases/main/task_scheduler.py @@ -136,12 +136,12 @@ class TaskSchedulerWorkerStore(SQLBaseStore): "status": task.status, "timestamp": task.timestamp, "resource_id": task.resource_id, - "params": None - if task.params is None - else json_encoder.encode(task.params), - "result": None - if task.result is None - else json_encoder.encode(task.result), + "params": ( + None if task.params is None else json_encoder.encode(task.params) + ), + "result": ( + None if task.result is None else json_encoder.encode(task.result) + ), "error": task.error, }, desc="insert_scheduled_task", diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index a1c4b8c6c3..0513e7dc06 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -745,9 +745,11 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore): p.user_id, get_localpart_from_id(p.user_id), get_domain_from_id(p.user_id), - _filter_text_for_index(p.display_name) - if p.display_name - else None, + ( + _filter_text_for_index(p.display_name) + if p.display_name + else None + ), ) for p in profiles ], diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index e64495ba8d..d4ac74c1ee 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -120,11 +120,11 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore): # TODO: this hasn't been tuned yet 50000, ) - self._state_group_members_cache: DictionaryCache[ - int, StateKey, str - ] = DictionaryCache( - "*stateGroupMembersCache*", - 500000, + self._state_group_members_cache: DictionaryCache[int, StateKey, str] = ( + DictionaryCache( + "*stateGroupMembersCache*", + 500000, + ) ) def get_max_state_group_txn(txn: Cursor) -> int: diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 8c29236b59..ad222e7e2d 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -48,8 +48,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM @property @abc.abstractmethod - def single_threaded(self) -> bool: - ... + def single_threaded(self) -> bool: ... @property @abc.abstractmethod @@ -68,8 +67,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM @abc.abstractmethod def check_database( self, db_conn: ConnectionType, allow_outdated_version: bool = False - ) -> None: - ... + ) -> None: ... 
@abc.abstractmethod def check_new_database(self, txn: CursorType) -> None: @@ -79,27 +77,22 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM ... @abc.abstractmethod - def convert_param_style(self, sql: str) -> str: - ... + def convert_param_style(self, sql: str) -> str: ... # This method would ideally take a plain ConnectionType, but it seems that # the Sqlite engine expects to use LoggingDatabaseConnection.cursor # instead of sqlite3.Connection.cursor: only the former takes a txn_name. @abc.abstractmethod - def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: - ... + def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: ... @abc.abstractmethod - def is_deadlock(self, error: Exception) -> bool: - ... + def is_deadlock(self, error: Exception) -> bool: ... @abc.abstractmethod - def is_connection_closed(self, conn: ConnectionType) -> bool: - ... + def is_connection_closed(self, conn: ConnectionType) -> bool: ... @abc.abstractmethod - def lock_table(self, txn: Cursor, table: str) -> None: - ... + def lock_table(self, txn: Cursor, table: str) -> None: ... @property @abc.abstractmethod diff --git a/synapse/storage/types.py b/synapse/storage/types.py index b4e0a8f576..74f60cc590 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -42,20 +42,17 @@ SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]] class Cursor(Protocol): - def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: - ... + def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ... - def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any: - ... + def executemany( + self, sql: str, parameters: Sequence[SQLQueryParameters] + ) -> Any: ... - def fetchone(self) -> Optional[Tuple]: - ... + def fetchone(self) -> Optional[Tuple]: ... - def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: - ... + def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ... - def fetchall(self) -> List[Tuple]: - ... + def fetchall(self) -> List[Tuple]: ... @property def description( @@ -70,36 +67,28 @@ class Cursor(Protocol): def rowcount(self) -> int: return 0 - def __iter__(self) -> Iterator[Tuple]: - ... + def __iter__(self) -> Iterator[Tuple]: ... - def close(self) -> None: - ... + def close(self) -> None: ... class Connection(Protocol): - def cursor(self) -> Cursor: - ... + def cursor(self) -> Cursor: ... - def close(self) -> None: - ... + def close(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def rollback(self) -> None: - ... + def rollback(self) -> None: ... - def __enter__(self) -> "Connection": - ... + def __enter__(self) -> "Connection": ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], - ) -> Optional[bool]: - ... + ) -> Optional[bool]: ... class DBAPI2Module(Protocol): @@ -129,24 +118,20 @@ class DBAPI2Module(Protocol): # explain why this is necessary for safety. TL;DR: we shouldn't be able to write # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 . @property - def Warning(self) -> Type[Exception]: - ... + def Warning(self) -> Type[Exception]: ... @property - def Error(self) -> Type[Exception]: - ... + def Error(self) -> Type[Exception]: ... # Errors are divided into `InterfaceError`s (something went wrong in the database # driver) and `DatabaseError`s (something went wrong in the database). 
These are # both subclasses of `Error`, but we can't currently express this in type # annotations due to https://github.com/python/mypy/issues/8397 @property - def InterfaceError(self) -> Type[Exception]: - ... + def InterfaceError(self) -> Type[Exception]: ... @property - def DatabaseError(self) -> Type[Exception]: - ... + def DatabaseError(self) -> Type[Exception]: ... # Everything below is a subclass of `DatabaseError`. @@ -155,8 +140,7 @@ class DBAPI2Module(Protocol): # - An invalid date time was provided. # - A string contained a null code point. @property - def DataError(self) -> Type[Exception]: - ... + def DataError(self) -> Type[Exception]: ... # Roughly: something went wrong in the database, but it's not within the application # programmer's control. Examples: @@ -167,21 +151,18 @@ class DBAPI2Module(Protocol): # - The database ran out of resources, such as storage, memory, connections, etc. # - The database encountered an error from the operating system. @property - def OperationalError(self) -> Type[Exception]: - ... + def OperationalError(self) -> Type[Exception]: ... # Roughly: we've given the database data which breaks a rule we asked it to enforce. # Examples: # - Stop, criminal scum! You violated the foreign key constraint # - Also check constraints, non-null constraints, etc. @property - def IntegrityError(self) -> Type[Exception]: - ... + def IntegrityError(self) -> Type[Exception]: ... # Roughly: something went wrong within the database server itself. @property - def InternalError(self) -> Type[Exception]: - ... + def InternalError(self) -> Type[Exception]: ... # Roughly: the application did something silly that needs to be fixed. Examples: # - We don't have permissions to do something. @@ -189,13 +170,11 @@ class DBAPI2Module(Protocol): # - We tried to use a reserved name. # - We referred to a column that doesn't exist. @property - def ProgrammingError(self) -> Type[Exception]: - ... + def ProgrammingError(self) -> Type[Exception]: ... # Roughly: we've tried to do something that this database doesn't support. @property - def NotSupportedError(self) -> Type[Exception]: - ... + def NotSupportedError(self) -> Type[Exception]: ... # We originally wrote # def connect(self, *args, **kwargs) -> Connection: ... @@ -204,8 +183,7 @@ class DBAPI2Module(Protocol): # psycopg2.connect doesn't have a mandatory positional argument. Instead, we use # the following slightly unusual workaround. @property - def connect(self) -> Callable[..., Connection]: - ... + def connect(self) -> Callable[..., Connection]: ... __all__ = ["Cursor", "Connection", "DBAPI2Module"] diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index 423ede5969..6ec07e2d73 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -57,6 +57,7 @@ class EventInternalMetadata: (Added in synapse 0.99.0, so may be unreliable for events received before that) """ ... + def get_send_on_behalf_of(self) -> Optional[str]: """Whether this server should send the event on behalf of another server. This is used by the federation "send_join" API to forward the initial join @@ -65,6 +66,7 @@ class EventInternalMetadata: returns a str with the name of the server this event is sent on behalf of. """ ... + def need_to_check_redaction(self) -> bool: """Whether the redaction event needs to be rechecked when fetching from the database. @@ -76,6 +78,7 @@ class EventInternalMetadata: due to auth rules, then this will always return false. """ ... 
+ def is_soft_failed(self) -> bool: """Whether the event has been soft failed. @@ -86,6 +89,7 @@ class EventInternalMetadata: therefore not to current state). """ ... + def should_proactively_send(self) -> bool: """Whether the event, if ours, should be sent to other clients and servers. @@ -94,6 +98,7 @@ class EventInternalMetadata: can still explicitly fetch the event. """ ... + def is_redacted(self) -> bool: """Whether the event has been redacted. @@ -101,6 +106,7 @@ class EventInternalMetadata: marked as redacted without needing to make another database call. """ ... + def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" ... diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index d3ee718375..a88982a04c 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -976,12 +976,12 @@ class StreamToken: return attr.evolve(self, **{key.value: new_value}) @overload - def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: - ... + def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: ... @overload - def get_field(self, key: Literal[StreamKeyType.RECEIPT]) -> MultiWriterStreamToken: - ... + def get_field( + self, key: Literal[StreamKeyType.RECEIPT] + ) -> MultiWriterStreamToken: ... @overload def get_field( @@ -995,14 +995,12 @@ class StreamToken: StreamKeyType.TYPING, StreamKeyType.UN_PARTIAL_STATED_ROOMS, ], - ) -> int: - ... + ) -> int: ... @overload def get_field( self, key: StreamKeyType - ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: - ... + ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: ... def get_field( self, key: StreamKeyType diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 914d4fd747..d50b5dd346 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -357,24 +357,21 @@ T4 = TypeVar("T4") @overload def gather_results( deferredList: Tuple[()], consumeErrors: bool = ... -) -> "defer.Deferred[Tuple[()]]": - ... +) -> "defer.Deferred[Tuple[()]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1]]": - ... +) -> "defer.Deferred[Tuple[T1]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2]]": - ... +) -> "defer.Deferred[Tuple[T1, T2]]": ... @overload @@ -383,8 +380,7 @@ def gather_results( "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]" ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3]]": ... @overload @@ -396,8 +392,7 @@ def gather_results( "defer.Deferred[T4]", ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ... def gather_results( # type: ignore[misc] @@ -782,18 +777,15 @@ def stop_cancellation(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]": @overload -def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: - ... 
+def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: ... def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index a52ba59a34..462016f820 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -152,12 +152,10 @@ class ExpiringCache(Generic[KT, VT]): return key in self._cache @overload - def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def get(self, key: KT, default: T) -> Union[VT, T]: - ... + def get(self, key: KT, default: T) -> Union[VT, T]: ... def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]: try: diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index a1b4f5b6a7..481a1a621e 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -580,8 +580,7 @@ class LruCache(Generic[KT, VT]): callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Optional[VT]: - ... + ) -> Optional[VT]: ... @overload def cache_get( @@ -590,8 +589,7 @@ class LruCache(Generic[KT, VT]): callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Union[T, VT]: - ... + ) -> Union[T, VT]: ... @synchronized def cache_get( @@ -634,16 +632,14 @@ class LruCache(Generic[KT, VT]): key: tuple, default: Literal[None] = None, update_metrics: bool = True, - ) -> Union[None, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[None, Iterable[Tuple[KT, VT]]]: ... @overload def cache_get_multi( key: tuple, default: T, update_metrics: bool = True, - ) -> Union[T, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[T, Iterable[Tuple[KT, VT]]]: ... @synchronized def cache_get_multi( @@ -728,12 +724,10 @@ class LruCache(Generic[KT, VT]): return value @overload - def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def cache_pop(key: KT, default: T) -> Union[T, VT]: - ... + def cache_pop(key: KT, default: T) -> Union[T, VT]: ... @synchronized def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]: diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 082ad8cedb..b73f690b88 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -50,8 +50,7 @@ class _SelfSlice(Sized, Protocol): returned. """ - def __getitem__(self: S, i: slice) -> S: - ... + def __getitem__(self: S, i: slice) -> S: ... 
def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]: diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index dc9bddb00d..8ead72bb7a 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -177,9 +177,9 @@ class FederationRateLimiter: clock=clock, config=config, metrics_name=metrics_name ) - self.ratelimiters: DefaultDict[ - str, "_PerHostRatelimiter" - ] = collections.defaultdict(new_limiter) + self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = ( + collections.defaultdict(new_limiter) + ) with _rate_limiter_instances_lock: _rate_limiter_instances.add(self) diff --git a/synapse/visibility.py b/synapse/visibility.py index e58f649aaf..d1d478129f 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -129,9 +129,9 @@ async def filter_events_for_client( retention_policies: Dict[str, RetentionPolicy] = {} for room_id in room_ids: - retention_policies[ - room_id - ] = await storage.main.get_retention_policy_for_room(room_id) + retention_policies[room_id] = ( + await storage.main.get_retention_policy_for_room(room_id) + ) def allowed(event: EventBase) -> Optional[EventBase]: return _check_client_allowed_to_see_event( diff --git a/tests/replication/_base.py b/tests/replication/_base.py index d2220f8195..8437da1cdd 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -495,9 +495,9 @@ class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" def __init__(self) -> None: - self._subscribers_by_channel: Dict[ - bytes, Set["FakeRedisPubSubProtocol"] - ] = defaultdict(set) + self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = ( + defaultdict(set) + ) def add_subscriber(self, conn: "FakeRedisPubSubProtocol", channel: bytes) -> None: """A connection has called SUBSCRIBE""" diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index b11a73e92b..d2f2ded487 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1222,9 +1222,9 @@ class RoomJoinTestCase(RoomBase): """ # Register a dummy callback. Make it allow all room joins for now. - return_value: Union[ - Literal["NOT_SPAM"], Tuple[Codes, dict], Codes - ] = synapse.module_api.NOT_SPAM + return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = ( + synapse.module_api.NOT_SPAM + ) async def user_may_join_room( userid: str, @@ -1664,9 +1664,9 @@ class RoomMessagesTestCase(RoomBase): expected_fields: dict, ) -> None: class SpamCheck: - mock_return_value: Union[ - str, bool, Codes, Tuple[Codes, JsonDict], bool - ] = "NOT_SPAM" + mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict], bool] = ( + "NOT_SPAM" + ) mock_content: Optional[JsonDict] = None async def check_event_for_spam( diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py index 10cfe22d8e..daa68d78b9 100644 --- a/tests/rest/client/utils.py +++ b/tests/rest/client/utils.py @@ -87,8 +87,7 @@ class RestHelper: expect_code: Literal[200] = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> str: - ... + ) -> str: ... @overload def create_room_as( @@ -100,8 +99,7 @@ class RestHelper: expect_code: int = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> Optional[str]: - ... + ) -> Optional[str]: ... 
def create_room_as( self, diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index 249c6b39f7..d5b9996284 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -337,15 +337,15 @@ class CleanupExtremDummyEventsTestCase(HomeserverTestCase): """Simple test to ensure that _expire_rooms_to_exclude_from_dummy_event_insertion() expires old entries correctly. """ - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "1" - ] = 100000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "2" - ] = 200000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "3" - ] = 300000 + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["1"] = ( + 100000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["2"] = ( + 200000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["3"] = ( + 300000 + ) self.event_creator_handler._expire_rooms_to_exclude_from_dummy_event_insertion() # All entries within time frame diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index 01c5324802..1eab89f140 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -328,9 +328,11 @@ class MessageSearchTest(HomeserverTestCase): self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), @@ -346,9 +348,11 @@ class MessageSearchTest(HomeserverTestCase): self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), diff --git a/tests/unittest.py b/tests/unittest.py index 33c9a384ea..6fe0cd4a2d 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -109,8 +109,7 @@ class _TypedFailure(Generic[_ExcType], Protocol): """Extension to twisted.Failure, where the 'value' has a certain type.""" @property - def value(self) -> _ExcType: - ... + def value(self) -> _ExcType: ... def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]: diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index d4268bc2e2..7cbb1007da 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -34,8 +34,7 @@ from tests import unittest class UnblockFunction(Protocol): - def __call__(self, pump_reactor: bool = True) -> None: - ... + def __call__(self, pump_reactor: bool = True) -> None: ... class LinearizerTestCase(unittest.TestCase): diff --git a/tests/utils.py b/tests/utils.py index b5dbd60a9c..757320ebee 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -121,13 +121,11 @@ def setupdb() -> None: @overload -def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: - ... +def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: ... @overload -def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: - ... 
+def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: ... def default_config( From 6cb8839f67cdf5401555ba2aec1a7e23b32a7591 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:49:06 +0000 Subject: [PATCH 06/45] Bump log from 0.4.20 to 0.4.21 (#16977) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d41a498fc7..a7809b672f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -138,9 +138,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "memchr" From a91fb6cc0669cee4d2b01bebdd706b63d4d8e290 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:49:19 +0000 Subject: [PATCH 07/45] Bump serde from 1.0.196 to 1.0.197 (#16963) --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7809b672f..5399421090 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -339,18 +339,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", From 74fb3e1996b14fbe0ad4f5df1132cc11cf7edd55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:49:54 +0000 Subject: [PATCH 08/45] Bump serde_json from 1.0.113 to 1.0.114 (#16961) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5399421090..4dd3ef29f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -359,9 +359,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", From 10e56b162f38c155b1333f9a63f32e2e9f6eda0e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:50:11 +0000 Subject: [PATCH 09/45] Bump cryptography from 41.0.7 to 42.0.5 (#16958) --- poetry.lock | 65 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6212961fb1..1a2eba0a29 100644 --- a/poetry.lock +++ b/poetry.lock @@ -465,47 +465,56 @@ files = [ [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.5" description = "cryptography is a package which provides cryptographic 
recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", 
hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] From f4e12ceb1fc2a02b2c3deed4530cea0a601ec4df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 16:50:47 +0000 Subject: [PATCH 10/45] Bump dawidd6/action-download-artifact from 3.1.1 to 3.1.2 (#16960) --- .github/workflows/docs-pr-netlify.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index c42077d3af..3fdf170f20 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@72aaadce3bc708349fc665eee3785cbb1b6e51d0 # v3.1.1 + uses: dawidd6/action-download-artifact@71072fbb1229e1317f1a8de6b04206afb461bd67 # v3.1.2 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} From e161103b46e9e85b95575ad12b34eb7ec570e0a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 17:05:57 +0000 Subject: [PATCH 11/45] Bump mypy from 
1.5.1 to 1.8.0 (#16901) --- poetry.lock | 57 ++++++++++--------- synapse/handlers/auth.py | 4 +- synapse/handlers/sso.py | 2 +- synapse/logging/context.py | 6 +- .../callbacks/spamchecker_callbacks.py | 2 +- synapse/storage/database.py | 6 +- .../storage/databases/main/events_worker.py | 4 +- synapse/streams/events.py | 9 +-- synapse/util/async_helpers.py | 12 +--- synapse/util/caches/dictionary_cache.py | 2 +- synapse/util/caches/expiringcache.py | 2 +- tests/rest/client/test_filter.py | 2 +- 12 files changed, 49 insertions(+), 59 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1a2eba0a29..4b261315f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1472,38 +1472,38 @@ files = [ [[package]] name = "mypy" -version = "1.5.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, - {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, - {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, - {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, - {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, - {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, - {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, - {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, - {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, - {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, - {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, - {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, - {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, - {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, - {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, - {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, - {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, - {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, - {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -1514,6 +1514,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f233f1b034..a1fab99f6b 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -2185,7 +2185,7 @@ class PasswordAuthProvider: # result is always the right type, but as it is 3rd party code it might not be if not isinstance(result, tuple) or len(result) != 2: - logger.warning( + logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" " Optional[Tuple[str, Optional[Callable]]]", callback, @@ -2248,7 +2248,7 @@ class PasswordAuthProvider: # result is always the right type, but as it is 3rd party code it might not be if not isinstance(result, tuple) or len(result) != 2: - logger.warning( + logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" " Optional[Tuple[str, Optional[Callable]]]", callback, diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 437cb5509c..8e39e76c97 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -150,7 +150,7 @@ class UserAttributes: display_name: Optional[str] = None picture: Optional[str] = None # mypy thinks these are incompatible for some reason. - emails: StrCollection = attr.Factory(list) # type: ignore[assignment] + emails: StrCollection = attr.Factory(list) @attr.s(slots=True, auto_attribs=True) diff --git a/synapse/logging/context.py b/synapse/logging/context.py index b36f397574..4650b60962 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -776,11 +776,7 @@ def run_in_background( ) -> "defer.Deferred[R]": ... 
-def run_in_background( # type: ignore[misc] - # The `type: ignore[misc]` above suppresses - # "Overloaded function implementation does not accept all possible arguments of signature 1" - # "Overloaded function implementation does not accept all possible arguments of signature 2" - # which seems like a bug in mypy. +def run_in_background( f: Union[ Callable[P, R], Callable[P, Awaitable[R]], diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 6ec56a7f14..17079ff781 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -455,7 +455,7 @@ class SpamCheckerModuleApiCallbacks: # mypy complains that we can't reach this code because of the # return type in CHECK_EVENT_FOR_SPAM_CALLBACK, but we don't know # for sure that the module actually returns it. - logger.warning( + logger.warning( # type: ignore[unreachable] "Module returned invalid value, rejecting message as spam" ) res = "This message has been rejected as probable spam" diff --git a/synapse/storage/database.py b/synapse/storage/database.py index d7d202f028..d9c85e411e 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -913,9 +913,9 @@ class DatabasePool: try: with opentracing.start_active_span(f"db.{desc}"): - result = await self.runWithConnection( + result: R = await self.runWithConnection( # mypy seems to have an issue with this, maybe a bug? - self.new_transaction, # type: ignore[arg-type] + self.new_transaction, desc, after_callbacks, async_after_callbacks, @@ -934,7 +934,7 @@ class DatabasePool: await async_callback(*async_args, **async_kwargs) for after_callback, after_args, after_kwargs in after_callbacks: after_callback(*after_args, **after_kwargs) - return cast(R, result) + return result except Exception: for exception_callback, after_args, after_kwargs in exception_callbacks: exception_callback(*after_args, **after_kwargs) diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 4c09567a7a..81fccfbccb 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -1876,7 +1876,7 @@ class EventsWorkerStore(SQLBaseStore): # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. - for row in txn: # type: ignore[assignment] + for row in txn: new_event_updates.append((row[0], row[1:])) limited = False @@ -1903,7 +1903,7 @@ class EventsWorkerStore(SQLBaseStore): # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. - for row in txn: # type: ignore[assignment] + for row in txn: new_event_updates.append((row[0], row[1:])) if len(new_event_updates) >= limit: diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 7466488157..dd7401ac8e 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -57,12 +57,13 @@ class _EventSourcesInner: class EventSources: def __init__(self, hs: "HomeServer"): self.sources = _EventSourcesInner( - # mypy previously warned that attribute.type is `Optional`, but we know it's + # attribute.type is `Optional`, but we know it's # never `None` here since all the attributes of `_EventSourcesInner` are # annotated. 
- # As of the stubs in attrs 22.1.0, `attr.fields()` now returns Any, - # so the call to `attribute.type` is not checked. - *(attribute.type(hs) for attribute in attr.fields(_EventSourcesInner)) + *( + attribute.type(hs) # type: ignore[misc] + for attribute in attr.fields(_EventSourcesInner) + ) ) self.store = hs.get_datastores().main self._instance_name = hs.get_instance_name() diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index d50b5dd346..70139beef2 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -284,15 +284,7 @@ async def yieldable_gather_results( try: return await make_deferred_yieldable( defer.gatherResults( - # type-ignore: mypy reports two errors: - # error: Argument 1 to "run_in_background" has incompatible type - # "Callable[[T, **P], Awaitable[R]]"; expected - # "Callable[[T, **P], Awaitable[R]]" [arg-type] - # error: Argument 2 to "run_in_background" has incompatible type - # "T"; expected "[T, **P.args]" [arg-type] - # The former looks like a mypy bug, and the latter looks like a - # false positive. - [run_in_background(func, item, *args, **kwargs) for item in iter], # type: ignore[arg-type] + [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, ) ) @@ -338,7 +330,7 @@ async def yieldable_gather_results_delaying_cancellation( return await make_deferred_yieldable( delay_cancellation( defer.gatherResults( - [run_in_background(func, item, *args, **kwargs) for item in iter], # type: ignore[arg-type] + [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, ) ) diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index 4245b7289c..1e6696332f 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -229,7 +229,7 @@ class DictionaryCache(Generic[KT, DKT, DV]): for dict_key in missing: # We explicitly add each dict key to the cache, so that cache hit # rates and LRU times for each key can be tracked separately. 
- value = entry.get(dict_key, _Sentinel.sentinel) # type: ignore[arg-type] + value = entry.get(dict_key, _Sentinel.sentinel) self.cache[(key, dict_key)] = _PerKeyValue(value) if value is not _Sentinel.sentinel: diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 462016f820..8017c031ee 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -142,7 +142,7 @@ class ExpiringCache(Generic[KT, VT]): return default if self.iterable: - self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value)) # type: ignore[arg-type] + self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value)) else: self.metrics.inc_evictions(EvictionReason.invalidation) diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py index 0a894ad081..9cfc6b224f 100644 --- a/tests/rest/client/test_filter.py +++ b/tests/rest/client/test_filter.py @@ -72,7 +72,7 @@ class FilterTestCase(unittest.HomeserverTestCase): def test_add_filter_non_local_user(self) -> None: _is_mine = self.hs.is_mine - self.hs.is_mine = lambda target_user: False # type: ignore[method-assign] + self.hs.is_mine = lambda target_user: False # type: ignore[assignment] channel = self.make_request( "POST", "/_matrix/client/r0/user/%s/filter" % (self.user_id), From 9b5eef95adbe00936f3c33522c42bcd7d50dd01b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 17:06:23 +0000 Subject: [PATCH 12/45] Bump ruff from 0.1.14 to 0.3.2 (#16994) --- poetry.lock | 38 ++++++++++++++++----------------- pyproject.toml | 2 +- synapse/synapse_rust/events.pyi | 7 ------ 3 files changed, 20 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4b261315f5..9af88428b7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2441,28 +2441,28 @@ files = [ [[package]] name = "ruff" -version = "0.1.14" +version = "0.3.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"}, + {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"}, + {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"}, + {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"}, + {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"}, + {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"}, ] [[package]] @@ -3448,4 +3448,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "e4ca55af1dcb6b28b8064b7551008fd16f6cdfa9cb9bf90d18c6b47766b56ae6" +content-hash = "b510fa05f4ea33194bec079f5d04ebb3f9ffbb5c1ea96a0341d57ba770ef81e6" diff --git a/pyproject.toml b/pyproject.toml index f05546b1c1..f0f1112b4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -321,7 +321,7 @@ all = [ # This helps prevents merge conflicts when running a batch of dependabot updates. isort = ">=5.10.1" black = ">=22.7.0" -ruff = "0.1.14" +ruff = "0.3.2" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index 6ec07e2d73..69837617f5 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -56,7 +56,6 @@ class EventInternalMetadata: (Added in synapse 0.99.0, so may be unreliable for events received before that) """ - ... def get_send_on_behalf_of(self) -> Optional[str]: """Whether this server should send the event on behalf of another server. @@ -65,7 +64,6 @@ class EventInternalMetadata: returns a str with the name of the server this event is sent on behalf of. """ - ... def need_to_check_redaction(self) -> bool: """Whether the redaction event needs to be rechecked when fetching @@ -77,7 +75,6 @@ class EventInternalMetadata: If the sender of the redaction event is allowed to redact any event due to auth rules, then this will always return false. """ - ... def is_soft_failed(self) -> bool: """Whether the event has been soft failed. @@ -88,7 +85,6 @@ class EventInternalMetadata: 2. They should not be added to the forward extremities (and therefore not to current state). """ - ... 
def should_proactively_send(self) -> bool: """Whether the event, if ours, should be sent to other clients and @@ -97,7 +93,6 @@ class EventInternalMetadata: This is used for sending dummy events internally. Servers and clients can still explicitly fetch the event. """ - ... def is_redacted(self) -> bool: """Whether the event has been redacted. @@ -105,8 +100,6 @@ class EventInternalMetadata: This is used for efficiently checking whether an event has been marked as redacted without needing to make another database call. """ - ... def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" - ...
From 92f20696273ca88c24d1dcdd738bd0f1d4be9e61 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 13 Mar 2024 17:21:37 +0000 Subject: [PATCH 13/45] Multi-worker-docker-container: disable log buffering (#16919) Background: we have a `matrixdotorg/synapse-workers` docker image, which is intended for running multiple workers within the same container. That image includes a `prefix-log` script which, for each line printed to stdout or stderr by one of the processes, prepends the name of the process. This commit disables buffering in that script, so that lines are logged quickly after they are printed. This makes it much easier to understand the output, since the lines then come out in a natural order. --- changelog.d/16919.misc | 1 + docker/prefix-log | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelog.d/16919.misc
diff --git a/changelog.d/16919.misc b/changelog.d/16919.misc new file mode 100644 index 0000000000..2c76f25379 --- /dev/null +++ b/changelog.d/16919.misc @@ -0,0 +1 @@ +Multi-worker-docker-container: disable log buffering.
diff --git a/docker/prefix-log b/docker/prefix-log index 0e26a4f19d..32dddbbfd4 100755 --- a/docker/prefix-log +++ b/docker/prefix-log @@ -7,6 +7,9 @@ # prefix-log command [args...] # -exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1) -exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2) +# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on +# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce +# confusion due to the interleaving of the different processes.
+exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1) +exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2) exec "$@" From 1cc1d6b655b693fc50ed0aad490ba6de62015d4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Mar 2024 10:36:13 +0000 Subject: [PATCH 14/45] Bump pyo3 from 0.20.2 to 0.20.3 (#16962) --- Cargo.lock | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4dd3ef29f9..d2ddf5ab6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -186,6 +186,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "proc-macro2" version = "1.0.76" @@ -197,9 +203,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" +checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" dependencies = [ "anyhow", "cfg-if", @@ -207,6 +213,7 @@ dependencies = [ "libc", "memoffset", "parking_lot", + "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", @@ -215,9 +222,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" +checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" dependencies = [ "once_cell", "target-lexicon", @@ -225,9 +232,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" +checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" dependencies = [ "libc", "pyo3-build-config", @@ -246,9 +253,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" +checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -258,12 +265,13 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" +checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" dependencies = [ "heck", "proc-macro2", + "pyo3-build-config", "quote", "syn", ] From a111ba02079275376d8fe3b3fbedabf83afcc098 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Mar 2024 10:36:21 +0000 Subject: [PATCH 15/45] Bump types-psycopg2 from 2.9.21.16 to 2.9.21.20240311 (#16995) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9af88428b7..b0ec52a2f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3117,13 +3117,13 @@ files = [ [[package]] name = "types-psycopg2" 
-version = "2.9.21.16" +version = "2.9.21.20240311" description = "Typing stubs for psycopg2" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"}, - {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"}, + {file = "types-psycopg2-2.9.21.20240311.tar.gz", hash = "sha256:722945dffa6a729bebc660f14137f37edfcead5a2c15eb234212a7d017ee8072"}, + {file = "types_psycopg2-2.9.21.20240311-py3-none-any.whl", hash = "sha256:2e137ae2b516ee0dbaab6f555086b6cfb723ba4389d67f551b0336adf4efcf1b"}, ] [[package]] From cb562d73aaa592c05b96672724ba296d02cee896 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Thu, 14 Mar 2024 14:49:54 +0100 Subject: [PATCH 16/45] Improve lock performance when a lot of locks are waiting (#16840) When a lot of locks are waiting for a single lock, notifying all locks independently with `call_later` on each release is really costly and incurs some kind of async contention, where the CPU is spinning a lot for not much. The included test is taking around 30s before the change, and 0.5s after. It was found following failing tests with https://github.com/element-hq/synapse/pull/16827. --- changelog.d/16840.misc | 1 + synapse/handlers/worker_lock.py | 13 +++++---- tests/handlers/test_worker_lock.py | 23 ++++++++++++++++ tests/utils.py | 42 +++++++++++++++++++++++++++++- 4 files changed, 73 insertions(+), 6 deletions(-) create mode 100644 changelog.d/16840.misc diff --git a/changelog.d/16840.misc b/changelog.d/16840.misc new file mode 100644 index 0000000000..1175e6de71 --- /dev/null +++ b/changelog.d/16840.misc @@ -0,0 +1 @@ +Improve lock performance when a lot of locks are all waiting for a single lock to be released. 
diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py index a870fd1124..7e578cf462 100644 --- a/synapse/handlers/worker_lock.py +++ b/synapse/handlers/worker_lock.py @@ -182,12 +182,15 @@ class WorkerLocksHandler: if not locks: return - def _wake_deferred(deferred: defer.Deferred) -> None: - if not deferred.called: - deferred.callback(None) + def _wake_all_locks( + locks: Collection[Union[WaitingLock, WaitingMultiLock]] + ) -> None: + for lock in locks: + deferred = lock.deferred + if not deferred.called: + deferred.callback(None) - for lock in locks: - self._clock.call_later(0, _wake_deferred, lock.deferred) + self._clock.call_later(0, _wake_all_locks, locks) @wrap_as_background_process("_cleanup_locks") async def _cleanup_locks(self) -> None:
diff --git a/tests/handlers/test_worker_lock.py b/tests/handlers/test_worker_lock.py index 3a4cf82094..6e9a15c8ee 100644 --- a/tests/handlers/test_worker_lock.py +++ b/tests/handlers/test_worker_lock.py @@ -27,6 +27,7 @@ from synapse.util import Clock from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase +from tests.utils import test_timeout class WorkerLockTestCase(unittest.HomeserverTestCase): @@ -50,6 +51,28 @@ class WorkerLockTestCase(unittest.HomeserverTestCase): self.get_success(d2) self.get_success(lock2.__aexit__(None, None, None)) + def test_lock_contention(self) -> None: + """Test lock contention when a lot of locks wait on a single worker""" + + # It takes around 0.5s on a 5+ year old laptop + with test_timeout(5): + nb_locks = 500 + d = self._take_locks(nb_locks) + self.assertEqual(self.get_success(d), nb_locks) + + async def _take_locks(self, nb_locks: int) -> int: + locks = [ + self.hs.get_worker_locks_handler().acquire_lock("test_lock", "") + for _ in range(nb_locks) + ] + + nb_locks_taken = 0 + for lock in locks: + async with lock: + nb_locks_taken += 1 + + return nb_locks_taken + class WorkerLockWorkersTestCase(BaseMultiWorkerStreamTestCase): def prepare(
diff --git a/tests/utils.py b/tests/utils.py index 757320ebee..9fd26ef348 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,7 +21,20 @@ import atexit import os -from typing import Any, Callable, Dict, List, Tuple, Type, TypeVar, Union, overload +import signal +from types import FrameType, TracebackType +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) import attr from typing_extensions import Literal, ParamSpec @@ -379,3 +392,30 @@ def checked_cast(type: Type[T], x: object) -> T: """ assert isinstance(x, type) return x + + +class TestTimeout(Exception): + pass + + +class test_timeout: + def __init__(self, seconds: int, error_message: Optional[str] = None) -> None: + if error_message is None: + error_message = "test timed out after {}s.".format(seconds) + self.seconds = seconds + self.error_message = error_message + + def handle_timeout(self, signum: int, frame: Optional[FrameType]) -> None: + raise TestTimeout(self.error_message) + + def __enter__(self) -> None: + signal.signal(signal.SIGALRM, self.handle_timeout) + signal.alarm(self.seconds) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + signal.alarm(0)
From 1c1b0bfa778e32bf740469b0a86be55102bc01b4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 14 Mar 2024 13:53:25 +0000 Subject: [PATCH 17/45] Add query to update local cache of
a remote user's device list to docs (#16892) --- changelog.d/16892.doc | 1 + docs/usage/administration/useful_sql_for_admins.md | 9 +++++++++ 2 files changed, 10 insertions(+) create mode 100644 changelog.d/16892.doc
diff --git a/changelog.d/16892.doc b/changelog.d/16892.doc new file mode 100644 index 0000000000..dd82b49112 --- /dev/null +++ b/changelog.d/16892.doc @@ -0,0 +1 @@ +Add a query to force a refresh of a remote user's device list to the "Useful SQL for Admins" documentation page. \ No newline at end of file
diff --git a/docs/usage/administration/useful_sql_for_admins.md b/docs/usage/administration/useful_sql_for_admins.md index 9f2cc9b957..41755cd3b6 100644 --- a/docs/usage/administration/useful_sql_for_admins.md +++ b/docs/usage/administration/useful_sql_for_admins.md @@ -205,3 +205,12 @@ SELECT user_id, device_id, user_agent, TO_TIMESTAMP(last_seen / 1000) AS "last_s FROM devices WHERE last_seen < DATE_PART('epoch', NOW() - INTERVAL '3 month') * 1000; ``` + +## Clear the cache of a remote user's device list + +Forces a resync of a remote user's device list - useful if you have somehow cached a bad state and the remote server will not send out a device list update. +```sql +INSERT INTO device_lists_remote_resync +VALUES ('USER_ID', (EXTRACT(epoch FROM NOW()) * 1000)::BIGINT); +```
From acc2f00ecaab073abd8bc386d0b4f0d8b30606ef Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 14 Mar 2024 13:54:01 +0000 Subject: [PATCH 18/45] upgrade.md: fix grammatical errors (#16965) Fixes a comma splice; "rollback" is a noun, "roll back" is the verb. --- changelog.d/16965.doc | 1 + docs/upgrade.md | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16965.doc
diff --git a/changelog.d/16965.doc b/changelog.d/16965.doc new file mode 100644 index 0000000000..36f8093298 --- /dev/null +++ b/changelog.d/16965.doc @@ -0,0 +1 @@ +Minor grammatical corrections to the upgrade documentation.
diff --git a/docs/upgrade.md b/docs/upgrade.md index 640fed3ae3..e7247676d1 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -88,11 +88,11 @@ process, for example: dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` -Generally Synapse database schemas are compatible across multiple versions, once -a version of Synapse is deployed you may not be able to rollback automatically. +Generally Synapse database schemas are compatible across multiple versions, but once +a version of Synapse is deployed you may not be able to roll back automatically. The following table gives the version ranges and the earliest version they can be rolled back to. E.g. Synapse versions v1.58.0 through v1.61.1 can be rolled -back safely to v1.57.0, but starting with v1.62.0 it is only safe to rollback to +back safely to v1.57.0, but starting with v1.62.0 it is only safe to roll back to v1.61.0.
From 1198f649ea91bcc21ae5b596ad50bf1851de2589 Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Thu, 14 Mar 2024 15:18:51 +0000 Subject: [PATCH 19/45] Sort versions in the documentation version picker appropriately. (#16966) Fixes #16964 This adds a proper sorter for versions which takes semantic versioning into account, rather than just relying on localeCompare.
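The comparison the new sorter performs is: extract the `v`-prefixed version, split it on dots, and compare the parts numerically, newest first. As a quick cross-check of the intended ordering, here is the same logic sketched in Python (illustrative only; the authoritative implementation is the JavaScript in the diff below):

```python
# Illustrative Python equivalent of the sortVersions comparator added below.
# Mirrors the JS assumptions: "develop"/"latest" sort first, and strings
# without a v<semver> component sort behind valid versions.
import re
from functools import cmp_to_key
from typing import List


def sort_versions(a: str, b: str) -> int:
    if a in ("develop", "latest"):
        return -1
    if b in ("develop", "latest"):
        return 1

    def parts(version: str) -> List[int]:
        m = re.search(r"v(\d+(\.\d+)+)", version)
        return [int(p) for p in m.group(1).split(".")] if m else []

    pa, pb = parts(a), parts(b)
    for i in range(max(len(pa), len(pb))):
        if i >= len(pb):
            return -1  # a has extra numeric parts, so it sorts first
        if i >= len(pa):
            return 1
        if pa[i] != pb[i]:
            return -1 if pa[i] > pb[i] else 1
    return 0


# v1.103.0 now sorts above v1.9.0, which plain string comparison got wrong:
print(sorted(["v1.9.0", "v1.103.0", "latest"], key=cmp_to_key(sort_versions)))
# -> ['latest', 'v1.103.0', 'v1.9.0']
```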
--- changelog.d/16966.doc | 1 + docs/website_files/version-picker.js | 26 +++++++++++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16966.doc
diff --git a/changelog.d/16966.doc b/changelog.d/16966.doc new file mode 100644 index 0000000000..06f4093aee --- /dev/null +++ b/changelog.d/16966.doc @@ -0,0 +1 @@ +Fix the sort order for the documentation version picker, so that newer releases appear above older ones.
diff --git a/docs/website_files/version-picker.js b/docs/website_files/version-picker.js index b6f35f29c7..3174b5d0bc 100644 --- a/docs/website_files/version-picker.js +++ b/docs/website_files/version-picker.js @@ -100,10 +100,30 @@ function sortVersions(a, b) { if (a === 'develop' || a === 'latest') return -1; if (b === 'develop' || b === 'latest') return 1; - const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0]; - const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0]; + // If any of the versions do not conform to a semantic version string, they + // will be sorted behind a valid version. + const versionA = (a.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? ''; + const versionB = (b.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? ''; - return versionB.localeCompare(versionA); + for (let i = 0; i < Math.max(versionA.length, versionB.length); i++) { + if (versionB[i] === undefined) { + return -1; + } + if (versionA[i] === undefined) { + return 1; + } + + const partA = parseInt(versionA[i], 10); + const partB = parseInt(versionB[i], 10); + + if (partA > partB) { + return -1; + } else if (partB > partA) { + return 1; + } + } + + return 0; } /**
From 6d5bafb2c8e8d750dede8446f665c05b52715b0a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 14 Mar 2024 17:18:48 +0000 Subject: [PATCH 20/45] Split up `SyncHandler.compute_state_delta` (#16929) This is a huge method, which melts my brain. This is a non-functional change which lays some groundwork for future work in this area. --- changelog.d/16929.misc | 2 + synapse/handlers/sync.py | 381 ++++++++++++++++++++++++--------------- 2 files changed, 238 insertions(+), 145 deletions(-) create mode 100644 changelog.d/16929.misc
diff --git a/changelog.d/16929.misc b/changelog.d/16929.misc new file mode 100644 index 0000000000..9489784e4a --- /dev/null +++ b/changelog.d/16929.misc @@ -0,0 +1,2 @@ +Refactor state delta calculation in `/sync` handler. +
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 5bb8a1439d..08fe4eb3b3 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1014,30 +1014,6 @@ class SyncHandler: if event.is_state(): timeline_state[(event.type, event.state_key)] = event.event_id - if full_state: - # always make sure we LL ourselves so we know we're in the room - # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209 - # We only need apply this on full state syncs given we disabled - # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840. - # We don't insert ourselves into `members_to_fetch`, because in some - # rare cases (an empty event batch with a now_token after the user's - # leave in a partial state room which another local user has - # joined), the room state will be missing our membership and there - # is no guarantee that our membership will be in the auth events of - # timeline events when the room is partial stated.
- state_filter = StateFilter.from_lazy_load_member_list( - members_to_fetch.union((sync_config.user.to_string(),)) - ) - else: - state_filter = StateFilter.from_lazy_load_member_list( - members_to_fetch - ) - - # We are happy to use partial state to compute the `/sync` response. - # Since partial state may not include the lazy-loaded memberships we - # require, we fix up the state response afterwards with memberships from - # auth events. - await_full_state = False else: timeline_state = { (event.type, event.state_key): event.event_id @@ -1045,9 +1021,6 @@ class SyncHandler: if event.is_state() } - state_filter = StateFilter.all() - await_full_state = True - # Now calculate the state to return in the sync response for the room. # This is more or less the change in state between the end of the previous # sync's timeline and the start of the current sync's timeline. @@ -1057,132 +1030,29 @@ class SyncHandler: # whether the room is partial stated *before* fetching it. is_partial_state_room = await self.store.is_partial_state_room(room_id) if full_state: - if batch: - state_at_timeline_end = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[-1].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - - state_at_timeline_start = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - - else: - state_at_timeline_end = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - state_at_timeline_start = state_at_timeline_end - - state_ids = _calculate_state( - timeline_contains=timeline_state, - timeline_start=state_at_timeline_start, - timeline_end=state_at_timeline_end, - previous_timeline_end={}, - lazy_load_members=lazy_load_members, + state_ids = await self._compute_state_delta_for_full_sync( + room_id, + sync_config.user, + batch, + now_token, + members_to_fetch, + timeline_state, ) - elif batch.limited: - if batch: - state_at_timeline_start = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - else: - # We can get here if the user has ignored the senders of all - # the recent events. - state_at_timeline_start = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - # for now, we disable LL for gappy syncs - see - # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346 - # N.B. this slows down incr syncs as we are now processing way - # more state in the server than if we were LLing. - # - # We still have to filter timeline_start to LL entries (above) in order - # for _calculate_state's LL logic to work, as we have to include LL - # members for timeline senders in case they weren't loaded in the initial - # sync. We do this by (counterintuitively) by filtering timeline_start - # members to just be ones which were timeline senders, which then ensures - # all of the rest get included in the state block (if we need to know - # about them). - state_filter = StateFilter.all() - + else: # If this is an initial sync then full_state should be set, and # that case is handled above. We assert here to ensure that this # is indeed the case. 
assert since_token is not None - state_at_previous_sync = await self.get_state_at( + + state_ids = await self._compute_state_delta_for_incremental_sync( room_id, - stream_position=since_token, - state_filter=state_filter, - await_full_state=await_full_state, + batch, + since_token, + now_token, + members_to_fetch, + timeline_state, ) - if batch: - state_at_timeline_end = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[-1].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - else: - # We can get here if the user has ignored the senders of all - # the recent events. - state_at_timeline_end = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - state_ids = _calculate_state( - timeline_contains=timeline_state, - timeline_start=state_at_timeline_start, - timeline_end=state_at_timeline_end, - previous_timeline_end=state_at_previous_sync, - # we have to include LL members in case LL initial sync missed them - lazy_load_members=lazy_load_members, - ) - else: - state_ids = {} - if lazy_load_members: - if members_to_fetch and batch.events: - # We're returning an incremental sync, with no - # "gap" since the previous sync, so normally there would be - # no state to return. - # But we're lazy-loading, so the client might need some more - # member events to understand the events in this timeline. - # So we fish out all the member events corresponding to the - # timeline here, and then dedupe any redundant ones below. - - state_ids = await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - # we only want members! - state_filter=StateFilter.from_types( - (EventTypes.Member, member) - for member in members_to_fetch - ), - await_full_state=False, - ) - # If we only have partial state for the room, `state_ids` may be missing the # memberships we wanted. We attempt to find some by digging through the auth # events of timeline events. @@ -1245,6 +1115,227 @@ class SyncHandler: if e.type != EventTypes.Aliases # until MSC2261 or alternative solution } + async def _compute_state_delta_for_full_sync( + self, + room_id: str, + syncing_user: UserID, + batch: TimelineBatch, + now_token: StreamToken, + members_to_fetch: Optional[Set[str]], + timeline_state: StateMap[str], + ) -> StateMap[str]: + """Calculate the state events to be included in a full sync response. + + As with `_compute_state_delta_for_incremental_sync`, the result will include + the membership events for the senders of each event in `members_to_fetch`. + + Args: + room_id: The room we are calculating for. + syncing_user: The user that is calling `/sync`. + batch: The timeline batch for the room that will be sent to the user. + now_token: Token of the end of the current batch. + members_to_fetch: If lazy-loading is enabled, the memberships needed for + events in the timeline. + timeline_state: The contribution to the room state from state events in + `batch`. Only contains the last event for any given state key. + + Returns: + A map from (type, state_key) to event_id, for each event that we believe + should be included in the `state` part of the sync response. + """ + if members_to_fetch is not None: + # Lazy-loading of membership events is enabled. + # + # Always make sure we load our own membership event so we know if + # we're in the room, to fix https://github.com/vector-im/riot-web/issues/7209. 
+ # + # We only need to apply this on full state syncs given we disabled + # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840. + # + # We don't insert ourselves into `members_to_fetch`, because in some + # rare cases (an empty event batch with a now_token after the user's + # leave in a partial state room which another local user has + # joined), the room state will be missing our membership and there + # is no guarantee that our membership will be in the auth events of + # timeline events when the room is partial stated. + state_filter = StateFilter.from_lazy_load_member_list( + members_to_fetch.union((syncing_user.to_string(),)) + ) + + # We are happy to use partial state to compute the `/sync` response. + # Since partial state may not include the lazy-loaded memberships we + # require, we fix up the state response afterwards with memberships from + # auth events. + await_full_state = False + lazy_load_members = True + else: + state_filter = StateFilter.all() + await_full_state = True + lazy_load_members = False + + if batch: + state_at_timeline_end = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[-1].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + + state_at_timeline_start = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + state_at_timeline_end = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + state_at_timeline_start = state_at_timeline_end + + state_ids = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state_at_timeline_start, + timeline_end=state_at_timeline_end, + previous_timeline_end={}, + lazy_load_members=lazy_load_members, + ) + return state_ids + + async def _compute_state_delta_for_incremental_sync( + self, + room_id: str, + batch: TimelineBatch, + since_token: StreamToken, + now_token: StreamToken, + members_to_fetch: Optional[Set[str]], + timeline_state: StateMap[str], + ) -> StateMap[str]: + """Calculate the state events to be included in an incremental sync response. + + If lazy-loading of membership events is enabled (as indicated by + `members_to_fetch` being not-`None`), the result will include the membership + events for each member in `members_to_fetch`. The caller + (`compute_state_delta`) is responsible for keeping track of which membership + events we have already sent to the client, and hence ripping them out. + + Args: + room_id: The room we are calculating for. + batch: The timeline batch for the room that will be sent to the user. + since_token: Token of the end of the previous batch. + now_token: Token of the end of the current batch. + members_to_fetch: If lazy-loading is enabled, the memberships needed for + events in the timeline. Otherwise, `None`. + timeline_state: The contribution to the room state from state events in + `batch`. Only contains the last event for any given state key. + + Returns: + A map from (type, state_key) to event_id, for each event that we believe + should be included in the `state` part of the sync response. + """ + if members_to_fetch is not None: + # Lazy-loading is enabled. Only return the state that is needed.
+ state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch) + await_full_state = False + lazy_load_members = True + else: + state_filter = StateFilter.all() + await_full_state = True + lazy_load_members = False + + if batch.limited: + if batch: + state_at_timeline_start = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + # We can get here if the user has ignored the senders of all + # the recent events. + state_at_timeline_start = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + # for now, we disable LL for gappy syncs - see + # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346 + # N.B. this slows down incr syncs as we are now processing way + # more state in the server than if we were LLing. + # + # We still have to filter timeline_start to LL entries (above) in order + # for _calculate_state's LL logic to work, as we have to include LL + # members for timeline senders in case they weren't loaded in the initial + # sync. We do this (counterintuitively) by filtering timeline_start + # members to just be ones which were timeline senders, which then ensures + # all of the rest get included in the state block (if we need to know + # about them). + state_filter = StateFilter.all() + + state_at_previous_sync = await self.get_state_at( + room_id, + stream_position=since_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + if batch: + state_at_timeline_end = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[-1].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + # We can get here if the user has ignored the senders of all + # the recent events. + state_at_timeline_end = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + state_ids = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state_at_timeline_start, + timeline_end=state_at_timeline_end, + previous_timeline_end=state_at_previous_sync, + lazy_load_members=lazy_load_members, + ) + else: + state_ids = {} + if lazy_load_members: + if members_to_fetch and batch.events: + # We're returning an incremental sync, with no + # "gap" since the previous sync, so normally there would be + # no state to return. + # But we're lazy-loading, so the client might need some more + # member events to understand the events in this timeline. + # So we fish out all the member events corresponding to the + # timeline here. The caller will then dedupe any redundant ones. + + state_ids = await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + # we only want members! + state_filter=StateFilter.from_types( + (EventTypes.Member, member) for member in members_to_fetch + ), + await_full_state=False, + ) + return state_ids + async def _find_missing_partial_state_memberships( self, room_id: str,
From 52f456a8228eaf4d6cc75c0514788e8507975035 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 14 Mar 2024 17:34:19 +0000 Subject: [PATCH 21/45] `/sync`: Fix edge-case in calculating the "device_lists" response (#16949) Fixes https://github.com/element-hq/synapse/issues/16948.
If the `join` and the `leave` are in the same sync response, we need to count them as a "left" user. --- changelog.d/16949.bugfix | 1 + synapse/handlers/sync.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 changelog.d/16949.bugfix diff --git a/changelog.d/16949.bugfix b/changelog.d/16949.bugfix new file mode 100644 index 0000000000..99ed435d75 --- /dev/null +++ b/changelog.d/16949.bugfix @@ -0,0 +1 @@ +Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations. diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 08fe4eb3b3..0aedb37f16 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -2837,7 +2837,7 @@ class SyncResultBuilder: if self.since_token: for joined_sync in self.joined: it = itertools.chain( - joined_sync.timeline.events, joined_sync.state.values() + joined_sync.state.values(), joined_sync.timeline.events ) for event in it: if event.type == EventTypes.Member: @@ -2849,13 +2849,20 @@ class SyncResultBuilder: newly_joined_or_invited_or_knocked_users.add( event.state_key ) + # If the user left and rejoined in the same batch, they + # count as a newly-joined user, *not* a newly-left user. + newly_left_users.discard(event.state_key) else: prev_content = event.unsigned.get("prev_content", {}) prev_membership = prev_content.get("membership", None) if prev_membership == Membership.JOIN: newly_left_users.add(event.state_key) + # If the user joined and left in the same batch, they + # count as a newly-left user, not a newly-joined user. + newly_joined_or_invited_or_knocked_users.discard( + event.state_key + ) - newly_left_users -= newly_joined_or_invited_or_knocked_users return newly_joined_or_invited_or_knocked_users, newly_left_users From 0b4dc4de7cd92180074ccf32ed4a53b7fa330aad Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 19 Mar 2024 12:24:41 +0000 Subject: [PATCH 22/45] 1.103.0 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index f0c23e693e..c0679159df 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# Synapse 1.103.0 (2024-03-19) + +No significant changes since 1.103.0rc1. + + + + # Synapse 1.103.0rc1 (2024-03-12) ### Features diff --git a/debian/changelog b/debian/changelog index ac128e535d..c718dec7b0 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.103.0) stable; urgency=medium + + * New Synapse release 1.103.0. + + -- Synapse Packaging team Tue, 19 Mar 2024 12:24:36 +0000 + matrix-synapse-py3 (1.103.0~rc1) stable; urgency=medium * New Synapse release 1.103.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 2bda9596f6..d0b119fab1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.103.0rc1" +version = "1.103.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 9635822cc11aa2fb132af88db98280df30be9756 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:16:37 +0000 Subject: [PATCH 23/45] Clarify docs for some room state functions (#16950) State *before* an event is different to state *after* that event, and people tend to assume the wrong one. 
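A toy illustration of the distinction (not Synapse's storage model; just the definition that the clarified docstrings below rely on):

```python
# Toy model of the before/after distinction, not Synapse's actual storage.
# A state event is part of the room state *after* itself, not before it.
from typing import Dict, Tuple

StateMap = Dict[Tuple[str, str], str]  # (event type, state key) -> event_id


def state_after(state_before: StateMap, etype: str, state_key: str, event_id: str) -> StateMap:
    after = dict(state_before)
    after[(etype, state_key)] = event_id
    return after


before: StateMap = {("m.room.create", ""): "$create"}
after = state_after(before, "m.room.member", "@alice:example.com", "$alice_join")

assert ("m.room.member", "@alice:example.com") not in before
assert after[("m.room.member", "@alice:example.com")] == "$alice_join"
```

The methods documented below, like `get_state_ids_for_event`, return the "after" variant, which is why Alice's own join appears in the result for her join event.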
--- changelog.d/16950.misc | 1 + synapse/storage/controllers/state.py | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16950.misc
diff --git a/changelog.d/16950.misc b/changelog.d/16950.misc new file mode 100644 index 0000000000..c5a0741c3b --- /dev/null +++ b/changelog.d/16950.misc @@ -0,0 +1 @@ +Clarify docs for some room state functions.
diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 22d93a561c..f9eced23bf 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -273,8 +273,10 @@ class StateStorageController: await_full_state: bool = True, ) -> Dict[str, StateMap[str]]: """ - Get the state dicts corresponding to a list of events, containing the event_ids - of the state events (as opposed to the events themselves) + Get the room states after each of a list of events. + + For each event in `event_ids`, the result contains a map from state tuple + to the event_id of the state event (as opposed to the events themselves). Args: event_ids: events whose state should be returned @@ -347,7 +349,7 @@ class StateStorageController: await_full_state: bool = True, ) -> StateMap[str]: """ - Get the state dict corresponding to a particular event + Get the state dict corresponding to the state after a particular event Args: event_id: event whose state should be returned
From 05489d89c6ded7825cc7547e601390c2df78fb21 Mon Sep 17 00:00:00 2001 From: V02460 Date: Tue, 19 Mar 2024 18:19:12 +0100 Subject: [PATCH 24/45] Specify IP subnet literals in canonical form (#16953) This is needed because the netaddr package removed support for the implicit prefix form in version 1.0.0: https://github.com/netaddr/netaddr/pull/360 --- changelog.d/16953.misc | 1 + tests/api/test_auth.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/16953.misc
diff --git a/changelog.d/16953.misc b/changelog.d/16953.misc new file mode 100644 index 0000000000..81abcc3df0 --- /dev/null +++ b/changelog.d/16953.misc @@ -0,0 +1 @@ +Specify IP subnets in canonical form.
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index ce85ddf22a..bd229cf7e9 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -128,7 +128,7 @@ class AuthTestCase(unittest.HomeserverTestCase): token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=IPSet(["192.168/16"]), + ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -147,7 +147,7 @@ class AuthTestCase(unittest.HomeserverTestCase): token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=IPSet(["192.168/16"]), + ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None)
From 74ab329eaa50348d3ff65fc97d7fbc9cd9773311 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Tue, 19 Mar 2024 18:20:10 +0100 Subject: [PATCH 25/45] Pass module API to OIDC mapping provider (#16974) As is already done for the SAML mapping provider, pass the module API to the OIDC one so that the mapper can implement more complex logic.
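For module authors, the practical effect is that a custom mapping provider can now accept a second constructor argument. A minimal sketch of a provider using the new signature (the class name and config handling are illustrative; only the `__init__`/`parse_config` shape comes from the documented interface, and the other mapping methods are omitted):

```python
# Hypothetical custom OIDC mapping provider using the new two-argument
# constructor. Only the constructor and parse_config shapes come from the
# documented interface; everything else here is illustrative.
from typing import Any, Dict

from synapse.module_api import ModuleApi


class ExampleOidcMappingProvider:
    def __init__(self, parsed_config: Dict[str, Any], module_api: ModuleApi):
        self._config = parsed_config
        # The module API gives the mapper access to homeserver services,
        # just as SAML mapping providers already have.
        self._module_api = module_api

    @staticmethod
    def parse_config(config: Dict[str, Any]) -> Dict[str, Any]:
        # Validate and normalise the provider's config block here.
        return config
```

Providers written against the old single-argument signature keep working: as the `oidc.py` hunk below shows, Synapse inspects the constructor's parameter count and only passes a `ModuleApi` instance when three parameters (including `self`) are declared.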
--- changelog.d/16974.misc | 1 + docs/sso_mapping_providers.md | 4 +++- synapse/handlers/oidc.py | 17 ++++++++++++++--- 3 files changed, 18 insertions(+), 4 deletions(-) create mode 100644 changelog.d/16974.misc
diff --git a/changelog.d/16974.misc b/changelog.d/16974.misc new file mode 100644 index 0000000000..bf0a13786c --- /dev/null +++ b/changelog.d/16974.misc @@ -0,0 +1 @@ +Pass the module API to the OIDC mapping provider, as is already done for the SAML one, so that the mapper can implement more complex logic.
diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md index 77cc02c541..10c695029f 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md @@ -50,11 +50,13 @@ comment these options out and use those specified by the module instead. A custom mapping provider must specify the following methods: -* `def __init__(self, parsed_config)` +* `def __init__(self, parsed_config, module_api)` - Arguments: - `parsed_config` - A configuration object that is the return value of the `parse_config` method. You should set any configuration options needed by the module here. + - `module_api` - a `synapse.module_api.ModuleApi` object which provides the + stable API available for extension modules. * `def parse_config(config)` - This method should have the `@staticmethod` decoration. - Arguments:
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index fe13d82b66..ba67cc4768 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -65,6 +65,7 @@ from synapse.http.server import finish_request from synapse.http.servlet import parse_string from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable +from synapse.module_api import ModuleApi from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart from synapse.util import Clock, json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall @@ -421,9 +422,19 @@ class OidcProvider: # from the IdP's jwks_uri, if required. self._jwks = RetryOnExceptionCachedCall(self._load_jwks) - self._user_mapping_provider = provider.user_mapping_provider_class( - provider.user_mapping_provider_config + user_mapping_provider_init_method = ( + provider.user_mapping_provider_class.__init__ ) + if len(inspect.signature(user_mapping_provider_init_method).parameters) == 3: + self._user_mapping_provider = provider.user_mapping_provider_class( + provider.user_mapping_provider_config, + ModuleApi(hs, hs.get_auth_handler()), + ) + else: + self._user_mapping_provider = provider.user_mapping_provider_class( + provider.user_mapping_provider_config, + ) + self._skip_verification = provider.skip_verification self._allow_existing_users = provider.allow_existing_users @@ -1583,7 +1594,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): This is the default mapping provider.
""" - def __init__(self, config: JinjaOidcMappingConfig): + def __init__(self, config: JinjaOidcMappingConfig, module_api: ModuleApi): self._config = config @staticmethod From f768e028c1acdf6aa3eb77984734a3defa0a49a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:45:15 +0000 Subject: [PATCH 26/45] Bump types-pyopenssl from 23.3.0.0 to 24.0.0.20240311 (#17003) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index b0ec52a2f0..23879db8e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3128,13 +3128,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.3.0.0" +version = "24.0.0.20240311" description = "Typing stubs for pyOpenSSL" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, - {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, + {file = "types-pyOpenSSL-24.0.0.20240311.tar.gz", hash = "sha256:7bca00cfc4e7ef9c5d2663c6a1c068c35798e59670595439f6296e7ba3d58083"}, + {file = "types_pyOpenSSL-24.0.0.20240311-py3-none-any.whl", hash = "sha256:6e8e8bfad34924067333232c93f7fc4b369856d8bea0d5c9d1808cb290ab1972"}, ] [package.dependencies] From 3e89afdef7cd8610324a554defcc0a94bf6bc4c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:45:23 +0000 Subject: [PATCH 27/45] Bump jinja2 from 3.1.2 to 3.1.3 (#17005) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 23879db8e9..eee863a099 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1001,13 +1001,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] From 77317cecc77cde35d9a98bc4fd3b6044964b74fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:45:41 +0000 Subject: [PATCH 28/45] Bump anyhow from 1.0.80 to 1.0.81 (#17009) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d2ddf5ab6b..b2af044054 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "arc-swap" From 77b824008c8f6d3ae9ea8eac938af657961ec670 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:45:56 +0000 Subject: [PATCH 29/45] Bump pydantic from 2.6.0 to 2.6.4 (#17004) --- poetry.lock | 168 ++++++++++++++++++++++++++-------------------------- 1 file changed, 84 insertions(+), 84 deletions(-) diff --git a/poetry.lock b/poetry.lock index eee863a099..bee0b98057 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1870,18 +1870,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -1889,90 +1889,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.1" +version = "2.16.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, 
- {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = 
"pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = 
"pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = 
"pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] From 8fb5b0f335b3dc54962aea102c71a7e449497487 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 19 Mar 2024 10:52:53 -0700 Subject: [PATCH 30/45] Improve event validation (#16908) As the title states. --- changelog.d/16908.misc | 1 + synapse/api/constants.py | 2 + synapse/handlers/message.py | 13 ++++ synapse/handlers/sync.py | 12 +++- tests/handlers/test_message.py | 40 ++++++++++++ tests/handlers/test_sync.py | 115 ++++++++++++++++++++++++++++++++- 6 files changed, 180 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16908.misc diff --git a/changelog.d/16908.misc b/changelog.d/16908.misc new file mode 100644 index 0000000000..d13c59aa35 --- /dev/null +++ b/changelog.d/16908.misc @@ -0,0 +1 @@ +Improve event validation (#16908). \ No newline at end of file diff --git a/synapse/api/constants.py b/synapse/api/constants.py index d25aff98ff..98884b4967 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -129,6 +129,8 @@ class EventTypes: Reaction: Final = "m.reaction" + CallInvite: Final = "m.call.invite" + class ToDeviceEventTypes: RoomKeyRequest: Final = "m.room_key_request" diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 0ce6eeee15..ccaa5508ff 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -34,6 +34,7 @@ from synapse.api.constants import ( EventTypes, GuestAccess, HistoryVisibility, + JoinRules, Membership, RelationTypes, UserTypes, @@ -1325,6 +1326,18 @@ class EventCreationHandler: self.validator.validate_new(event, self.config) await self._validate_event_relation(event) + + if event.type == EventTypes.CallInvite: + room_id = event.room_id + room_info = await self.store.get_room_with_stats(room_id) + assert room_info is not None + + if room_info.join_rules == JoinRules.PUBLIC: + raise SynapseError( + 403, + "Call invites are not allowed in public rooms.", + Codes.FORBIDDEN, + ) logger.debug("Created event %s", event.event_id) return event, context diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 0aedb37f16..3aa2e2b7ba 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -41,6 +41,7 @@ from synapse.api.constants import ( AccountDataTypes, EventContentFields, EventTypes, + JoinRules, Membership, ) from synapse.api.filtering import FilterCollection @@ -675,13 +676,22 @@ class SyncHandler: ) ) - loaded_recents = await filter_events_for_client( + filtered_recents = await filter_events_for_client( self._storage_controllers, sync_config.user.to_string(), loaded_recents, always_include_ids=current_state_ids, ) + loaded_recents = [] + for event in filtered_recents: + if event.type == EventTypes.CallInvite: + room_info = await self.store.get_room_with_stats(event.room_id) + assert room_info is not None + if room_info.join_rules == JoinRules.PUBLIC: + continue + loaded_recents.append(event) + log_kv({"loaded_recents_after_client_filtering": len(loaded_recents)}) loaded_recents.extend(recents) diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 0ee5eee385..76ab83d1f7 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -24,6 +24,7 @@ from typing import Tuple from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import EventTypes +from synapse.api.errors import 
SynapseError from synapse.events import EventBase from synapse.events.snapshot import EventContext, UnpersistedEventContextBase from synapse.rest import admin @@ -51,11 +52,15 @@ class EventCreationTestCase(unittest.HomeserverTestCase): persistence = self.hs.get_storage_controllers().persistence assert persistence is not None self._persist_event_storage_controller = persistence + self.store = self.hs.get_datastores().main self.user_id = self.register_user("tester", "foobar") device_id = "dev-1" access_token = self.login("tester", "foobar", device_id=device_id) self.room_id = self.helper.create_room_as(self.user_id, tok=access_token) + self.private_room_id = self.helper.create_room_as( + self.user_id, tok=access_token, extra_content={"preset": "private_chat"} + ) self.requester = create_requester(self.user_id, device_id=device_id) @@ -285,6 +290,41 @@ class EventCreationTestCase(unittest.HomeserverTestCase): AssertionError, ) + def test_call_invite_event_creation_fails_in_public_room(self) -> None: + # get prev_events for room + prev_events = self.get_success( + self.store.get_prev_events_for_room(self.room_id) + ) + + # the invite in a public room should fail + self.get_failure( + self.handler.create_event( + self.requester, + { + "type": EventTypes.CallInvite, + "room_id": self.room_id, + "sender": self.requester.user.to_string(), + }, + prev_event_ids=prev_events, + auth_event_ids=prev_events, + ), + SynapseError, + ) + + # but a call invite in a private room should succeed + self.get_success( + self.handler.create_event( + self.requester, + { + "type": EventTypes.CallInvite, + "room_id": self.private_room_id, + "sender": self.requester.user.to_string(), + }, + prev_event_ids=prev_events, + auth_event_ids=prev_events, + ) + ) + class ServerAclValidationTestCase(unittest.HomeserverTestCase): servlets = [ diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 37904926e3..1b36324b8f 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Optional +from typing import Collection, List, Optional from unittest.mock import AsyncMock, Mock, patch from twisted.test.proto_helpers import MemoryReactor @@ -25,7 +25,10 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import EventTypes, JoinRules from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import Filtering -from synapse.api.room_versions import RoomVersions +from synapse.api.room_versions import RoomVersion, RoomVersions +from synapse.events import EventBase +from synapse.events.snapshot import EventContext +from synapse.federation.federation_base import event_from_pdu_json from synapse.handlers.sync import SyncConfig, SyncResult from synapse.rest import admin from synapse.rest.client import knock, login, room @@ -285,6 +288,114 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ) self.assertEqual(eve_initial_sync_after_join.joined, []) + def test_call_invite_in_public_room_not_returned(self) -> None: + user = self.register_user("alice", "password") + tok = self.login(user, "password") + room_id = self.helper.create_room_as(user, is_public=True, tok=tok) + self.handler = self.hs.get_federation_handler() + federation_event_handler = self.hs.get_federation_event_handler() + + async def _check_event_auth( + origin: Optional[str], event: EventBase, context: EventContext + ) -> None: + pass + + 
federation_event_handler._check_event_auth = _check_event_auth # type: ignore[method-assign] + self.client = self.hs.get_federation_client() + + async def _check_sigs_and_hash_for_pulled_events_and_fetch( + dest: str, pdus: Collection[EventBase], room_version: RoomVersion + ) -> List[EventBase]: + return list(pdus) + + self.client._check_sigs_and_hash_for_pulled_events_and_fetch = _check_sigs_and_hash_for_pulled_events_and_fetch # type: ignore[assignment] + + prev_events = self.get_success(self.store.get_prev_events_for_room(room_id)) + + # create a call invite event + call_event = event_from_pdu_json( + { + "type": EventTypes.CallInvite, + "content": {}, + "room_id": room_id, + "sender": user, + "depth": 32, + "prev_events": prev_events, + "auth_events": prev_events, + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V10, + ) + + self.assertEqual( + self.get_success( + federation_event_handler.on_receive_pdu("test.serv", call_event) + ), + None, + ) + + # check that it is in DB + recent_event = self.get_success(self.store.get_prev_events_for_room(room_id)) + self.assertIn(call_event.event_id, recent_event) + + # but that it does not come down /sync in public room + sync_result: SyncResult = self.get_success( + self.sync_handler.wait_for_sync_for_user( + create_requester(user), generate_sync_config(user) + ) + ) + event_ids = [] + for event in sync_result.joined[0].timeline.events: + event_ids.append(event.event_id) + self.assertNotIn(call_event.event_id, event_ids) + + # it will come down in a private room, though + user2 = self.register_user("bob", "password") + tok2 = self.login(user2, "password") + private_room_id = self.helper.create_room_as( + user2, is_public=False, tok=tok2, extra_content={"preset": "private_chat"} + ) + + priv_prev_events = self.get_success( + self.store.get_prev_events_for_room(private_room_id) + ) + private_call_event = event_from_pdu_json( + { + "type": EventTypes.CallInvite, + "content": {}, + "room_id": private_room_id, + "sender": user, + "depth": 32, + "prev_events": priv_prev_events, + "auth_events": priv_prev_events, + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V10, + ) + + self.assertEqual( + self.get_success( + federation_event_handler.on_receive_pdu("test.serv", private_call_event) + ), + None, + ) + + recent_events = self.get_success( + self.store.get_prev_events_for_room(private_room_id) + ) + self.assertIn(private_call_event.event_id, recent_events) + + private_sync_result: SyncResult = self.get_success( + self.sync_handler.wait_for_sync_for_user( + create_requester(user2), generate_sync_config(user2) + ) + ) + priv_event_ids = [] + for event in private_sync_result.joined[0].timeline.events: + priv_event_ids.append(event.event_id) + + self.assertIn(private_call_event.event_id, priv_event_ids) + _request_key = 0 From cf5adc80e1a362eb651abdb6d84b2397434a7f45 Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 19 Mar 2024 10:55:31 -0700 Subject: [PATCH 31/45] Update power level default for public rooms (#16907) --- changelog.d/16907.misc | 1 + synapse/handlers/room.py | 2 +- tests/rest/client/test_rooms.py | 18 ++++++++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 changelog.d/16907.misc diff --git a/changelog.d/16907.misc b/changelog.d/16907.misc new file mode 100644 index 0000000000..e3a7fad447 --- /dev/null +++ b/changelog.d/16907.misc @@ -0,0 +1 @@ +Update power level default for public rooms (#16907). 
\ No newline at end of file diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3278426ca3..8b5ffb135e 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -151,7 +151,7 @@ class RoomCreationHandler: "history_visibility": HistoryVisibility.SHARED, "original_invitees_have_ops": False, "guest_can_join": False, - "power_level_content_override": {}, + "power_level_content_override": {EventTypes.CallInvite: 50}, }, } diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index d2f2ded487..1364615085 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -93,6 +93,7 @@ class RoomPermissionsTestCase(RoomBase): rmcreator_id = "@notme:red" def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store_controllers = hs.get_storage_controllers() self.helper.auth_user_id = self.rmcreator_id # create some rooms under the name rmcreator_id self.uncreated_rmid = "!aa:test" @@ -482,6 +483,23 @@ class RoomPermissionsTestCase(RoomBase): expect_code=HTTPStatus.OK, ) + def test_default_call_invite_power_level(self) -> None: + pl_event = self.get_success( + self.store_controllers.state.get_current_state_event( + self.created_public_rmid, EventTypes.PowerLevels, "" + ) + ) + assert pl_event is not None + self.assertEqual(50, pl_event.content.get("m.call.invite")) + + private_pl_event = self.get_success( + self.store_controllers.state.get_current_state_event( + self.created_rmid, EventTypes.PowerLevels, "" + ) + ) + assert private_pl_event is not None + self.assertEqual(None, private_pl_event.content.get("m.call.invite")) + class RoomStateTestCase(RoomBase): """Tests /rooms/$room_id/state.""" From 21daa56ee15b6b68c05f827b221f9b00004d040b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 21 Mar 2024 08:50:51 -0500 Subject: [PATCH 32/45] Prevent `start_for_complement.sh` from setting `START_POSTGRES` to `false` when it's already set (#16985) I have a use case where I'd like the Synapse image to start up a postgres instance that I can use, but don't want to force Synapse to use postgres as well. This commit prevents postgres from being started when it has already been explicitly enabled elsewhere. --- changelog.d/16985.misc | 1 + docker/complement/conf/postgres.supervisord.conf | 2 +- docker/complement/conf/start_for_complement.sh | 5 +++-- 3 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16985.misc diff --git a/changelog.d/16985.misc b/changelog.d/16985.misc new file mode 100644 index 0000000000..34d1337a28 --- /dev/null +++ b/changelog.d/16985.misc @@ -0,0 +1 @@ +Allow containers building on top of Synapse's Complement container to use the included PostgreSQL cluster.
diff --git a/docker/complement/conf/postgres.supervisord.conf b/docker/complement/conf/postgres.supervisord.conf index b88bfc772e..657845dfdb 100644 --- a/docker/complement/conf/postgres.supervisord.conf +++ b/docker/complement/conf/postgres.supervisord.conf @@ -1,7 +1,7 @@ [program:postgres] command=/usr/local/bin/prefix-log gosu postgres postgres -# Only start if START_POSTGRES=1 +# Only start if START_POSTGRES=true autostart=%(ENV_START_POSTGRES)s # Lower priority number = starts first diff --git a/docker/complement/conf/start_for_complement.sh b/docker/complement/conf/start_for_complement.sh index 7b012ce8ab..cc798a3210 100755 --- a/docker/complement/conf/start_for_complement.sh +++ b/docker/complement/conf/start_for_complement.sh @@ -32,8 +32,9 @@ case "$SYNAPSE_COMPLEMENT_DATABASE" in ;; sqlite|"") - # Configure supervisord not to start Postgres, as we don't need it - export START_POSTGRES=false + # Set START_POSTGRES to false unless it has already been set + # (i.e. by another container image inheriting our own). + export START_POSTGRES=${START_POSTGRES:-false} ;; *) From 70a86f69c24f82d81137636b2a40530978734123 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 16:53:51 +0000 Subject: [PATCH 33/45] Bump types-jsonschema from 4.21.0.20240118 to 4.21.0.20240311 (#17007) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index bee0b98057..ecb291d207 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3070,13 +3070,13 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.21.0.20240118" +version = "4.21.0.20240311" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.21.0.20240118.tar.gz", hash = "sha256:31aae1b5adc0176c1155c2d4f58348b22d92ae64315e9cc83bd6902168839232"}, - {file = "types_jsonschema-4.21.0.20240118-py3-none-any.whl", hash = "sha256:77a4ac36b0be4f24274d5b9bf0b66208ee771c05f80e34c4641de7d63e8a872d"}, + {file = "types-jsonschema-4.21.0.20240311.tar.gz", hash = "sha256:f7165ce70abd91df490c73b089873afd2899c5e56430ee495b64f851ad01f287"}, + {file = "types_jsonschema-4.21.0.20240311-py3-none-any.whl", hash = "sha256:e872f5661513824edf9698f73a66c9c114713d93eab58699bd0532e7e6db5750"}, ] [package.dependencies] From 159536d5257fd7cab874878fa271c71cc9f29ade Mon Sep 17 00:00:00 2001 From: Eirik Date: Thu, 21 Mar 2024 18:05:52 +0100 Subject: [PATCH 34/45] Update link, in installation guide, for docker hub synapse images (#17001) --- docs/setup/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 324cdc67b2..9126874d44 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -26,7 +26,7 @@ for most users. #### Docker images and Ansible playbooks There is an official synapse image available at - <https://hub.docker.com/r/matrixdotorg/synapse> or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse) + <https://hub.docker.com/r/element-hq/synapse> or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse) which can be used with the docker-compose file available at [contrib/docker](https://github.com/element-hq/synapse/tree/develop/contrib/docker).
Further information on this including configuration options is available in the README From 6cf23febb998db1c7857ee87cbd2ddaac94700fc Mon Sep 17 00:00:00 2001 From: grahhnt <46821216+grahhnt@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:07:21 -0600 Subject: [PATCH 35/45] Add note to using --curses under sqlite porting (#17012) --- docs/postgres.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/postgres.md b/docs/postgres.md index ad7c6a0738..921bae9877 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -182,7 +182,7 @@ synapse_port_db --sqlite-database homeserver.db.snapshot \ --postgres-config homeserver-postgres.yaml ``` -The flag `--curses` displays a coloured curses progress UI. +The flag `--curses` displays a coloured curses progress UI. (NOTE: if your terminal is too small the script will error out) If the script took a long time to complete, or time has otherwise passed since the original snapshot was taken, repeat the previous steps with a From 5a59c68b3dba307e6ccc8a7b000ac21161c0d19b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tadeusz=20So=C5=9Bnierz?= Date: Thu, 21 Mar 2024 18:12:02 +0100 Subject: [PATCH 36/45] Remove the hardcoded poetry version from contributing guide (#17002) --- changelog.d/17002.doc | 1 + docs/development/contributing_guide.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17002.doc diff --git a/changelog.d/17002.doc b/changelog.d/17002.doc new file mode 100644 index 0000000000..a1b2ce5e31 --- /dev/null +++ b/changelog.d/17002.doc @@ -0,0 +1 @@ +Remove recommendation for a specific poetry version from contributing guide. diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index df6451273a..ac8a7039d1 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -68,7 +68,7 @@ Of their installation methods, we recommend ```shell pip install --user pipx -pipx install poetry==1.5.1 # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147 +pipx install poetry ``` but see poetry's [installation instructions](https://python-poetry.org/docs/#installation) From 4c98aad47bbeda7a2015333304952a1e8c920b1e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 17:36:40 +0000 Subject: [PATCH 37/45] Bump netaddr from 0.9.0 to 1.2.1 (#17006) --- poetry.lock | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index ecb291d207..5332774705 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1575,15 +1575,18 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, [[package]] name = "netaddr" -version = "0.9.0" +version = "1.2.1" description = "A network address manipulation library for Python" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "netaddr-0.9.0-py3-none-any.whl", hash = "sha256:5148b1055679d2a1ec070c521b7db82137887fabd6d7e37f5199b44f775c3bb1"}, - {file = "netaddr-0.9.0.tar.gz", hash = "sha256:7b46fa9b1a2d71fd5de9e4a3784ef339700a53a08c8040f08baf5f1194da0128"}, + {file = "netaddr-1.2.1-py3-none-any.whl", hash = "sha256:bd9e9534b0d46af328cf64f0e5a23a5a43fca292df221c85580b27394793496e"}, + {file = "netaddr-1.2.1.tar.gz", hash = "sha256:6eb8fedf0412c6d294d06885c110de945cf4d22d2b510d0404f4e06950857987"}, ] +[package.extras] +nicer-shell = ["ipython"] + [[package]] name = "opentracing" version = "2.4.0" From 
db95b7551504b394cee4f875865dc713239662f9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 21 Mar 2024 17:48:16 +0000 Subject: [PATCH 38/45] Patch the db conn pool sooner in tests (#17017) When running unit tests, we patch the database connection pool so that it runs queries "synchronously". This is ok, except that if any queries are launched before we do the patching, those queries get left in limbo and never complete. To fix this, let's change the way we do the switcheroo, by patching out the method which creates the connection pool in the first place. --- changelog.d/17017.misc | 1 + tests/server.py | 112 ++++++++++++++++++++++------------------- 2 files changed, 60 insertions(+), 53 deletions(-) create mode 100644 changelog.d/17017.misc diff --git a/changelog.d/17017.misc b/changelog.d/17017.misc new file mode 100644 index 0000000000..c8af23d67a --- /dev/null +++ b/changelog.d/17017.misc @@ -0,0 +1 @@ +Patch the db conn pool sooner in tests. diff --git a/tests/server.py b/tests/server.py index f0cc4206b0..4aaa91e956 100644 --- a/tests/server.py +++ b/tests/server.py @@ -47,7 +47,7 @@ from typing import ( Union, cast, ) -from unittest.mock import Mock +from unittest.mock import Mock, patch import attr from incremental import Version @@ -55,6 +55,7 @@ from typing_extensions import ParamSpec from zope.interface import implementer import twisted +from twisted.enterprise import adbapi from twisted.internet import address, tcp, threads, udp from twisted.internet._resolver import SimpleResolverComplexifier from twisted.internet.defer import Deferred, fail, maybeDeferred, succeed @@ -94,8 +95,8 @@ from synapse.module_api.callbacks.third_party_event_rules_callbacks import ( ) from synapse.server import HomeServer from synapse.storage import DataStore -from synapse.storage.database import LoggingDatabaseConnection -from synapse.storage.engines import create_engine +from synapse.storage.database import LoggingDatabaseConnection, make_pool +from synapse.storage.engines import BaseDatabaseEngine, create_engine from synapse.storage.prepare_database import prepare_database from synapse.types import ISynapseReactor, JsonDict from synapse.util import Clock @@ -670,6 +671,53 @@ def validate_connector(connector: tcp.Connector, expected_ip: str) -> None: ) +def make_fake_db_pool( + reactor: ISynapseReactor, + db_config: DatabaseConnectionConfig, + engine: BaseDatabaseEngine, +) -> adbapi.ConnectionPool: + """Wrapper for `make_pool` which builds a pool which runs db queries synchronously. + + For more deterministic testing, we don't use a regular db connection pool: instead + we run all db queries synchronously on the test reactor's main thread. This function + is a drop-in replacement for the normal `make_pool` which builds such a connection + pool. 
+ """ + pool = make_pool(reactor, db_config, engine) + + def runWithConnection( + func: Callable[..., R], *args: Any, **kwargs: Any + ) -> Awaitable[R]: + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runWithConnection, + func, + *args, + **kwargs, + ) + + def runInteraction( + desc: str, func: Callable[..., R], *args: Any, **kwargs: Any + ) -> Awaitable[R]: + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runInteraction, + desc, + func, + *args, + **kwargs, + ) + + pool.runWithConnection = runWithConnection # type: ignore[method-assign] + pool.runInteraction = runInteraction # type: ignore[assignment] + # Replace the thread pool with a threadless 'thread' pool + pool.threadpool = ThreadPool(reactor) + pool.running = True + return pool + + class ThreadPool: """ Threadless thread pool. @@ -706,52 +754,6 @@ class ThreadPool: return d -def _make_test_homeserver_synchronous(server: HomeServer) -> None: - """ - Make the given test homeserver's database interactions synchronous. - """ - - clock = server.get_clock() - - for database in server.get_datastores().databases: - pool = database._db_pool - - def runWithConnection( - func: Callable[..., R], *args: Any, **kwargs: Any - ) -> Awaitable[R]: - return threads.deferToThreadPool( - pool._reactor, - pool.threadpool, - pool._runWithConnection, - func, - *args, - **kwargs, - ) - - def runInteraction( - desc: str, func: Callable[..., R], *args: Any, **kwargs: Any - ) -> Awaitable[R]: - return threads.deferToThreadPool( - pool._reactor, - pool.threadpool, - pool._runInteraction, - desc, - func, - *args, - **kwargs, - ) - - pool.runWithConnection = runWithConnection # type: ignore[method-assign] - pool.runInteraction = runInteraction # type: ignore[assignment] - # Replace the thread pool with a threadless 'thread' pool - pool.threadpool = ThreadPool(clock._reactor) - pool.running = True - - # We've just changed the Databases to run DB transactions on the same - # thread, so we need to disable the dedicated thread behaviour. - server.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False - - def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]: clock = ThreadedMemoryReactorClock() hs_clock = Clock(clock) @@ -1067,7 +1069,14 @@ def setup_test_homeserver( # Mock TLS hs.tls_server_context_factory = Mock() - hs.setup() + # Patch `make_pool` before initialising the database, to make database transactions + # synchronous for testing. + with patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool): + hs.setup() + + # Since we've changed the databases to run DB transactions on the same + # thread, we need to stop the event fetcher hogging that one thread. + hs.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False if USE_POSTGRES_FOR_TESTS: database_pool = hs.get_datastores().databases[0] @@ -1137,9 +1146,6 @@ def setup_test_homeserver( hs.get_auth_handler().validate_hash = validate_hash # type: ignore[assignment] - # Make the threadpool and database transactions synchronous for testing. 
- _make_test_homeserver_synchronous(hs) - # Load any configured modules into the homeserver module_api = hs.get_module_api() for module, module_config in hs.config.modules.loaded_modules: From 3ab9e6d524960784f6f108e57ac86a921912ea84 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Thu, 21 Mar 2024 18:49:44 +0100 Subject: [PATCH 39/45] OIDC: try to JWT decode userinfo response if JSON parsing failed (#16972) --- changelog.d/16972.feature | 1 + synapse/handlers/oidc.py | 32 ++++++++++++++++++++++++++++---- 2 files changed, 29 insertions(+), 4 deletions(-) create mode 100644 changelog.d/16972.feature diff --git a/changelog.d/16972.feature b/changelog.d/16972.feature new file mode 100644 index 0000000000..0f28cbbcd6 --- /dev/null +++ b/changelog.d/16972.feature @@ -0,0 +1 @@ +OIDC: try to JWT decode userinfo response if JSON parsing failed. diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index ba67cc4768..ab28dc800e 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -829,14 +829,38 @@ class OidcProvider: logger.debug("Using the OAuth2 access_token to request userinfo") metadata = await self.load_metadata() - resp = await self._http_client.get_json( + resp = await self._http_client.request( + "GET", metadata["userinfo_endpoint"], - headers={"Authorization": ["Bearer {}".format(token["access_token"])]}, + headers=Headers( + {"Authorization": ["Bearer {}".format(token["access_token"])]} + ), ) - logger.debug("Retrieved user info from userinfo endpoint: %r", resp) + body = await readBody(resp) - return UserInfo(resp) + content_type_headers = resp.headers.getRawHeaders("Content-Type") + assert content_type_headers + # We use `startswith` because the header value can contain the `charset` parameter + # even if it is useless, and Twisted doesn't take care of that for us. 
+ if content_type_headers[0].startswith("application/jwt"): + alg_values = metadata.get( + "id_token_signing_alg_values_supported", ["RS256"] + ) + jwt = JsonWebToken(alg_values) + jwk_set = await self.load_jwks() + try: + decoded_resp = jwt.decode(body, key=jwk_set) + except ValueError: + logger.info("Reloading JWKS after decode error") + jwk_set = await self.load_jwks(force=True) # try reloading the jwks + decoded_resp = jwt.decode(body, key=jwk_set) + else: + decoded_resp = json_decoder.decode(body.decode("utf-8")) + + logger.debug("Retrieved user info from userinfo endpoint: %r", decoded_resp) + + return UserInfo(decoded_resp) async def _verify_jwt( self, From 6d3ffdd42142832259a9bbb0073dd4f75980fc9b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 17:50:18 +0000 Subject: [PATCH 40/45] Bump dawidd6/action-download-artifact from 3.1.2 to 3.1.4 (#17008) --- .github/workflows/docs-pr-netlify.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index 3fdf170f20..277083ded3 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@71072fbb1229e1317f1a8de6b04206afb461bd67 # v3.1.2 + uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3.1.4 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} From bef765b2624a16a9f896aab670b4509c8b06628e Mon Sep 17 00:00:00 2001 From: Sam Wedgwood <28223854+swedgwood@users.noreply.github.com> Date: Thu, 21 Mar 2024 17:55:44 +0000 Subject: [PATCH 41/45] generate configuration with correct user in start.py for docker (#16978) --- changelog.d/16978.docker | 1 + docker/start.py | 15 ++++++++------- 2 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 changelog.d/16978.docker diff --git a/changelog.d/16978.docker b/changelog.d/16978.docker new file mode 100644 index 0000000000..2e095c6aa9 --- /dev/null +++ b/changelog.d/16978.docker @@ -0,0 +1 @@ +Updated start.py to generate config using the correct user ID when running as root (fixes #16824, #15202). diff --git a/docker/start.py b/docker/start.py index 12c444da9a..818a5355ca 100755 --- a/docker/start.py +++ b/docker/start.py @@ -160,11 +160,6 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml") data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") - if ownership is not None: - # make sure that synapse has perms to write to the data dir. - log(f"Setting ownership on {data_dir} to {ownership}") - subprocess.run(["chown", ownership, data_dir], check=True) - # create a suitable log config from our template log_config_file = "%s/%s.log.config" % (config_dir, server_name) if not os.path.exists(log_config_file): @@ -189,9 +184,15 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> "--generate-config", "--open-private-ports", ] + + if ownership is not None: + # make sure that synapse has perms to write to the data dir. 
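+        # (The chown has to happen while we are still running as root: the
+        # `gosu` prepended to `args` below drops privileges before the config
+        # generation itself runs.)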
+        log(f"Setting ownership on {data_dir} to {ownership}")
+        subprocess.run(["chown", ownership, data_dir], check=True)
+        args = ["gosu", ownership] + args
+
     # log("running %s" % (args, ))
-    flush_buffers()
-    os.execv(sys.executable, args)
+    subprocess.run(args, check=True)
 
 
 def main(args: List[str], environ: MutableMapping[str, str]) -> None:

From f7a3ebe44d24e18c79847104f103a07f4a575a5f Mon Sep 17 00:00:00 2001
From: Hanadi
Date: Thu, 21 Mar 2024 21:05:54 +0300
Subject: [PATCH 42/45] Fix reject knocks on deactivating account (#17010)

---
 changelog.d/17010.bugfix                     |  1 +
 synapse/handlers/deactivate_account.py       | 25 +++---
 synapse/storage/databases/main/roommember.py | 16 ++++
 tests/handlers/test_deactivate_account.py    | 87 +++++++++++++++++++-
 4 files changed, 117 insertions(+), 12 deletions(-)
 create mode 100644 changelog.d/17010.bugfix

diff --git a/changelog.d/17010.bugfix b/changelog.d/17010.bugfix
new file mode 100644
index 0000000000..0e1495f744
--- /dev/null
+++ b/changelog.d/17010.bugfix
@@ -0,0 +1 @@
+Fix a bug where a user's pending knocks at rooms were not retracted when their account was deactivated. Contributed by @hanadi92.
\ No newline at end of file
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index de5ed5d5b6..b13c4b6cb9 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -18,9 +18,11 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
+import itertools
 import logging
 from typing import TYPE_CHECKING, Optional
 
+from synapse.api.constants import Membership
 from synapse.api.errors import SynapseError
 from synapse.handlers.device import DeviceHandler
 from synapse.metrics.background_process_metrics import run_as_background_process
@@ -168,9 +170,9 @@ class DeactivateAccountHandler:
         # parts users from rooms (if it isn't already running)
         self._start_user_parting()
 
-        # Reject all pending invites for the user, so that the user doesn't show up in the
-        # "invited" section of rooms' members list.
-        await self._reject_pending_invites_for_user(user_id)
+        # Reject all pending invites and knocks for the user, so that the
+        # user doesn't show up in the "invited" section of rooms' members list.
+        await self._reject_pending_invites_and_knocks_for_user(user_id)
 
         # Remove all information on the user from the account_validity table.
         if self._account_validity_enabled:
@@ -194,34 +196,37 @@ class DeactivateAccountHandler:
 
         return identity_server_supports_unbinding
 
-    async def _reject_pending_invites_for_user(self, user_id: str) -> None:
-        """Reject pending invites addressed to a given user ID.
+    async def _reject_pending_invites_and_knocks_for_user(self, user_id: str) -> None:
+        """Reject pending invites and knocks addressed to a given user ID.
 
         Args:
-            user_id: The user ID to reject pending invites for.
+            user_id: The user ID to reject pending invites and knocks for.
""" user = UserID.from_string(user_id) pending_invites = await self.store.get_invited_rooms_for_local_user(user_id) + pending_knocks = await self.store.get_knocked_at_rooms_for_local_user(user_id) - for room in pending_invites: + for room in itertools.chain(pending_invites, pending_knocks): try: await self._room_member_handler.update_membership( create_requester(user, authenticated_entity=self._server_name), user, room.room_id, - "leave", + Membership.LEAVE, ratelimit=False, require_consent=False, ) logger.info( - "Rejected invite for deactivated user %r in room %r", + "Rejected %r for deactivated user %r in room %r", + room.membership, user_id, room.room_id, ) except Exception: logger.exception( - "Failed to reject invite for user %r in room %r:" + "Failed to reject %r for user %r in room %r:" " ignoring and continuing", + room.membership, user_id, room.room_id, ) diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 5b0daffa46..5d51502595 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -369,6 +369,22 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): user_id, [Membership.INVITE] ) + async def get_knocked_at_rooms_for_local_user( + self, user_id: str + ) -> Sequence[RoomsForUser]: + """Get all the rooms the *local* user has knocked at. + + Args: + user_id: The user ID. + + Returns: + A list of RoomsForUser. + """ + + return await self.get_rooms_for_local_user_where_membership_is( + user_id, [Membership.KNOCK] + ) + async def get_invite_for_local_user_in_room( self, user_id: str, room_id: str ) -> Optional[RoomsForUser]: diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py index 25ac68e6c5..b3f9e50f0f 100644 --- a/tests/handlers/test_deactivate_account.py +++ b/tests/handlers/test_deactivate_account.py @@ -21,12 +21,13 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.constants import AccountDataTypes +from synapse.api.constants import AccountDataTypes, EventTypes, JoinRules, Membership from synapse.push.rulekinds import PRIORITY_CLASS_MAP from synapse.rest import admin -from synapse.rest.client import account, login +from synapse.rest.client import account, login, room from synapse.server import HomeServer from synapse.synapse_rust.push import PushRule +from synapse.types import UserID, create_requester from synapse.util import Clock from tests.unittest import HomeserverTestCase @@ -37,6 +38,7 @@ class DeactivateAccountTestCase(HomeserverTestCase): login.register_servlets, admin.register_servlets, account.register_servlets, + room.register_servlets, ] def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: @@ -44,6 +46,7 @@ class DeactivateAccountTestCase(HomeserverTestCase): self.user = self.register_user("user", "pass") self.token = self.login("user", "pass") + self.handler = self.hs.get_room_member_handler() def _deactivate_my_account(self) -> None: """ @@ -341,3 +344,83 @@ class DeactivateAccountTestCase(HomeserverTestCase): self.assertEqual(req.code, 401, req) self.assertEqual(req.json_body["flows"], [{"stages": ["m.login.password"]}]) + + def test_deactivate_account_rejects_invites(self) -> None: + """ + Tests that deactivating an account rejects its invite memberships + """ + # Create another user and room just for the invitation + another_user = self.register_user("another_user", "pass") + token = self.login("another_user", "pass") + 
room_id = self.helper.create_room_as(another_user, is_public=False, tok=token)
+
+        # Invite user to the created room
+        invite_event, _ = self.get_success(
+            self.handler.update_membership(
+                requester=create_requester(another_user),
+                target=UserID.from_string(self.user),
+                room_id=room_id,
+                action=Membership.INVITE,
+            )
+        )
+
+        # Check that the invite exists
+        invite = self.get_success(
+            self._store.get_invited_rooms_for_local_user(self.user)
+        )
+        self.assertEqual(invite[0].event_id, invite_event)
+
+        # Deactivate the user
+        self._deactivate_my_account()
+
+        # Check that the deactivated user has no invites in the room
+        after_deactivate_invite = self.get_success(
+            self._store.get_invited_rooms_for_local_user(self.user)
+        )
+        self.assertEqual(len(after_deactivate_invite), 0)
+
+    def test_deactivate_account_rejects_knocks(self) -> None:
+        """
+        Tests that deactivating an account rejects its knock memberships
+        """
+        # Create another user and room just for the knock
+        another_user = self.register_user("another_user", "pass")
+        token = self.login("another_user", "pass")
+        room_id = self.helper.create_room_as(
+            another_user,
+            is_public=False,
+            tok=token,
+        )
+
+        # Allow room to be knocked at
+        self.helper.send_state(
+            room_id,
+            EventTypes.JoinRules,
+            {"join_rule": JoinRules.KNOCK},
+            tok=token,
+        )
+
+        # Have the user knock at the created room
+        knock_event, _ = self.get_success(
+            self.handler.update_membership(
+                requester=create_requester(self.user),
+                target=UserID.from_string(self.user),
+                room_id=room_id,
+                action=Membership.KNOCK,
+            )
+        )
+
+        # Check that the knock exists
+        knocks = self.get_success(
+            self._store.get_knocked_at_rooms_for_local_user(self.user)
+        )
+        self.assertEqual(knocks[0].event_id, knock_event)
+
+        # Deactivate the user
+        self._deactivate_my_account()
+
+        # Check that the deactivated user has no knocks
+        after_deactivate_knocks = self.get_success(
+            self._store.get_knocked_at_rooms_for_local_user(self.user)
+        )
+        self.assertEqual(len(after_deactivate_knocks), 0)

From 9ad49e7ecf2fb489fb66445b9aed8489ee8f2a6d Mon Sep 17 00:00:00 2001
From: SpiritCroc
Date: Thu, 21 Mar 2024 19:43:07 +0100
Subject: [PATCH 43/45] Do not refuse to set read_marker if previous event_id
 is in wrong room (#16990)

---
 changelog.d/16990.bugfix                        | 1 +
 synapse/handlers/read_marker.py                 | 4 ++--
 synapse/storage/databases/main/events_worker.py | 8 +++++---
 tests/rest/client/test_read_marker.py           | 8 ++++----
 4 files changed, 12 insertions(+), 9 deletions(-)
 create mode 100644 changelog.d/16990.bugfix

diff --git a/changelog.d/16990.bugfix b/changelog.d/16990.bugfix
new file mode 100644
index 0000000000..76f9dd2e36
--- /dev/null
+++ b/changelog.d/16990.bugfix
@@ -0,0 +1 @@
+Fix case in which `m.fully_read` marker would not get updated. Contributed by @SpiritCroc.
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index 135a662267..fb39c8e04b 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -55,12 +55,12 @@ class ReadMarkerHandler:
                 should_update = True
 
         # Get event ordering, this also ensures we know about the event
-        event_ordering = await self.store.get_event_ordering(event_id)
+        event_ordering = await self.store.get_event_ordering(event_id, room_id)
 
         if existing_read_marker:
             try:
                 old_event_ordering = await self.store.get_event_ordering(
-                    existing_read_marker["event_id"]
+                    existing_read_marker["event_id"], room_id
                 )
             except SynapseError:
                 # Old event no longer exists, assume new is ahead. This may
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 81fccfbccb..e39d4b9624 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -1995,16 +1995,18 @@ class EventsWorkerStore(SQLBaseStore):
         return rows, to_token, True
 
     @cached(max_entries=5000)
-    async def get_event_ordering(self, event_id: str) -> Tuple[int, int]:
+    async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, int]:
         res = await self.db_pool.simple_select_one(
             table="events",
             retcols=["topological_ordering", "stream_ordering"],
-            keyvalues={"event_id": event_id},
+            keyvalues={"event_id": event_id, "room_id": room_id},
             allow_none=True,
         )
 
         if not res:
-            raise SynapseError(404, "Could not find event %s" % (event_id,))
+            raise SynapseError(
+                404, "Could not find event %s in room %s" % (event_id, room_id)
+            )
 
         return int(res[0]), int(res[1])
 
diff --git a/tests/rest/client/test_read_marker.py b/tests/rest/client/test_read_marker.py
index 2fe350e1e8..0b4ad685b3 100644
--- a/tests/rest/client/test_read_marker.py
+++ b/tests/rest/client/test_read_marker.py
@@ -78,7 +78,7 @@ class ReadMarkerTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "POST",
-            "/rooms/!abc:beep/read_markers",
+            f"/rooms/{room_id}/read_markers",
             content={
                 "m.fully_read": event_id_1,
             },
@@ -90,7 +90,7 @@ class ReadMarkerTestCase(unittest.HomeserverTestCase):
         event_id_2 = send_message()
         channel = self.make_request(
             "POST",
-            "/rooms/!abc:beep/read_markers",
+            f"/rooms/{room_id}/read_markers",
             content={
                 "m.fully_read": event_id_2,
             },
@@ -123,7 +123,7 @@ class ReadMarkerTestCase(unittest.HomeserverTestCase):
 
         channel = self.make_request(
             "POST",
-            "/rooms/!abc:beep/read_markers",
+            f"/rooms/{room_id}/read_markers",
             content={
                 "m.fully_read": event_id_1,
             },
@@ -142,7 +142,7 @@ class ReadMarkerTestCase(unittest.HomeserverTestCase):
         event_id_2 = send_message()
         channel = self.make_request(
             "POST",
-            "/rooms/!abc:beep/read_markers",
+            f"/rooms/{room_id}/read_markers",
             content={
                 "m.fully_read": event_id_2,
             },

From b7af076ab524c018992a05b031cd8e3533ab59d4 Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Fri, 22 Mar 2024 11:35:11 +0100
Subject: [PATCH 44/45] Add OIDC config to add extra parameters to the
 authorize URL (#16971)

---
 changelog.d/16971.feature                  |  1 +
 .../configuration/config_documentation.md  |  5 +++++
 synapse/config/oidc.py                     |  6 ++++++
 synapse/handlers/oidc.py                   | 20 +++++++++++++------
 4 files changed, 26 insertions(+), 6 deletions(-)
 create mode 100644 changelog.d/16971.feature

diff --git a/changelog.d/16971.feature b/changelog.d/16971.feature
new file mode 100644
index 0000000000..9fdc88a322
--- /dev/null
+++ b/changelog.d/16971.feature
@@ -0,0 +1 @@
+Add an OIDC config to specify extra parameters for the authorization grant URL. It can be useful to pass an ACR value, for example.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 638a459ed5..985f90c8a1 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -3349,6 +3349,9 @@ Options for each entry include:
    not included in `scopes`. Set to `userinfo_endpoint` to always use the
    userinfo endpoint.
 
+* `additional_authorization_parameters`: String to string dictionary that will be passed as
+  additional parameters to the authorization grant URL.
+
 * `allow_existing_users`: set to true to allow a user logging in via OIDC to
    match a pre-existing account instead of failing. This could be used if
    switching from password logins to OIDC. Defaults to false.
@@ -3473,6 +3476,8 @@ oidc_providers:
      token_endpoint: "https://accounts.example.com/oauth2/token"
      userinfo_endpoint: "https://accounts.example.com/userinfo"
      jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
+     additional_authorization_parameters:
+       acr_values: 2fa
      skip_verification: true
      enable_registration: true
      user_mapping_provider:
diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py
index 102dba0219..d0a03baf55 100644
--- a/synapse/config/oidc.py
+++ b/synapse/config/oidc.py
@@ -342,6 +342,9 @@ def _parse_oidc_config_dict(
         user_mapping_provider_config=user_mapping_provider_config,
         attribute_requirements=attribute_requirements,
         enable_registration=oidc_config.get("enable_registration", True),
+        additional_authorization_parameters=oidc_config.get(
+            "additional_authorization_parameters", {}
+        ),
     )
 
 
@@ -444,3 +447,6 @@ class OidcProviderConfig:
 
     # Whether automatic registrations are enabled in the OIDC flow. Defaults to True
     enable_registration: bool
+
+    # Additional parameters that will be passed to the authorization grant URL
+    additional_authorization_parameters: Mapping[str, str]
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index ab28dc800e..22b59829fa 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -453,6 +453,10 @@ class OidcProvider:
         # optional brand identifier for this auth provider
         self.idp_brand = provider.idp_brand
 
+        self.additional_authorization_parameters = (
+            provider.additional_authorization_parameters
+        )
+
         self._sso_handler = hs.get_sso_handler()
         self._device_handler = hs.get_device_handler()
 
@@ -1006,17 +1010,21 @@ class OidcProvider:
 
         metadata = await self.load_metadata()
 
+        additional_authorization_parameters = dict(
+            self.additional_authorization_parameters
+        )
         # Automatically enable PKCE if it is supported.
-        extra_grant_values = {}
         if metadata.get("code_challenge_methods_supported"):
             code_verifier = generate_token(48)
 
             # Note that we verified the server supports S256 earlier (in
             # OidcProvider._validate_metadata).
-            extra_grant_values = {
-                "code_challenge_method": "S256",
-                "code_challenge": create_s256_code_challenge(code_verifier),
-            }
+            additional_authorization_parameters.update(
+                {
+                    "code_challenge_method": "S256",
+                    "code_challenge": create_s256_code_challenge(code_verifier),
+                }
+            )
 
         cookie = self._macaroon_generaton.generate_oidc_session_token(
             state=state,
@@ -1055,7 +1063,7 @@ class OidcProvider:
             scope=self._scopes,
             state=state,
             nonce=nonce,
-            **extra_grant_values,
+            **additional_authorization_parameters,
         )
 
     async def handle_oidc_callback(

From b5322b4daf2e13310200e57eb427568cb6a92ddf Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Fri, 22 Mar 2024 13:24:11 +0000
Subject: [PATCH 45/45] Ensure that pending to-device events are sent over
 federation at startup (#16925)

Fixes https://github.com/element-hq/synapse/issues/16680, as well as a
related bug, where servers which we had *never* successfully sent an event
to would not be retried.

In order to fix the case of pending to-device messages, we hook into the
existing `wake_destinations_needing_catchup` process by extending it to look
for destinations that have pending to-device messages. The federation
transmission loop then attempts to send the pending to-device messages as
normal.
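
In outline, the extended wakeup pass behaves like the sketch below. This is a
condensed illustration rather than the exact implementation: the real loop
also rate-limits the wakeups, `self.store.get_catch_up_outstanding_destinations`
is assumed here to be the async wrapper around the
`_get_catch_up_outstanding_destinations_txn` query shown below, and
`wake_destination` is the sender's existing per-destination wakeup (a no-op if
a transmission loop is already running for that server):

```python
from typing import Optional

async def _wake_destinations_needing_catchup(self) -> None:
    # Walk the destination list in lexicographic batches of up to 25,
    # resuming after the last destination seen in the previous batch.
    last_processed: Optional[str] = None
    while True:
        # Now matches destinations with un-sent PDUs *or* pending to-device
        # messages, skipping any that are still inside a backoff window.
        destinations = await self.store.get_catch_up_outstanding_destinations(
            last_processed
        )
        if not destinations:
            break
        for destination in destinations:
            last_processed = destination
            self.wake_destination(destination)
```

Separately, the loop is now scheduled with the new `looping_call_now` helper
rather than `looping_call`, so the first pass runs immediately at startup
instead of only after the initial 60-second delay.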
--- changelog.d/16925.bugfix | 1 + synapse/federation/sender/__init__.py | 14 +-- .../storage/databases/main/transactions.py | 103 ++++++++++++++---- synapse/util/__init__.py | 44 +++++++- 4 files changed, 129 insertions(+), 33 deletions(-) create mode 100644 changelog.d/16925.bugfix diff --git a/changelog.d/16925.bugfix b/changelog.d/16925.bugfix new file mode 100644 index 0000000000..781cad4b5b --- /dev/null +++ b/changelog.d/16925.bugfix @@ -0,0 +1 @@ +Fix a bug which meant that, under certain circumstances, we might never retry sending events or to-device messages over federation after a failure. diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 9ed6fc98b5..1888480881 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -192,10 +192,9 @@ sent_pdus_destination_dist_total = Counter( ) # Time (in s) to wait before trying to wake up destinations that have -# catch-up outstanding. This will also be the delay applied at startup -# before trying the same. +# catch-up outstanding. # Please note that rate limiting still applies, so while the loop is -# executed every X seconds the destinations may not be wake up because +# executed every X seconds the destinations may not be woken up because # they are being rate limited following previous attempt failures. WAKEUP_RETRY_PERIOD_SEC = 60 @@ -428,18 +427,17 @@ class FederationSender(AbstractFederationSender): / hs.config.ratelimiting.federation_rr_transactions_per_room_per_second ) + self._external_cache = hs.get_external_cache() + self._destination_wakeup_queue = _DestinationWakeupQueue(self, self.clock) + # Regularly wake up destinations that have outstanding PDUs to be caught up - self.clock.looping_call( + self.clock.looping_call_now( run_as_background_process, WAKEUP_RETRY_PERIOD_SEC * 1000.0, "wake_destinations_needing_catchup", self._wake_destinations_needing_catchup, ) - self._external_cache = hs.get_external_cache() - - self._destination_wakeup_queue = _DestinationWakeupQueue(self, self.clock) - def _get_per_destination_queue(self, destination: str) -> PerDestinationQueue: """Get or create a PerDestinationQueue for the given destination diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index c91c44818f..08e0241f68 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -423,8 +423,11 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore): self, after_destination: Optional[str] ) -> List[str]: """ - Gets at most 25 destinations which have outstanding PDUs to be caught up, - and are not being backed off from + Get a list of destinations we should retry transaction sending to. + + Returns up to 25 destinations which have outstanding PDUs or to-device messages, + and are not subject to a backoff. 
+ Args: after_destination: If provided, all destinations must be lexicographically greater @@ -448,30 +451,86 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore): def _get_catch_up_outstanding_destinations_txn( txn: LoggingTransaction, now_time_ms: int, after_destination: Optional[str] ) -> List[str]: + # We're looking for destinations which satisfy either of the following + # conditions: + # + # * There is at least one room where we have an event that we have not yet + # sent to them, indicated by a row in `destination_rooms` with a + # `stream_ordering` older than the `last_successful_stream_ordering` + # (if any) in `destinations`, or: + # + # * There is at least one to-device message outstanding for the destination, + # indicated by a row in `device_federation_outbox`. + # + # Of course, that may produce destinations where we are already busy sending + # the relevant PDU or to-device message, but in that case, waking up the + # sender will just be a no-op. + # + # From those two lists, we need to *exclude* destinations which are subject + # to a backoff (ie, where `destinations.retry_last_ts + destinations.retry_interval` + # is in the future). There is also an edge-case where, if the server was + # previously shut down in the middle of the first send attempt to a given + # destination, there may be no row in `destinations` at all; we need to include + # such rows in the output, which means we need to left-join rather than + # inner-join against `destinations`. + # + # The two sources of destinations (`destination_rooms` and + # `device_federation_outbox`) are queried separately and UNIONed; but the list + # may be very long, and we don't want to return all the rows at once. We + # therefore sort the output and just return the first 25 rows. Obviously that + # means there is no point in either of the inner queries returning more than + # 25 results, since any further results are certain to be dropped by the outer + # LIMIT. In order to help the query-optimiser understand that, we *also* sort + # and limit the *inner* queries, hence we express them as CTEs rather than + # sub-queries. + # + # (NB: we make sure to do the top-level sort and limit on the database, rather + # than making two queries and combining the result in Python. We could otherwise + # suffer from slight differences in sort order between Python and the database, + # which would make the `after_destination` condition unreliable.) + q = """ - SELECT DISTINCT destination FROM destinations - INNER JOIN destination_rooms USING (destination) - WHERE - stream_ordering > last_successful_stream_ordering - AND destination > ? - AND ( - retry_last_ts IS NULL OR - retry_last_ts + retry_interval < ? - ) - ORDER BY destination - LIMIT 25 - """ - txn.execute( - q, - ( - # everything is lexicographically greater than "" so this gives - # us the first batch of up to 25. - after_destination or "", - now_time_ms, - ), + WITH pdu_destinations AS ( + SELECT DISTINCT destination FROM destination_rooms + LEFT JOIN destinations USING (destination) + WHERE + destination > ? + AND destination_rooms.stream_ordering > COALESCE(destinations.last_successful_stream_ordering, 0) + AND ( + destinations.retry_last_ts IS NULL OR + destinations.retry_last_ts + destinations.retry_interval < ? + ) + ORDER BY destination + LIMIT 25 + ), to_device_destinations AS ( + SELECT DISTINCT destination FROM device_federation_outbox + LEFT JOIN destinations USING (destination) + WHERE + destination > ? 
+                        AND (
+                            destinations.retry_last_ts IS NULL OR
+                            destinations.retry_last_ts + destinations.retry_interval < ?
+                        )
+                    ORDER BY destination
+                    LIMIT 25
+                )
+                SELECT destination FROM pdu_destinations
+                UNION SELECT destination FROM to_device_destinations
+                ORDER BY destination
+                LIMIT 25
+            """
+
+            # everything is lexicographically greater than "" so this gives
+            # us the first batch of up to 25.
+            after_destination = after_destination or ""
+
+            txn.execute(
+                q,
+                (after_destination, now_time_ms, after_destination, now_time_ms),
+            )
 
             destinations = [row[0] for row in txn]
+
             return destinations
 
     async def get_destinations_paginate(
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 9e374354ec..e0d876e84b 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -117,7 +117,11 @@ class Clock:
         return int(self.time() * 1000)
 
     def looping_call(
-        self, f: Callable[P, object], msec: float, *args: P.args, **kwargs: P.kwargs
+        self,
+        f: Callable[P, object],
+        msec: float,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> LoopingCall:
         """Call a function repeatedly.
 
@@ -134,12 +138,46 @@ class Clock:
         Args:
             f: The function to call repeatedly.
             msec: How long to wait between calls in milliseconds.
-            *args: Postional arguments to pass to function.
+            *args: Positional arguments to pass to function.
             **kwargs: Keyword arguments to pass to function.
         """
+        return self._looping_call_common(f, msec, False, *args, **kwargs)
+
+    def looping_call_now(
+        self,
+        f: Callable[P, object],
+        msec: float,
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> LoopingCall:
+        """Call a function immediately, and then repeatedly thereafter.
+
+        As with `looping_call`: subsequent calls are not scheduled until after
+        the Awaitable returned by a previous call has finished.
+
+        Also as with `looping_call`: the function is called with no logcontext and
+        you probably want to wrap it in `run_as_background_process`.
+
+        Args:
+            f: The function to call repeatedly.
+            msec: How long to wait between calls in milliseconds.
+            *args: Positional arguments to pass to function.
+            **kwargs: Keyword arguments to pass to function.
+        """
+        return self._looping_call_common(f, msec, True, *args, **kwargs)
+
+    def _looping_call_common(
+        self,
+        f: Callable[P, object],
+        msec: float,
+        now: bool,
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> LoopingCall:
+        """Common functionality for `looping_call` and `looping_call_now`"""
         call = task.LoopingCall(f, *args, **kwargs)
         call.clock = self._reactor
-        d = call.start(msec / 1000.0, now=False)
+        d = call.start(msec / 1000.0, now=now)
         d.addErrback(log_failure, "Looping call died", consumeErrors=False)
         return call
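
For reference, the federation sender change above boils down to the call below;
the only behavioural difference from `looping_call` is the `now=True` that
`_looping_call_common` passes to `LoopingCall.start`, which fires the first run
immediately at startup:

```python
# From FederationSender.__init__ (see the federation/sender diff above): kick
# off the catch-up scan straight away, then repeat every WAKEUP_RETRY_PERIOD_SEC.
self.clock.looping_call_now(
    run_as_background_process,
    WAKEUP_RETRY_PERIOD_SEC * 1000.0,
    "wake_destinations_needing_catchup",
    self._wake_destinations_needing_catchup,
)
```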