From 646cb6ff2412bfc5180b5d748b95dbe6ef790a0b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 25 Apr 2024 13:25:26 +0100 Subject: [PATCH 01/19] Add type annotation to `visited_chains` (#17125) This should fix CI on `develop`. Broke in https://github.com/element-hq/synapse/commit/0fe9e1f7dafa80f3e02762f7ae75cefee5b3316c, presumably due to a `mypy` dependency upgrade. --- changelog.d/17125.misc | 1 + synapse/storage/databases/main/events.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17125.misc diff --git a/changelog.d/17125.misc b/changelog.d/17125.misc new file mode 100644 index 0000000000..a7d9ce6491 --- /dev/null +++ b/changelog.d/17125.misc @@ -0,0 +1 @@ +Fix type annotation for `visited_chains` after `mypy` upgrade. \ No newline at end of file diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 1e731d56bd..990698aa5c 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -2454,7 +2454,7 @@ class _LinkMap: return target_seq <= src_seq # We have to graph traverse the links to check for indirect paths. - visited_chains = collections.Counter() + visited_chains: Dict[int, int] = collections.Counter() search = [(src_chain, src_seq)] while search: chain, seq = search.pop() From 2e92b718d5ea063af4b2dc9412dcd2ce625b4987 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 25 Apr 2024 14:50:12 +0200 Subject: [PATCH 02/19] MSC4108 implementation (#17056) Co-authored-by: Hugh Nimmo-Smith Co-authored-by: Hugh Nimmo-Smith Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- Cargo.lock | 164 ++++++++- changelog.d/17056.feature | 1 + rust/Cargo.toml | 4 + rust/src/lib.rs | 2 + rust/src/rendezvous/mod.rs | 315 +++++++++++++++++ rust/src/rendezvous/session.rs | 91 +++++ synapse/config/experimental.py | 12 +- synapse/http/server.py | 5 +- synapse/rest/client/rendezvous.py | 16 + synapse/rest/client/versions.py | 9 +- synapse/rest/synapse/client/__init__.py | 4 + synapse/rest/synapse/client/rendezvous.py | 58 ++++ synapse/server.py | 5 + synapse/synapse_rust/rendezvous.pyi | 30 ++ tests/rest/client/test_rendezvous.py | 401 +++++++++++++++++++++- tests/server.py | 7 +- tests/unittest.py | 5 + 17 files changed, 1120 insertions(+), 9 deletions(-) create mode 100644 changelog.d/17056.feature create mode 100644 rust/src/rendezvous/mod.rs create mode 100644 rust/src/rendezvous/session.rs create mode 100644 synapse/rest/synapse/client/rendezvous.py create mode 100644 synapse/synapse_rust/rendezvous.pyi diff --git a/Cargo.lock b/Cargo.lock index faac6b3c8a..4474dfb903 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -59,6 +59,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + [[package]] name = "bytes" version = "1.6.0" @@ -92,9 +98,9 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.5" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", @@ -117,6 +123,19 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.2.14" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + [[package]] name = "headers" version = "0.4.0" @@ -182,6 +201,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -266,6 +294,12 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + [[package]] name = "proc-macro2" version = "1.0.76" @@ -369,6 +403,36 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -461,6 +525,17 @@ dependencies = [ "digest", ] +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "smallvec" version = "1.10.0" @@ -489,6 +564,7 @@ name = "synapse" version = "0.1.0" dependencies = [ "anyhow", + "base64", "blake2", "bytes", "headers", @@ -496,12 +572,15 @@ dependencies = [ "http", "lazy_static", "log", + "mime", "pyo3", "pyo3-log", "pythonize", "regex", "serde", "serde_json", + "sha2", + "ulid", ] [[package]] @@ -516,6 +595,17 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +[[package]] +name = "ulid" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259" +dependencies = [ + "getrandom", + "rand", + "web-time", +] + [[package]] name = "unicode-ident" version = "1.0.5" @@ -534,6 +624,76 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "windows-sys" version = "0.36.1" diff --git a/changelog.d/17056.feature b/changelog.d/17056.feature new file mode 100644 index 0000000000..b4cbe849e4 --- /dev/null +++ b/changelog.d/17056.feature @@ -0,0 +1 @@ +Implement the rendezvous mechanism described by MSC4108. diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 9ac766182b..d41a216d1c 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -23,11 +23,13 @@ name = "synapse.synapse_rust" [dependencies] anyhow = "1.0.63" +base64 = "0.21.7" bytes = "1.6.0" headers = "0.4.0" http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" +mime = "0.3.17" pyo3 = { version = "0.20.0", features = [ "macros", "anyhow", @@ -37,8 +39,10 @@ pyo3 = { version = "0.20.0", features = [ pyo3-log = "0.9.0" pythonize = "0.20.0" regex = "1.6.0" +sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } serde_json = "1.0.85" +ulid = "1.1.2" [features] extension-module = ["pyo3/extension-module"] diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 36a3d64528..9bd1f17ad9 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -7,6 +7,7 @@ pub mod errors; pub mod events; pub mod http; pub mod push; +pub mod rendezvous; lazy_static! { static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init(); @@ -45,6 +46,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> { acl::register_module(py, m)?; push::register_module(py, m)?; events::register_module(py, m)?; + rendezvous::register_module(py, m)?; Ok(()) } diff --git a/rust/src/rendezvous/mod.rs b/rust/src/rendezvous/mod.rs new file mode 100644 index 0000000000..c0f5d8b600 --- /dev/null +++ b/rust/src/rendezvous/mod.rs @@ -0,0 +1,315 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. 
+ * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . + * + */ + +use std::{ + collections::{BTreeMap, HashMap}, + time::{Duration, SystemTime}, +}; + +use bytes::Bytes; +use headers::{ + AccessControlAllowOrigin, AccessControlExposeHeaders, CacheControl, ContentLength, ContentType, + HeaderMapExt, IfMatch, IfNoneMatch, Pragma, +}; +use http::{header::ETAG, HeaderMap, Response, StatusCode, Uri}; +use mime::Mime; +use pyo3::{ + exceptions::PyValueError, pyclass, pymethods, types::PyModule, Py, PyAny, PyObject, PyResult, + Python, ToPyObject, +}; +use ulid::Ulid; + +use self::session::Session; +use crate::{ + errors::{NotFoundError, SynapseError}, + http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt}, +}; + +mod session; + +// n.b. Because OPTIONS requests are handled by the Python code, we don't need to set Access-Control-Allow-Headers. +fn prepare_headers(headers: &mut HeaderMap, session: &Session) { + headers.typed_insert(AccessControlAllowOrigin::ANY); + headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG])); + headers.typed_insert(Pragma::no_cache()); + headers.typed_insert(CacheControl::new().with_no_store()); + headers.typed_insert(session.etag()); + headers.typed_insert(session.expires()); + headers.typed_insert(session.last_modified()); +} + +#[pyclass] +struct RendezvousHandler { + base: Uri, + clock: PyObject, + sessions: BTreeMap, + capacity: usize, + max_content_length: u64, + ttl: Duration, +} + +impl RendezvousHandler { + /// Check the input headers of a request which sets data for a session, and return the content type. + fn check_input_headers(&self, headers: &HeaderMap) -> PyResult { + let ContentLength(content_length) = headers.typed_get_required()?; + + if content_length > self.max_content_length { + return Err(SynapseError::new( + StatusCode::PAYLOAD_TOO_LARGE, + "Payload too large".to_owned(), + "M_TOO_LARGE", + None, + None, + )); + } + + let content_type: ContentType = headers.typed_get_required()?; + + // Content-Type must be text/plain + if content_type != ContentType::text() { + return Err(SynapseError::new( + StatusCode::BAD_REQUEST, + "Content-Type must be text/plain".to_owned(), + "M_INVALID_PARAM", + None, + None, + )); + } + + Ok(content_type.into()) + } + + /// Evict expired sessions and remove the oldest sessions until we're under the capacity. + fn evict(&mut self, now: SystemTime) { + // First remove all the entries which expired + self.sessions.retain(|_, session| !session.expired(now)); + + // Then we remove the oldest entires until we're under the limit + while self.sessions.len() > self.capacity { + self.sessions.pop_first(); + } + } +} + +#[pymethods] +impl RendezvousHandler { + #[new] + #[pyo3(signature = (homeserver, /, capacity=100, max_content_length=4*1024, eviction_interval=60*1000, ttl=60*1000))] + fn new( + py: Python<'_>, + homeserver: &PyAny, + capacity: usize, + max_content_length: u64, + eviction_interval: u64, + ttl: u64, + ) -> PyResult> { + let base: String = homeserver + .getattr("config")? + .getattr("server")? + .getattr("public_baseurl")? 
+ .extract()?; + let base = Uri::try_from(format!("{base}_synapse/client/rendezvous")) + .map_err(|_| PyValueError::new_err("Invalid base URI"))?; + + let clock = homeserver.call_method0("get_clock")?.to_object(py); + + // Construct a Python object so that we can get a reference to the + // evict method and schedule it to run. + let self_ = Py::new( + py, + Self { + base, + clock, + sessions: BTreeMap::new(), + capacity, + max_content_length, + ttl: Duration::from_millis(ttl), + }, + )?; + + let evict = self_.getattr(py, "_evict")?; + homeserver.call_method0("get_clock")?.call_method( + "looping_call", + (evict, eviction_interval), + None, + )?; + + Ok(self_) + } + + fn _evict(&mut self, py: Python<'_>) -> PyResult<()> { + let clock = self.clock.as_ref(py); + let now: u64 = clock.call_method0("time_msec")?.extract()?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + self.evict(now); + + Ok(()) + } + + fn handle_post(&mut self, py: Python<'_>, twisted_request: &PyAny) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let content_type = self.check_input_headers(request.headers())?; + + let clock = self.clock.as_ref(py); + let now: u64 = clock.call_method0("time_msec")?.extract()?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + // We trigger an immediate eviction if we're at 2x the capacity + if self.sessions.len() >= self.capacity * 2 { + self.evict(now); + } + + // Generate a new ULID for the session from the current time. + let id = Ulid::from_datetime(now); + + let uri = format!("{base}/{id}", base = self.base); + + let body = request.into_body(); + + let session = Session::new(body, content_type, now, self.ttl); + + let response = serde_json::json!({ + "url": uri, + }) + .to_string(); + + let mut response = Response::new(response.as_bytes()); + *response.status_mut() = StatusCode::CREATED; + response.headers_mut().typed_insert(ContentType::json()); + prepare_headers(response.headers_mut(), &session); + http_response_to_twisted(twisted_request, response)?; + + self.sessions.insert(id, session); + + Ok(()) + } + + fn handle_get(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let if_none_match: Option = request.headers().typed_get_optional()?; + + let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?; + let session = self + .sessions + .get(&id) + .filter(|s| !s.expired(now)) + .ok_or_else(NotFoundError::new)?; + + if let Some(if_none_match) = if_none_match { + if !if_none_match.precondition_passes(&session.etag()) { + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::NOT_MODIFIED; + prepare_headers(response.headers_mut(), session); + http_response_to_twisted(twisted_request, response)?; + return Ok(()); + } + } + + let mut response = Response::new(session.data()); + *response.status_mut() = StatusCode::OK; + let headers = response.headers_mut(); + prepare_headers(headers, session); + headers.typed_insert(session.content_type()); + headers.typed_insert(session.content_length()); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } + + fn handle_put(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let content_type = 
self.check_input_headers(request.headers())?; + + let if_match: IfMatch = request.headers().typed_get_required()?; + + let data = request.into_body(); + + let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?; + let session = self + .sessions + .get_mut(&id) + .filter(|s| !s.expired(now)) + .ok_or_else(NotFoundError::new)?; + + if !if_match.precondition_passes(&session.etag()) { + let mut headers = HeaderMap::new(); + prepare_headers(&mut headers, session); + + let mut additional_fields = HashMap::with_capacity(1); + additional_fields.insert( + String::from("org.matrix.msc4108.errcode"), + String::from("M_CONCURRENT_WRITE"), + ); + + return Err(SynapseError::new( + StatusCode::PRECONDITION_FAILED, + "ETag does not match".to_owned(), + "M_UNKNOWN", // Would be M_CONCURRENT_WRITE + Some(additional_fields), + Some(headers), + )); + } + + session.update(data, content_type, now); + + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::ACCEPTED; + prepare_headers(response.headers_mut(), session); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } + + fn handle_delete(&mut self, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let _request = http_request_from_twisted(twisted_request)?; + + let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?; + let _session = self.sessions.remove(&id).ok_or_else(NotFoundError::new)?; + + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::NO_CONTENT; + response + .headers_mut() + .typed_insert(AccessControlAllowOrigin::ANY); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } +} + +pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> { + let child_module = PyModule::new(py, "rendezvous")?; + + child_module.add_class::()?; + + m.add_submodule(child_module)?; + + // We need to manually add the module to sys.modules to make `from + // synapse.synapse_rust import rendezvous` work. + py.import("sys")? + .getattr("modules")? + .set_item("synapse.synapse_rust.rendezvous", child_module)?; + + Ok(()) +} diff --git a/rust/src/rendezvous/session.rs b/rust/src/rendezvous/session.rs new file mode 100644 index 0000000000..179304edfe --- /dev/null +++ b/rust/src/rendezvous/session.rs @@ -0,0 +1,91 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . + */ + +use std::time::{Duration, SystemTime}; + +use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine as _}; +use bytes::Bytes; +use headers::{ContentLength, ContentType, ETag, Expires, LastModified}; +use mime::Mime; +use sha2::{Digest, Sha256}; + +/// A single session, containing data, metadata, and expiry information. +pub struct Session { + hash: [u8; 32], + data: Bytes, + content_type: Mime, + last_modified: SystemTime, + expires: SystemTime, +} + +impl Session { + /// Create a new session with the given data, content type, and time-to-live. 
+ pub fn new(data: Bytes, content_type: Mime, now: SystemTime, ttl: Duration) -> Self { + let hash = Sha256::digest(&data).into(); + Self { + hash, + data, + content_type, + expires: now + ttl, + last_modified: now, + } + } + + /// Returns true if the session has expired at the given time. + pub fn expired(&self, now: SystemTime) -> bool { + self.expires <= now + } + + /// Update the session with new data, content type, and last modified time. + pub fn update(&mut self, data: Bytes, content_type: Mime, now: SystemTime) { + self.hash = Sha256::digest(&data).into(); + self.data = data; + self.content_type = content_type; + self.last_modified = now; + } + + /// Returns the Content-Type header of the session. + pub fn content_type(&self) -> ContentType { + self.content_type.clone().into() + } + + /// Returns the Content-Length header of the session. + pub fn content_length(&self) -> ContentLength { + ContentLength(self.data.len() as _) + } + + /// Returns the ETag header of the session. + pub fn etag(&self) -> ETag { + let encoded = URL_SAFE_NO_PAD.encode(self.hash); + // SAFETY: Base64 encoding is URL-safe, so ETag-safe + format!("\"{encoded}\"") + .parse() + .expect("base64-encoded hash should be URL-safe") + } + + /// Returns the Last-Modified header of the session. + pub fn last_modified(&self) -> LastModified { + self.last_modified.into() + } + + /// Returns the Expires header of the session. + pub fn expires(&self) -> Expires { + self.expires.into() + } + + /// Returns the current data stored in the session. + pub fn data(&self) -> Bytes { + self.data.clone() + } +} diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 353ae23f91..baa3580f29 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -413,12 +413,22 @@ class ExperimentalConfig(Config): ) # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code + self.msc4108_enabled = experimental.get("msc4108_enabled", False) + self.msc4108_delegation_endpoint: Optional[str] = experimental.get( "msc4108_delegation_endpoint", None ) - if self.msc4108_delegation_endpoint is not None and not self.msc3861.enabled: + if ( + self.msc4108_enabled or self.msc4108_delegation_endpoint is not None + ) and not self.msc3861.enabled: raise ConfigError( "MSC4108 requires MSC3861 to be enabled", ("experimental", "msc4108_delegation_endpoint"), ) + + if self.msc4108_delegation_endpoint is not None and self.msc4108_enabled: + raise ConfigError( + "You cannot have MSC4108 both enabled and delegated at the same time", + ("experimental", "msc4108_delegation_endpoint"), + ) diff --git a/synapse/http/server.py b/synapse/http/server.py index 45b2cbffcd..211795dc39 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -909,8 +909,9 @@ def set_cors_headers(request: "SynapseRequest") -> None: request.setHeader( b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS" ) - if request.path is not None and request.path.startswith( - b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous" + if request.path is not None and ( + request.path == b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous" + or request.path.startswith(b"/_synapse/client/rendezvous") ): request.setHeader( b"Access-Control-Allow-Headers", diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py index ed06a29987..143f057651 100644 --- a/synapse/rest/client/rendezvous.py +++ b/synapse/rest/client/rendezvous.py @@ -97,9 +97,25 @@ class 
MSC4108DelegationRendezvousServlet(RestServlet): ) +class MSC4108RendezvousServlet(RestServlet): + PATTERNS = client_patterns( + "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True + ) + + def __init__(self, hs: "HomeServer") -> None: + super().__init__() + self._handler = hs.get_rendezvous_handler() + + def on_POST(self, request: SynapseRequest) -> None: + self._handler.handle_post(request) + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc3886_endpoint is not None: MSC3886RendezvousServlet(hs).register(http_server) + if hs.config.experimental.msc4108_enabled: + MSC4108RendezvousServlet(hs).register(http_server) + if hs.config.experimental.msc4108_delegation_endpoint is not None: MSC4108DelegationRendezvousServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 638d4c45ae..fa453a3b02 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -141,8 +141,13 @@ class VersionsRestServlet(RestServlet): # Allows clients to handle push for encrypted events. "org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events, # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code - "org.matrix.msc4108": self.config.experimental.msc4108_delegation_endpoint - is not None, + "org.matrix.msc4108": ( + self.config.experimental.msc4108_enabled + or ( + self.config.experimental.msc4108_delegation_endpoint + is not None + ) + ), }, }, ) diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index 31544867d4..ba6576d4db 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -26,6 +26,7 @@ from twisted.web.resource import Resource from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource from synapse.rest.synapse.client.pick_idp import PickIdpResource from synapse.rest.synapse.client.pick_username import pick_username_resource +from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource from synapse.rest.synapse.client.sso_register import SsoRegisterResource from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource @@ -76,6 +77,9 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc # To be removed in Synapse v1.32.0. resources["/_matrix/saml2"] = res + if hs.config.experimental.msc4108_enabled: + resources["/_synapse/client/rendezvous"] = MSC4108RendezvousSessionResource(hs) + return resources diff --git a/synapse/rest/synapse/client/rendezvous.py b/synapse/rest/synapse/client/rendezvous.py new file mode 100644 index 0000000000..5216d30d1f --- /dev/null +++ b/synapse/rest/synapse/client/rendezvous.py @@ -0,0 +1,58 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+# +# + +import logging +from typing import TYPE_CHECKING, List + +from synapse.api.errors import UnrecognizedRequestError +from synapse.http.server import DirectServeJsonResource +from synapse.http.site import SynapseRequest + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class MSC4108RendezvousSessionResource(DirectServeJsonResource): + isLeaf = True + + def __init__(self, hs: "HomeServer") -> None: + super().__init__() + self._handler = hs.get_rendezvous_handler() + + async def _async_render_GET(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_get(request, session_id) + + def _async_render_PUT(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_put(request, session_id) + + def _async_render_DELETE(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_delete(request, session_id) diff --git a/synapse/server.py b/synapse/server.py index 6d5a18fb1d..95e319d2e6 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -143,6 +143,7 @@ from synapse.state import StateHandler, StateResolutionHandler from synapse.storage import Databases from synapse.storage.controllers import StorageControllers from synapse.streams.events import EventSources +from synapse.synapse_rust.rendezvous import RendezvousHandler from synapse.types import DomainSpecificString, ISynapseReactor from synapse.util import Clock from synapse.util.distributor import Distributor @@ -859,6 +860,10 @@ class HomeServer(metaclass=abc.ABCMeta): def get_room_forgetter_handler(self) -> RoomForgetterHandler: return RoomForgetterHandler(self) + @cache_in_self + def get_rendezvous_handler(self) -> RendezvousHandler: + return RendezvousHandler(self) + @cache_in_self def get_outbound_redis_connection(self) -> "ConnectionHandler": """ diff --git a/synapse/synapse_rust/rendezvous.pyi b/synapse/synapse_rust/rendezvous.pyi new file mode 100644 index 0000000000..03eae3a196 --- /dev/null +++ b/synapse/synapse_rust/rendezvous.pyi @@ -0,0 +1,30 @@ +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . + +from twisted.web.iweb import IRequest + +from synapse.server import HomeServer + +class RendezvousHandler: + def __init__( + self, + homeserver: HomeServer, + /, + capacity: int = 100, + max_content_length: int = 4 * 1024, # MSC4108 specifies 4KB + eviction_interval: int = 60 * 1000, + ttl: int = 60 * 1000, + ) -> None: ... + def handle_post(self, request: IRequest) -> None: ... + def handle_get(self, request: IRequest, session_id: str) -> None: ... + def handle_put(self, request: IRequest, session_id: str) -> None: ... 
+ def handle_delete(self, request: IRequest, session_id: str) -> None: ... diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index c84704c090..0ab754a11a 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -2,7 +2,7 @@ # This file is licensed under the Affero General Public License (AGPL) version 3. # # Copyright 2022 The Matrix.org Foundation C.I.C. -# Copyright (C) 2023 New Vector, Ltd +# Copyright (C) 2023-2024 New Vector, Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -19,9 +19,14 @@ # # +from typing import Dict +from urllib.parse import urlparse + from twisted.test.proto_helpers import MemoryReactor +from twisted.web.resource import Resource from synapse.rest.client import rendezvous +from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource from synapse.server import HomeServer from synapse.util import Clock @@ -42,6 +47,12 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): self.hs = self.setup_test_homeserver() return self.hs + def create_resource_dict(self) -> Dict[str, Resource]: + return { + **super().create_resource_dict(), + "/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs), + } + def test_disabled(self) -> None: channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) self.assertEqual(channel.code, 404) @@ -75,3 +86,391 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None) self.assertEqual(channel.code, 307) self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"]) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108(self) -> None: + """ + Test the MSC4108 rendezvous endpoint, including: + - Creating a session + - Getting the data back + - Updating the data + - Deleting the data + - ETag handling + """ + # We can post arbitrary data to the endpoint + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + self.assertSubstring("/_synapse/client/rendezvous/", channel.json_body["url"]) + headers = dict(channel.headers.getAllRawHeaders()) + self.assertIn(b"ETag", headers) + self.assertIn(b"Expires", headers) + self.assertEqual(headers[b"Content-Type"], [b"application/json"]) + self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"]) + self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"]) + self.assertEqual(headers[b"Cache-Control"], [b"no-store"]) + self.assertEqual(headers[b"Pragma"], [b"no-cache"]) + self.assertIn("url", channel.json_body) + self.assertTrue(channel.json_body["url"].startswith("https://")) + + url = urlparse(channel.json_body["url"]) + session_endpoint = url.path + etag = headers[b"ETag"][0] + + # We can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 200) + headers = dict(channel.headers.getAllRawHeaders()) + 
self.assertEqual(headers[b"ETag"], [etag]) + self.assertIn(b"Expires", headers) + self.assertEqual(headers[b"Content-Type"], [b"text/plain"]) + self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"]) + self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"]) + self.assertEqual(headers[b"Cache-Control"], [b"no-store"]) + self.assertEqual(headers[b"Pragma"], [b"no-cache"]) + self.assertEqual(channel.text_body, "foo=bar") + + # We can make sure the data hasn't changed + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + custom_headers=[("If-None-Match", etag)], + ) + + self.assertEqual(channel.code, 304) + + # We can update the data + channel = self.make_request( + "PUT", + session_endpoint, + "foo=baz", + content_type=b"text/plain", + access_token=None, + custom_headers=[("If-Match", etag)], + ) + + self.assertEqual(channel.code, 202) + headers = dict(channel.headers.getAllRawHeaders()) + old_etag = etag + new_etag = headers[b"ETag"][0] + + # If we try to update it again with the old etag, it should fail + channel = self.make_request( + "PUT", + session_endpoint, + "bar=baz", + content_type=b"text/plain", + access_token=None, + custom_headers=[("If-Match", old_etag)], + ) + + self.assertEqual(channel.code, 412) + self.assertEqual(channel.json_body["errcode"], "M_UNKNOWN") + self.assertEqual( + channel.json_body["org.matrix.msc4108.errcode"], "M_CONCURRENT_WRITE" + ) + + # If we try to get with the old etag, we should get the updated data + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + custom_headers=[("If-None-Match", old_etag)], + ) + + self.assertEqual(channel.code, 200) + headers = dict(channel.headers.getAllRawHeaders()) + self.assertEqual(headers[b"ETag"], [new_etag]) + self.assertEqual(channel.text_body, "foo=baz") + + # We can delete the data + channel = self.make_request( + "DELETE", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 204) + + # If we try to get the data again, it should fail + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 404) + self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND") + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_expiration(self) -> None: + """ + Test that entries are evicted after a TTL. 
+ """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Advance the clock, TTL of entries is 1 minute + self.reactor.advance(60) + + # Get the data back, it should be gone + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 404) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_capacity(self) -> None: + """ + Test that a capacity limit is enforced on the rendezvous sessions, as old + entries are evicted at an interval when the limit is reached. + """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Start a lot of new sessions + for _ in range(100): + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + + # Get the data back, it should still be there, as the eviction hasn't run yet + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 200) + + # Advance the clock, as it will trigger the eviction + self.reactor.advance(1) + + # Get the data back, it should be gone + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_hard_capacity(self) -> None: + """ + Test that a hard capacity limit is enforced on the rendezvous sessions, as old + entries are evicted immediately when the limit is reached. 
+ """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + # We advance the clock to make sure that this entry is the "lowest" in the session list + self.reactor.advance(1) + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Start a lot of new sessions + for _ in range(200): + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + + # Get the data back, it should already be gone as we hit the hard limit + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 404) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_content_type(self) -> None: + """ + Test that the content-type is restricted to text/plain. + """ + # We cannot post invalid content-type arbitrary data to the endpoint + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_is_form=True, + access_token=None, + ) + self.assertEqual(channel.code, 400) + self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM") + + # Make a valid request + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + url = urlparse(channel.json_body["url"]) + session_endpoint = url.path + headers = dict(channel.headers.getAllRawHeaders()) + etag = headers[b"ETag"][0] + + # We can't update the data with invalid content-type + channel = self.make_request( + "PUT", + session_endpoint, + "foo=baz", + content_is_form=True, + access_token=None, + custom_headers=[("If-Match", etag)], + ) + self.assertEqual(channel.code, 400) + self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM") diff --git a/tests/server.py b/tests/server.py index 4aaa91e956..434be3d22c 100644 --- a/tests/server.py +++ b/tests/server.py @@ -351,6 +351,7 @@ def make_request( request: Type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, + content_type: Optional[bytes] = None, content_is_form: bool = False, await_result: bool = True, custom_headers: Optional[Iterable[CustomHeaderType]] = None, @@ -373,6 +374,8 @@ def make_request( with the usual REST API path, if it doesn't contain it. federation_auth_origin: if set to not-None, we will add a fake Authorization header pretenting to be the given server name. + content_type: The content-type to use for the request. If not set then will default to + application/json unless content_is_form is true. content_is_form: Whether the content is URL encoded form data. Adds the 'Content-Type': 'application/x-www-form-urlencoded' header. await_result: whether to wait for the request to complete rendering. 
If true, @@ -436,7 +439,9 @@ def make_request( ) if content: - if content_is_form: + if content_type is not None: + req.requestHeaders.addRawHeader(b"Content-Type", content_type) + elif content_is_form: req.requestHeaders.addRawHeader( b"Content-Type", b"application/x-www-form-urlencoded" ) diff --git a/tests/unittest.py b/tests/unittest.py index 6fe0cd4a2d..e6aad9ed40 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -523,6 +523,7 @@ class HomeserverTestCase(TestCase): request: Type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, + content_type: Optional[bytes] = None, content_is_form: bool = False, await_result: bool = True, custom_headers: Optional[Iterable[CustomHeaderType]] = None, @@ -541,6 +542,9 @@ class HomeserverTestCase(TestCase): with the usual REST API path, if it doesn't contain it. federation_auth_origin: if set to not-None, we will add a fake Authorization header pretenting to be the given server name. + + content_type: The content-type to use for the request. If not set then will default to + application/json unless content_is_form is true. content_is_form: Whether the content is URL encoded form data. Adds the 'Content-Type': 'application/x-www-form-urlencoded' header. @@ -566,6 +570,7 @@ class HomeserverTestCase(TestCase): request, shorthand, federation_auth_origin, + content_type, content_is_form, await_result, custom_headers, From 47773232b034c0d7b72bb7419a01e772509c8814 Mon Sep 17 00:00:00 2001 From: Till <2353100+S7evinK@users.noreply.github.com> Date: Thu, 25 Apr 2024 15:25:31 +0200 Subject: [PATCH 03/19] Redact membership events if the user requested erasure upon deactivating (#17076) Fixes #15355 by redacting all membership events before leaving rooms. --- changelog.d/17076.bugfix | 1 + synapse/handlers/deactivate_account.py | 13 ++++++- synapse/storage/databases/main/roommember.py | 22 ++++++++++++ tests/handlers/test_deactivate_account.py | 37 ++++++++++++++++++++ 4 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17076.bugfix diff --git a/changelog.d/17076.bugfix b/changelog.d/17076.bugfix new file mode 100644 index 0000000000..a111ea2b88 --- /dev/null +++ b/changelog.d/17076.bugfix @@ -0,0 +1 @@ +Redact membership events if the user requested erasure upon deactivating. 
\ No newline at end of file diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b13c4b6cb9..11ac377680 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -261,11 +261,22 @@ class DeactivateAccountHandler: user = UserID.from_string(user_id) rooms_for_user = await self.store.get_rooms_for_user(user_id) + requester = create_requester(user, authenticated_entity=self._server_name) + should_erase = await self.store.is_user_erased(user_id) + for room_id in rooms_for_user: logger.info("User parter parting %r from %r", user_id, room_id) try: + # Before parting the user, redact all membership events if requested + if should_erase: + event_ids = await self.store.get_membership_event_ids_for_user( + user_id, room_id + ) + for event_id in event_ids: + await self.store.expire_event(event_id) + await self._room_member_handler.update_membership( - create_requester(user, authenticated_entity=self._server_name), + requester, user, room_id, "leave", diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 5d51502595..9fddbb2caf 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1234,6 +1234,28 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): return set(room_ids) + async def get_membership_event_ids_for_user( + self, user_id: str, room_id: str + ) -> Set[str]: + """Get all event_ids for the given user and room. + + Args: + user_id: The user ID to get the event IDs for. + room_id: The room ID to look up events for. + + Returns: + Set of event IDs + """ + + event_ids = await self.db_pool.simple_select_onecol( + table="room_memberships", + keyvalues={"user_id": user_id, "room_id": room_id}, + retcol="event_id", + desc="get_membership_event_ids_for_user", + ) + + return set(event_ids) + @cached(max_entries=5000) async def _get_membership_from_event_id( self, member_event_id: str diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py index b3f9e50f0f..c698771a06 100644 --- a/tests/handlers/test_deactivate_account.py +++ b/tests/handlers/test_deactivate_account.py @@ -424,3 +424,40 @@ class DeactivateAccountTestCase(HomeserverTestCase): self._store.get_knocked_at_rooms_for_local_user(self.user) ) self.assertEqual(len(after_deactivate_knocks), 0) + + def test_membership_is_redacted_upon_deactivation(self) -> None: + """ + Tests that room membership events are redacted if erasure is requested. + """ + # Create a room + room_id = self.helper.create_room_as( + self.user, + is_public=True, + tok=self.token, + ) + + # Change the displayname + membership_event, _ = self.get_success( + self.handler.update_membership( + requester=create_requester(self.user), + target=UserID.from_string(self.user), + room_id=room_id, + action=Membership.JOIN, + content={"displayname": "Hello World!"}, + ) + ) + + # Deactivate the account + self._deactivate_my_account() + + # Get the all membership event IDs + membership_event_ids = self.get_success( + self._store.get_membership_event_ids_for_user(self.user, room_id=room_id) + ) + + # Get the events incl. 
JSON + events = self.get_success(self._store.get_events_as_list(membership_event_ids)) + + # Validate that there is no displayname in any of the events + for event in events: + self.assertTrue("displayname" not in event.content) From 48a90c697b7d6faf1d44273dfe5c4e76467a0bc4 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Thu, 25 Apr 2024 15:55:18 +0100 Subject: [PATCH 04/19] 1.106.0rc1 --- CHANGES.md | 42 +++++++++++++++++++++++++++++++++++++++ changelog.d/16819.feature | 1 - changelog.d/16920.bugfix | 1 - changelog.d/16923.bugfix | 1 - changelog.d/16943.bugfix | 1 - changelog.d/17032.misc | 1 - changelog.d/17036.misc | 1 - changelog.d/17056.feature | 1 - changelog.d/17069.doc | 1 - changelog.d/17076.bugfix | 1 - changelog.d/17079.misc | 1 - changelog.d/17081.misc | 1 - changelog.d/17086.feature | 1 - changelog.d/17096.misc | 1 - changelog.d/17099.doc | 1 - changelog.d/17125.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 18 files changed, 49 insertions(+), 16 deletions(-) delete mode 100644 changelog.d/16819.feature delete mode 100644 changelog.d/16920.bugfix delete mode 100644 changelog.d/16923.bugfix delete mode 100644 changelog.d/16943.bugfix delete mode 100644 changelog.d/17032.misc delete mode 100644 changelog.d/17036.misc delete mode 100644 changelog.d/17056.feature delete mode 100644 changelog.d/17069.doc delete mode 100644 changelog.d/17076.bugfix delete mode 100644 changelog.d/17079.misc delete mode 100644 changelog.d/17081.misc delete mode 100644 changelog.d/17086.feature delete mode 100644 changelog.d/17096.misc delete mode 100644 changelog.d/17099.doc delete mode 100644 changelog.d/17125.misc diff --git a/CHANGES.md b/CHANGES.md index ec5bc22a98..913e6fbc8c 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,45 @@ +# Synapse 1.106.0rc1 (2024-04-25) + +### Features + +- Send an email if the address is already bound to an user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819)) +- Implement the rendezvous mechanism described by MSC4108. ([\#17056](https://github.com/element-hq/synapse/issues/17056)) +- Support delegating the rendezvous mechanism described MSC4108 to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) + +### Bugfixes + +- Add validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. ([\#16920](https://github.com/element-hq/synapse/issues/16920)) +- Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. ([\#16923](https://github.com/element-hq/synapse/issues/16923)) +- Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. ([\#16943](https://github.com/element-hq/synapse/issues/16943)) +- Redact membership events if the user requested erasure upon deactivating. ([\#17076](https://github.com/element-hq/synapse/issues/17076)) + +### Improved Documentation + +- Add a prompt in the contributing guide to manually configure icu4c. ([\#17069](https://github.com/element-hq/synapse/issues/17069)) +- Clarify what part of message retention is still experimental. ([\#17099](https://github.com/element-hq/synapse/issues/17099)) + +### Internal Changes + +- Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). ([\#17032](https://github.com/element-hq/synapse/issues/17032), [\#17096](https://github.com/element-hq/synapse/issues/17096)) +- Fix mypy with latest Twisted release. 
([\#17036](https://github.com/element-hq/synapse/issues/17036)) +- Bump minimum supported Rust version to 1.66.0. ([\#17079](https://github.com/element-hq/synapse/issues/17079)) +- Add helpers to transform Twisted requests to Rust http Requests/Responses. ([\#17081](https://github.com/element-hq/synapse/issues/17081)) +- Fix type annotation for `visited_chains` after `mypy` upgrade. ([\#17125](https://github.com/element-hq/synapse/issues/17125)) + + + +### Updates to locked dependencies + +* Bump anyhow from 1.0.81 to 1.0.82. ([\#17095](https://github.com/element-hq/synapse/issues/17095)) +* Bump peaceiris/actions-gh-pages from 3.9.3 to 4.0.0. ([\#17087](https://github.com/element-hq/synapse/issues/17087)) +* Bump peaceiris/actions-mdbook from 1.2.0 to 2.0.0. ([\#17089](https://github.com/element-hq/synapse/issues/17089)) +* Bump pyasn1-modules from 0.3.0 to 0.4.0. ([\#17093](https://github.com/element-hq/synapse/issues/17093)) +* Bump pygithub from 2.2.0 to 2.3.0. ([\#17092](https://github.com/element-hq/synapse/issues/17092)) +* Bump ruff from 0.3.5 to 0.3.7. ([\#17094](https://github.com/element-hq/synapse/issues/17094)) +* Bump sigstore/cosign-installer from 3.4.0 to 3.5.0. ([\#17088](https://github.com/element-hq/synapse/issues/17088)) +* Bump twine from 4.0.2 to 5.0.0. ([\#17091](https://github.com/element-hq/synapse/issues/17091)) +* Bump types-pillow from 10.2.0.20240406 to 10.2.0.20240415. ([\#17090](https://github.com/element-hq/synapse/issues/17090)) + # Synapse 1.105.1 (2024-04-23) ## Security advisory diff --git a/changelog.d/16819.feature b/changelog.d/16819.feature deleted file mode 100644 index 1af6f466b7..0000000000 --- a/changelog.d/16819.feature +++ /dev/null @@ -1 +0,0 @@ -Send an email if the address is already bound to an user account. diff --git a/changelog.d/16920.bugfix b/changelog.d/16920.bugfix deleted file mode 100644 index 460f4f7160..0000000000 --- a/changelog.d/16920.bugfix +++ /dev/null @@ -1 +0,0 @@ -Adds validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. diff --git a/changelog.d/16923.bugfix b/changelog.d/16923.bugfix deleted file mode 100644 index bd6f24925e..0000000000 --- a/changelog.d/16923.bugfix +++ /dev/null @@ -1 +0,0 @@ -Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. \ No newline at end of file diff --git a/changelog.d/16943.bugfix b/changelog.d/16943.bugfix deleted file mode 100644 index 4360741132..0000000000 --- a/changelog.d/16943.bugfix +++ /dev/null @@ -1 +0,0 @@ -Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. \ No newline at end of file diff --git a/changelog.d/17032.misc b/changelog.d/17032.misc deleted file mode 100644 index b03f6f42e5..0000000000 --- a/changelog.d/17032.misc +++ /dev/null @@ -1 +0,0 @@ -Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/17036.misc b/changelog.d/17036.misc deleted file mode 100644 index 3296668059..0000000000 --- a/changelog.d/17036.misc +++ /dev/null @@ -1 +0,0 @@ -Fix mypy with latest Twisted release. diff --git a/changelog.d/17056.feature b/changelog.d/17056.feature deleted file mode 100644 index b4cbe849e4..0000000000 --- a/changelog.d/17056.feature +++ /dev/null @@ -1 +0,0 @@ -Implement the rendezvous mechanism described by MSC4108. 
diff --git a/changelog.d/17069.doc b/changelog.d/17069.doc deleted file mode 100644 index f5a7f599d1..0000000000 --- a/changelog.d/17069.doc +++ /dev/null @@ -1 +0,0 @@ -Add a prompt in the contributing guide to manually configure icu4c. diff --git a/changelog.d/17076.bugfix b/changelog.d/17076.bugfix deleted file mode 100644 index a111ea2b88..0000000000 --- a/changelog.d/17076.bugfix +++ /dev/null @@ -1 +0,0 @@ -Redact membership events if the user requested erasure upon deactivating. \ No newline at end of file diff --git a/changelog.d/17079.misc b/changelog.d/17079.misc deleted file mode 100644 index 340e40d194..0000000000 --- a/changelog.d/17079.misc +++ /dev/null @@ -1 +0,0 @@ -Bump minimum supported Rust version to 1.66.0. diff --git a/changelog.d/17081.misc b/changelog.d/17081.misc deleted file mode 100644 index d1ab69126c..0000000000 --- a/changelog.d/17081.misc +++ /dev/null @@ -1 +0,0 @@ -Add helpers to transform Twisted requests to Rust http Requests/Responses. diff --git a/changelog.d/17086.feature b/changelog.d/17086.feature deleted file mode 100644 index 08b407d316..0000000000 --- a/changelog.d/17086.feature +++ /dev/null @@ -1 +0,0 @@ -Support delegating the rendezvous mechanism described MSC4108 to an external implementation. diff --git a/changelog.d/17096.misc b/changelog.d/17096.misc deleted file mode 100644 index b03f6f42e5..0000000000 --- a/changelog.d/17096.misc +++ /dev/null @@ -1 +0,0 @@ -Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/17099.doc b/changelog.d/17099.doc deleted file mode 100644 index d8d10fa53a..0000000000 --- a/changelog.d/17099.doc +++ /dev/null @@ -1 +0,0 @@ -Clarify what part of message retention is still experimental. diff --git a/changelog.d/17125.misc b/changelog.d/17125.misc deleted file mode 100644 index a7d9ce6491..0000000000 --- a/changelog.d/17125.misc +++ /dev/null @@ -1 +0,0 @@ -Fix type annotation for `visited_chains` after `mypy` upgrade. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 214ed59426..de912c2ac8 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.106.0~rc1) stable; urgency=medium + + * New Synapse release 1.106.0rc1. + + -- Synapse Packaging team Thu, 25 Apr 2024 15:54:59 +0100 + matrix-synapse-py3 (1.105.1) stable; urgency=medium * New Synapse release 1.105.1. diff --git a/pyproject.toml b/pyproject.toml index ed0f5ef4ba..5e47a46cd7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.105.1" +version = "1.106.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 30c50e024075f7046baa5465d27a1c490b54dc21 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Thu, 25 Apr 2024 16:00:37 +0100 Subject: [PATCH 05/19] Tweak changelog --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 913e6fbc8c..451581fa63 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -3,8 +3,8 @@ ### Features - Send an email if the address is already bound to an user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819)) -- Implement the rendezvous mechanism described by MSC4108. 
([\#17056](https://github.com/element-hq/synapse/issues/17056)) -- Support delegating the rendezvous mechanism described MSC4108 to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) +- Implement the rendezvous mechanism described by [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108). ([\#17056](https://github.com/element-hq/synapse/issues/17056)) +- Support delegating the rendezvous mechanism described [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108) to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) ### Bugfixes From 922656fc77e03b50daf09f91af5b4d67879c896a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:36:21 +0100 Subject: [PATCH 06/19] Bump phonenumbers from 8.13.29 to 8.13.35 (#17106) Bumps [phonenumbers](https://github.com/daviddrysdale/python-phonenumbers) from 8.13.29 to 8.13.35.
Commits
  • 9369ff4 Prep for 8.13.35 release
  • 2e1e133 Generated files for metadata
  • 25a306f Merge metadata changes from upstream 8.13.35
  • 7105292 Prep for 8.13.34 release
  • e7b328d Generated files for metadata
  • 315eb10 Merge metadata changes from upstream 8.13.34
  • 29dab75 Prep for 8.13.33 release
  • f5b9401 Generated files for metadata
  • aa21158 Merge metadata changes from upstream 8.13.33
  • 92c242c Prep for 8.13.32 release
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=phonenumbers&package-manager=pip&previous-version=8.13.29&new-version=8.13.35)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d916c627a0..fe56051238 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1638,13 +1638,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.29" +version = "8.13.35" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.29-py2.py3-none-any.whl", hash = "sha256:9d7863dc8a37e8127f3c9dde65be93a5b46649b779184f8b0a85bdd043b0b293"}, - {file = "phonenumbers-8.13.29.tar.gz", hash = "sha256:a6c85b53e28410aba2f312255cc8015f384a43e7e241ffb84ca5cde80f094cdf"}, + {file = "phonenumbers-8.13.35-py2.py3-none-any.whl", hash = "sha256:58286a8e617bd75f541e04313b28c36398be6d4443a778c85e9617a93c391310"}, + {file = "phonenumbers-8.13.35.tar.gz", hash = "sha256:64f061a967dcdae11e1c59f3688649e697b897110a33bb74d5a69c3e35321245"}, ] [[package]] From 31664455142ac38bacd4c03958b5f418ac24c8b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:36:47 +0100 Subject: [PATCH 07/19] Bump pydantic from 2.6.4 to 2.7.0 (#17107) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.6.4 to 2.7.0.
Release notes

Sourced from pydantic's releases.

v2.7.0 (2024-04-11)

The code released in v2.7.0 is practically identical to that of v2.7.0b1.

What's Changed

Packaging

New Features

Finalized in v2.7.0, rather than v2.7.0b1:

  • Add support for field level number to str coercion option by @NeevCohen in #9137
  • Update warnings parameter for serialization utilities to allow raising a warning by @Lance-Drane in #9166

Changes

Performance

... (truncated)


[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.6.4&new-version=2.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 170 ++++++++++++++++++++++++++-------------------------- 1 file changed, 85 insertions(+), 85 deletions(-) diff --git a/poetry.lock b/poetry.lock index fe56051238..4bb9ab590e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1873,18 +1873,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.4" +version = "2.7.0" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, + {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.1" typing-extensions = ">=4.6.1" [package.extras] @@ -1892,90 +1892,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.1" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = 
"pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = 
"pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, + {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, + {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, + {file = 
"pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, + {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, + {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, + {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, + {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, + {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, + {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, + {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, + {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, + {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, ] [package.dependencies] From 0d4d00a07ce5d1b7b538d16744558a85cc142ff3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:39:30 +0100 Subject: [PATCH 08/19] Bump pyicu from 2.12 to 2.13 (#17109) [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyicu&package-manager=pip&previous-version=2.12&new-version=2.13)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4bb9ab590e..953e9fe4c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2016,12 +2016,12 @@ plugins = ["importlib-metadata"] [[package]] name = "pyicu" -version = "2.12" +version = "2.13" description = "Python extension wrapping the ICU C++ API" optional = true python-versions = "*" files = [ - {file = "PyICU-2.12.tar.gz", hash = "sha256:bd7ab5efa93ad692e6daa29cd249364e521218329221726a113ca3cb281c8611"}, + {file = "PyICU-2.13.tar.gz", hash = "sha256:d481be888975df3097c2790241bbe8518f65c9676a74957cdbe790e559c828f6"}, ] [[package]] From 947e8a6cb04d3b9cfa75fa7b6623c03fc7fe0e89 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:39:36 +0100 Subject: [PATCH 09/19] Bump types-bleach from 6.1.0.1 to 6.1.0.20240331 (#17110) Bumps [types-bleach](https://github.com/python/typeshed) from 6.1.0.1 to 6.1.0.20240331.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=types-bleach&package-manager=pip&previous-version=6.1.0.1&new-version=6.1.0.20240331)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 953e9fe4c8..cfe0045afc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3051,15 +3051,18 @@ twisted = "*" [[package]] name = "types-bleach" -version = "6.1.0.1" +version = "6.1.0.20240331" description = "Typing stubs for bleach" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-bleach-6.1.0.1.tar.gz", hash = "sha256:1e43c437e734a90efe4f40ebfe831057599568d3b275939ffbd6094848a18a27"}, - {file = "types_bleach-6.1.0.1-py3-none-any.whl", hash = "sha256:f83f80e0709f13d809a9c79b958a1089df9b99e68059287beb196e38967e4ddf"}, + {file = "types-bleach-6.1.0.20240331.tar.gz", hash = "sha256:2ee858a84fb06fc2225ff56ba2f7f6c88b65638659efae0d7bfd6b24a1b5a524"}, + {file = "types_bleach-6.1.0.20240331-py3-none-any.whl", hash = "sha256:399bc59bfd20a36a56595f13f805e56c8a08e5a5c07903e5cf6fafb5a5107dd4"}, ] +[package.dependencies] +types-html5lib = "*" + [[package]] name = "types-commonmark" version = "0.9.2.20240106" @@ -3071,6 +3074,17 @@ files = [ {file = "types_commonmark-0.9.2.20240106-py3-none-any.whl", hash = "sha256:606d9de1e3a96cab0b1c0b6cccf4df099116148d1d864d115fde2e27ad6877c3"}, ] +[[package]] +name = "types-html5lib" +version = "1.1.11.20240228" +description = "Typing stubs for html5lib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-html5lib-1.1.11.20240228.tar.gz", hash = "sha256:22736b7299e605ec4ba539d48691e905fd0c61c3ea610acc59922232dc84cede"}, + {file = "types_html5lib-1.1.11.20240228-py3-none-any.whl", hash = "sha256:af5de0125cb0fe5667543b158db83849b22e25c0e36c9149836b095548bf1020"}, +] + [[package]] name = "types-jsonschema" version = "4.21.0.20240311" From 31742149d42578360ebc828fc8b8c290f6894831 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:39:49 +0100 Subject: [PATCH 10/19] Bump serde from 1.0.197 to 1.0.198 (#17111) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [serde](https://github.com/serde-rs/serde) from 1.0.197 to 1.0.198.
Release notes

Sourced from serde's releases.

v1.0.198

Commits
  • c4fb923 Release 1.0.198
  • 65b7eea Merge pull request #2729 from dtolnay/saturating
  • 01cd696 Integrate Saturating<T> deserialization into impl_deserialize_num macro
  • c13b3f7 Format PR 2709
  • a6571ee Merge pull request #2709 from jbethune/master
  • 6e38aff Revert "Temporarily disable miri on doctests"
  • 3d1b19e Implement Ser+De for Saturating<T>
  • 5b24f88 Resolve legacy_numeric_constants clippy lints
  • 74d0670 Explicitly install a Rust toolchain for cargo-outdated job
  • 3bfab6e Temporarily disable miri on doctests
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=serde&package-manager=cargo&previous-version=1.0.197&new-version=1.0.198)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4474dfb903..a454b6eff5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -485,18 +485,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" dependencies = [ "proc-macro2", "quote", From 9985aa682193d7af989f1582e379a8cbf8f5b1bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:39:57 +0100 Subject: [PATCH 11/19] Bump serde_json from 1.0.115 to 1.0.116 (#17112) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.115 to 1.0.116.
Release notes

Sourced from serde_json's releases.

v1.0.116

Commits
  • a3f62bb Release 1.0.116
  • 12c8ee0 Hide "non-exhaustive patterns" errors when crate fails to compile
  • 051ce97 Merge pull request 1124 from mleonhard/master
  • 25dc750 Replace features_check mod with a call to std::compile_error!. Fixes htt...
  • 2e15e3d Revert "Temporarily disable miri on doctests"
  • 0baba28 Resolve legacy_numeric_constants clippy lints
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=serde_json&package-manager=cargo&previous-version=1.0.115&new-version=1.0.116)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a454b6eff5..8cbcdbd470 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -505,9 +505,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ "itoa", "ryu", From 59710437e4a885252de5e5555fbcf42d223b092c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melvyn=20La=C3=AFly?= Date: Fri, 26 Apr 2024 10:43:52 +0200 Subject: [PATCH 12/19] Return the search terms as search highlights for SQLite instead of nothing (#17000) Fixes https://github.com/element-hq/synapse/issues/16999 and https://github.com/element-hq/element-android/pull/8729 by returning the search terms as search highlights. --- changelog.d/17000.bugfix | 1 + synapse/storage/databases/main/search.py | 31 ++++++++++++++++++------ tests/storage/test_room_search.py | 13 +++++----- 3 files changed, 31 insertions(+), 14 deletions(-) create mode 100644 changelog.d/17000.bugfix diff --git a/changelog.d/17000.bugfix b/changelog.d/17000.bugfix new file mode 100644 index 0000000000..86b21c9615 --- /dev/null +++ b/changelog.d/17000.bugfix @@ -0,0 +1 @@ +Fixed search feature of Element Android on homesevers using SQLite by returning search terms as search highlights. \ No newline at end of file diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py index 4a0afb50ac..20fcfd3122 100644 --- a/synapse/storage/databases/main/search.py +++ b/synapse/storage/databases/main/search.py @@ -470,6 +470,8 @@ class SearchStore(SearchBackgroundUpdateStore): count_args = args count_clauses = clauses + sqlite_highlights: List[str] = [] + if isinstance(self.database_engine, PostgresEngine): search_query = search_term sql = """ @@ -486,7 +488,7 @@ class SearchStore(SearchBackgroundUpdateStore): """ count_args = [search_query] + count_args elif isinstance(self.database_engine, Sqlite3Engine): - search_query = _parse_query_for_sqlite(search_term) + search_query, sqlite_highlights = _parse_query_for_sqlite(search_term) sql = """ SELECT rank(matchinfo(event_search)) as rank, room_id, event_id @@ -531,9 +533,11 @@ class SearchStore(SearchBackgroundUpdateStore): event_map = {ev.event_id: ev for ev in events} - highlights = None + highlights: Collection[str] = [] if isinstance(self.database_engine, PostgresEngine): highlights = await self._find_highlights_in_postgres(search_query, events) + else: + highlights = sqlite_highlights count_sql += " GROUP BY room_id" @@ -597,6 +601,8 @@ class SearchStore(SearchBackgroundUpdateStore): count_args = list(args) count_clauses = list(clauses) + sqlite_highlights: List[str] = [] + if pagination_token: try: origin_server_ts_str, stream_str = pagination_token.split(",") @@ -647,7 +653,7 @@ class SearchStore(SearchBackgroundUpdateStore): CROSS JOIN events USING (event_id) WHERE """ - search_query = _parse_query_for_sqlite(search_term) + search_query, sqlite_highlights = _parse_query_for_sqlite(search_term) args = [search_query] + args count_sql = """ @@ -694,9 +700,11 @@ class SearchStore(SearchBackgroundUpdateStore): event_map = {ev.event_id: ev for ev in events} - highlights = None + highlights: 
Collection[str] = [] if isinstance(self.database_engine, PostgresEngine): highlights = await self._find_highlights_in_postgres(search_query, events) + else: + highlights = sqlite_highlights count_sql += " GROUP BY room_id" @@ -892,19 +900,25 @@ def _tokenize_query(query: str) -> TokenList: return tokens -def _tokens_to_sqlite_match_query(tokens: TokenList) -> str: +def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]: """ Convert the list of tokens to a string suitable for passing to sqlite's MATCH. Assume sqlite was compiled with enhanced query syntax. + Returns the sqlite-formatted query string and the tokenized search terms + that can be used as highlights. + Ref: https://www.sqlite.org/fts3.html#full_text_index_queries """ match_query = [] + highlights = [] for token in tokens: if isinstance(token, str): match_query.append(token) + highlights.append(token) elif isinstance(token, Phrase): match_query.append('"' + " ".join(token.phrase) + '"') + highlights.append(" ".join(token.phrase)) elif token == SearchToken.Not: # TODO: SQLite treats NOT as a *binary* operator. Hopefully a search # term has already been added before this. @@ -916,11 +930,14 @@ def _tokens_to_sqlite_match_query(tokens: TokenList) -> str: else: raise ValueError(f"unknown token {token}") - return "".join(match_query) + return "".join(match_query), highlights -def _parse_query_for_sqlite(search_term: str) -> str: +def _parse_query_for_sqlite(search_term: str) -> Tuple[str, List[str]]: """Takes a plain unicode string from the user and converts it into a form that can be passed to sqllite's matchinfo(). + + Returns the converted query string and the tokenized search terms + that can be used as highlights. """ return _tokens_to_sqlite_match_query(_tokenize_query(search_term)) diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index 1eab89f140..340642b7e7 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -71,17 +71,16 @@ class EventSearchInsertionTest(HomeserverTestCase): store.search_msgs([room_id], "hi bob", ["content.body"]) ) self.assertEqual(result.get("count"), 1) - if isinstance(store.database_engine, PostgresEngine): - self.assertIn("hi", result.get("highlights")) - self.assertIn("bob", result.get("highlights")) + self.assertIn("hi", result.get("highlights")) + self.assertIn("bob", result.get("highlights")) # Check that search works for an unrelated message result = self.get_success( store.search_msgs([room_id], "another", ["content.body"]) ) self.assertEqual(result.get("count"), 1) - if isinstance(store.database_engine, PostgresEngine): - self.assertIn("another", result.get("highlights")) + + self.assertIn("another", result.get("highlights")) # Check that search works for a search term that overlaps with the message # containing a null byte and an unrelated message. @@ -90,8 +89,8 @@ class EventSearchInsertionTest(HomeserverTestCase): result = self.get_success( store.search_msgs([room_id], "hi alice", ["content.body"]) ) - if isinstance(store.database_engine, PostgresEngine): - self.assertIn("alice", result.get("highlights")) + + self.assertIn("alice", result.get("highlights")) def test_non_string(self) -> None: """Test that non-string `value`s are not inserted into `event_search`. From 0ef2315a99859217e319e4cb5a29d88a054952ff Mon Sep 17 00:00:00 2001 From: "Amanda H. L. 
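
Editor's note: as an illustration of the changelog entry above, the following is a minimal sketch of how the deprecated and recommended MSC3266 summary paths differ. The capture-group name `room_identifier` and the example room ID are assumptions for the sketch (the group name is stripped by formatting in the quoted `room.py` hunk below, which remains authoritative).

```python
# Hedged sketch: matching the deprecated vs. recommended MSC3266 summary paths.
# "room_identifier" and the example room ID are assumptions for illustration.
import re

DEPRECATED = re.compile(
    "^/_matrix/client/unstable/im.nheko.summary"
    "/rooms/(?P<room_identifier>[^/]*)/summary$"
)
RECOMMENDED = re.compile(
    "^/_matrix/client/unstable/im.nheko.summary"
    "/summary/(?P<room_identifier>[^/]*)$"
)

for path in (
    "/_matrix/client/unstable/im.nheko.summary/rooms/!abc:example.org/summary",  # deprecated shape
    "/_matrix/client/unstable/im.nheko.summary/summary/!abc:example.org",        # recommended shape
):
    match = DEPRECATED.match(path) or RECOMMENDED.match(path)
    print(path, "->", match.group("room_identifier") if match else "no match")
```
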
de Andrade Katz" Date: Fri, 26 Apr 2024 05:44:54 -0300 Subject: [PATCH 13/19] Update event_cache_size and global_factor configurations documentation (#17071) ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/17071.doc | 1 + docs/usage/configuration/config_documentation.md | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog.d/17071.doc diff --git a/changelog.d/17071.doc b/changelog.d/17071.doc new file mode 100644 index 0000000000..28773414d8 --- /dev/null +++ b/changelog.d/17071.doc @@ -0,0 +1 @@ +Update event_cache_size and global_factor configurations documentation. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 985f90c8a1..bcd53145f1 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1317,6 +1317,12 @@ Options related to caching. The number of events to cache in memory. Defaults to 10K. Like other caches, this is affected by `caches.global_factor` (see below). +For example, the default is 10K and the global_factor default is 0.5. + +Since 10K * 0.5 is 5K then the event cache size will be 5K. + +The cache affected by this configuration is named as "*getEvent*". + Note that this option is not part of the `caches` section. Example configuration: @@ -1342,6 +1348,8 @@ number of entries that can be stored. Defaults to 0.5, which will halve the size of all caches. + Note that changing this value also affects the HTTP connection pool. + * `per_cache_factors`: A dictionary of cache name to cache factor for that individual cache. Overrides the global cache factor for a given cache. From 516fd891eeb3cade255298a2239ca607bfbec16a Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Fri, 26 Apr 2024 17:46:42 +0900 Subject: [PATCH 14/19] Use recommended endpoint for MSC3266 requests (#17078) Keep the existing endpoint for backwards compatibility Signed-off-by: Andrew Ferrazzutti --- changelog.d/17078.bugfix | 1 + docs/workers.md | 2 +- synapse/rest/client/room.py | 6 ++++++ 3 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17078.bugfix diff --git a/changelog.d/17078.bugfix b/changelog.d/17078.bugfix new file mode 100644 index 0000000000..286a772a1e --- /dev/null +++ b/changelog.d/17078.bugfix @@ -0,0 +1 @@ +For MSC3266 room summaries, support queries at the recommended endpoint of `/_matrix/client/unstable/im.nheko.summary/summary/{roomIdOrAlias}`. The existing endpoint of `/_matrix/client/unstable/im.nheko.summary/rooms/{roomIdOrAlias}/summary` is deprecated. 
diff --git a/docs/workers.md b/docs/workers.md index ab9c1db86b..9a0cc9f2f4 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -232,7 +232,7 @@ information. ^/_matrix/client/v1/rooms/.*/hierarchy$ ^/_matrix/client/(v1|unstable)/rooms/.*/relations/ ^/_matrix/client/v1/rooms/.*/threads$ - ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$ + ^/_matrix/client/unstable/im.nheko.summary/summary/.*$ ^/_matrix/client/(r0|v3|unstable)/account/3pid$ ^/_matrix/client/(r0|v3|unstable)/account/whoami$ ^/_matrix/client/(r0|v3|unstable)/devices$ diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index e4c7dd1a58..fb4d44211e 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -1442,10 +1442,16 @@ class RoomHierarchyRestServlet(RestServlet): class RoomSummaryRestServlet(ResolveRoomIdMixin, RestServlet): PATTERNS = ( + # deprecated endpoint, to be removed re.compile( "^/_matrix/client/unstable/im.nheko.summary" "/rooms/(?P[^/]*)/summary$" ), + # recommended endpoint + re.compile( + "^/_matrix/client/unstable/im.nheko.summary" + "/summary/(?P[^/]*)$" + ), ) CATEGORY = "Client API requests" From 90cc9e5b29bc6c2433df5e62da1ad8bcb83ac038 Mon Sep 17 00:00:00 2001 From: "Amanda H. L. de Andrade Katz" Date: Fri, 26 Apr 2024 05:52:58 -0300 Subject: [PATCH 15/19] Rephrase enable_notifs configuration (#17116) --- changelog.d/17116.doc | 1 + docs/usage/configuration/config_documentation.md | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/17116.doc diff --git a/changelog.d/17116.doc b/changelog.d/17116.doc new file mode 100644 index 0000000000..8712737c05 --- /dev/null +++ b/changelog.d/17116.doc @@ -0,0 +1 @@ +Update enable_notifs configuration documentation. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index bcd53145f1..0c582d0387 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -676,8 +676,8 @@ This setting has the following sub-options: trailing 's'. * `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email subjects. It defaults to 'Matrix'. -* `enable_notifs`: Set to true to enable sending emails for messages that the user - has missed. Disabled by default. +* `enable_notifs`: Set to true to allow users to receive e-mail notifications. If this is not set, + users can configure e-mail notifications but will not receive them. Disabled by default. * `notif_for_new_users`: Set to false to disable automatic subscription to email notifications for new users. Enabled by default. * `notif_delay_before_mail`: The time to wait before emailing about a notification. From 41fbe387d64c3b47202926711bb574d4a7b74d47 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Fri, 26 Apr 2024 09:54:30 +0100 Subject: [PATCH 16/19] Improve error message for cross signing reset with MSC3861 enabled (#17121) --- changelog.d/17121.bugfix | 1 + synapse/rest/client/keys.py | 13 ++++++++----- 2 files changed, 9 insertions(+), 5 deletions(-) create mode 100644 changelog.d/17121.bugfix diff --git a/changelog.d/17121.bugfix b/changelog.d/17121.bugfix new file mode 100644 index 0000000000..f160839aac --- /dev/null +++ b/changelog.d/17121.bugfix @@ -0,0 +1 @@ +Improve error message for cross signing reset with MSC3861 enabled. 
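
The keys.py hunk below builds the URL that the improved error message points users at. A self-contained sketch of that selection logic, with hypothetical example values (only the parameter names, which mirror the config attributes, and the `?action=` query string come from the hunk):

```python
from typing import Optional


def cross_signing_reset_url(account_management_url: Optional[str], issuer: str) -> str:
    """Sketch of the URL selection in the hunk below: prefer the account
    management URL with the reset action, else fall back to the issuer."""
    if account_management_url is not None:
        return f"{account_management_url}?action=org.matrix.cross_signing_reset"
    return issuer


# Hypothetical MSC3861 config values, for illustration only.
print(cross_signing_reset_url("https://account.example.com", "https://auth.example.com"))
# -> https://account.example.com?action=org.matrix.cross_signing_reset
print(cross_signing_reset_url(None, "https://auth.example.com"))
# -> https://auth.example.com
```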
diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 86c9515854..a0017257ce 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -393,17 +393,20 @@ class SigningKeyUploadServlet(RestServlet): # time. Because there is no UIA in MSC3861, for now we throw an error if the # user tries to reset the device signing key when MSC3861 is enabled, but allow # first-time setup. - # - # XXX: We now have a get-out clause by which MAS can temporarily mark the master - # key as replaceable. It should do its own equivalent of user interactive auth - # before doing so. if self.hs.config.experimental.msc3861.enabled: # The auth service has to explicitly mark the master key as replaceable # without UIA to reset the device signing key with MSC3861. if is_cross_signing_setup and not master_key_updatable_without_uia: + config = self.hs.config.experimental.msc3861 + if config.account_management_url is not None: + url = f"{config.account_management_url}?action=org.matrix.cross_signing_reset" + else: + url = config.issuer + raise SynapseError( HTTPStatus.NOT_IMPLEMENTED, - "Resetting cross signing keys is not yet supported with MSC3861", + "To reset your end-to-end encryption cross-signing identity, " + f"you first need to approve it at {url} and then try again.", Codes.UNRECOGNIZED, ) # But first-time setup is fine From 9c918739220bb548d9e5e5c2f5692c79bd38668d Mon Sep 17 00:00:00 2001 From: villepeh <100730729+villepeh@users.noreply.github.com> Date: Fri, 26 Apr 2024 11:56:20 +0300 Subject: [PATCH 17/19] Add RuntimeDirectory to matrix-synapse.service (#17084) This makes it easy to store UNIX sockets with correct permissions. Those would be located in /run/synapse which is the directory used in many examples in Synapse configuration manual. Additionally, the directory and sockets are deleted when Synapse is shut down. --- changelog.d/17084.doc | 1 + docs/systemd-with-workers/system/matrix-synapse.service | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/17084.doc diff --git a/changelog.d/17084.doc b/changelog.d/17084.doc new file mode 100644 index 0000000000..8b97c81096 --- /dev/null +++ b/changelog.d/17084.doc @@ -0,0 +1 @@ +Add RuntimeDirectory to example matrix-synapse.service systemd unit. 
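
With `RuntimeDirectory=synapse` (added in the unit-file hunk below), systemd creates `/run/synapse` owned by the service user and removes it when the service stops. A minimal sketch of talking to a Synapse HTTP listener bound to a UNIX socket in that directory — the socket filename is hypothetical, and this assumes such a listener has been configured:

```python
import socket

# Hypothetical path: a UNIX-socket listener placed in the systemd-managed
# /run/synapse runtime directory. Adjust to match your listener config.
SOCKET_PATH = "/run/synapse/main.sock"

with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
    sock.connect(SOCKET_PATH)
    # Synapse exposes a simple /health endpoint on its HTTP listeners.
    sock.sendall(b"GET /health HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n")
    print(sock.recv(4096).decode(errors="replace"))
```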
diff --git a/docs/systemd-with-workers/system/matrix-synapse.service b/docs/systemd-with-workers/system/matrix-synapse.service index 0c73fb55fb..31ceccb77f 100644 --- a/docs/systemd-with-workers/system/matrix-synapse.service +++ b/docs/systemd-with-workers/system/matrix-synapse.service @@ -9,6 +9,7 @@ ReloadPropagatedFrom=matrix-synapse.target Type=notify NotifyAccess=main User=matrix-synapse +RuntimeDirectory=synapse WorkingDirectory=/var/lib/matrix-synapse EnvironmentFile=-/etc/default/matrix-synapse ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys From 89fc579329d7c81c040b1c178099860e7de37bed Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 26 Apr 2024 10:52:24 +0100 Subject: [PATCH 18/19] Fix filtering of rooms when supplying the `destination` query parameter to `/_synapse/admin/v1/federation/destinations//rooms` (#17077) --- changelog.d/17077.bugfix | 1 + .../storage/databases/main/transactions.py | 1 + tests/rest/admin/test_federation.py | 67 ++++++++++++++++++- 3 files changed, 66 insertions(+), 3 deletions(-) create mode 100644 changelog.d/17077.bugfix diff --git a/changelog.d/17077.bugfix b/changelog.d/17077.bugfix new file mode 100644 index 0000000000..7d8ea37406 --- /dev/null +++ b/changelog.d/17077.bugfix @@ -0,0 +1 @@ +Fixes a bug introduced in v1.52.0 where the `destination` query parameter for the [Destination Rooms Admin API](https://element-hq.github.io/synapse/v1.105/usage/administration/admin_api/federation.html#destination-rooms) failed to actually filter returned rooms. \ No newline at end of file diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index 08e0241f68..770802483c 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -660,6 +660,7 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore): limit=limit, retcols=("room_id", "stream_ordering"), order_direction=order, + keyvalues={"destination": destination}, ), ) return rooms, count diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py index c1d88f0176..c2015774a1 100644 --- a/tests/rest/admin/test_federation.py +++ b/tests/rest/admin/test_federation.py @@ -778,20 +778,81 @@ class DestinationMembershipTestCase(unittest.HomeserverTestCase): self.assertEqual(number_rooms, len(channel.json_body["rooms"])) self._check_fields(channel.json_body["rooms"]) - def _create_destination_rooms(self, number_rooms: int) -> None: - """Create a number rooms for destination + def test_room_filtering(self) -> None: + """Tests that rooms are correctly filtered""" + + # Create two rooms on the homeserver. Each has a different remote homeserver + # participating in it. + other_destination = "other.destination.org" + room_ids_self_dest = self._create_destination_rooms(2, destination=self.dest) + room_ids_other_dest = self._create_destination_rooms( + 1, destination=other_destination + ) + + # Ask for the rooms that `self.dest` is participating in. + channel = self.make_request("GET", self.url, access_token=self.admin_user_tok) + self.assertEqual(200, channel.code, msg=channel.json_body) + + # Verify that we received only the rooms that `self.dest` is participating in. + # This assertion method name is a bit misleading. 
It does check that both lists + # contain the same items, and the same counts. + self.assertCountEqual( + [r["room_id"] for r in channel.json_body["rooms"]], room_ids_self_dest + ) + self.assertEqual(channel.json_body["total"], len(room_ids_self_dest)) + + # Ask for the rooms that `other_destination` is participating in. + channel = self.make_request( + "GET", + self.url.replace(self.dest, other_destination), + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + + # Verify that we received only the rooms that `other_destination` is + # participating in. + self.assertCountEqual( + [r["room_id"] for r in channel.json_body["rooms"]], room_ids_other_dest + ) + self.assertEqual(channel.json_body["total"], len(room_ids_other_dest)) + + def _create_destination_rooms( + self, + number_rooms: int, + destination: Optional[str] = None, + ) -> List[str]: + """ + Create the given number of rooms. The given `destination` homeserver will + be recorded as a participant. Args: number_rooms: Number of rooms to be created + destination: The domain of the homeserver that will be considered + as a participant in the rooms. + + Returns: + The IDs of the rooms that have been created. """ + room_ids = [] + + # If no destination was provided, default to `self.dest`. + if destination is None: + destination = self.dest + for _ in range(number_rooms): room_id = self.helper.create_room_as( self.admin_user, tok=self.admin_user_tok ) + room_ids.append(room_id) + self.get_success( - self.store.store_destination_rooms_entries((self.dest,), room_id, 1234) + self.store.store_destination_rooms_entries( + (destination,), room_id, 1234 + ) ) + return room_ids + def _check_fields(self, content: List[JsonDict]) -> None: """Checks that the expected room attributes are present in content From 0fd6b269d32340c367a67ee34b963c32da080697 Mon Sep 17 00:00:00 2001 From: devonh Date: Fri, 26 Apr 2024 18:10:45 +0000 Subject: [PATCH 19/19] Fix various typos in docs (#17114) ### Pull Request Checklist * [X] Pull request is based on the develop branch * [X] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [X] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/17114.doc | 1 + docs/admin_api/room_membership.md | 2 +- docs/message_retention_policies.md | 6 +++--- docs/postgres.md | 2 +- docs/setup/installation.md | 4 ++-- docs/usage/administration/admin_api/background_updates.md | 2 +- docs/usage/administration/admin_faq.md | 2 +- docs/user_directory.md | 4 ++-- docs/workers.md | 2 +- 9 files changed, 13 insertions(+), 12 deletions(-) create mode 100644 changelog.d/17114.doc diff --git a/changelog.d/17114.doc b/changelog.d/17114.doc new file mode 100644 index 0000000000..042bd89618 --- /dev/null +++ b/changelog.d/17114.doc @@ -0,0 +1 @@ +Fix various small typos throughout the docs. diff --git a/docs/admin_api/room_membership.md b/docs/admin_api/room_membership.md index 94bc95a8d5..6cbaba3dcc 100644 --- a/docs/admin_api/room_membership.md +++ b/docs/admin_api/room_membership.md @@ -1,6 +1,6 @@ # Edit Room Membership API -This API allows an administrator to join an user account with a given `user_id` +This API allows an administrator to join a user account with a given `user_id` to a room with a given `room_id_or_alias`. You can only modify the membership of local users. The server administrator must be in the room and have permission to invite users. diff --git a/docs/message_retention_policies.md b/docs/message_retention_policies.md index c64d1539b0..01f67c952a 100644 --- a/docs/message_retention_policies.md +++ b/docs/message_retention_policies.md @@ -51,8 +51,8 @@ clients. ## Server configuration -Support for this feature can be enabled and configured by adding a the -`retention` in the Synapse configuration file (see +Support for this feature can be enabled and configured by adding the +`retention` option in the Synapse configuration file (see [configuration manual](usage/configuration/config_documentation.md#retention)). To enable support for message retention policies, set the setting @@ -117,7 +117,7 @@ In this example, we define three jobs: policy's `max_lifetime` is greater than a week. Note that this example is tailored to show different configurations and -features slightly more jobs than it's probably necessary (in practice, a +features slightly more jobs than is probably necessary (in practice, a server admin would probably consider it better to replace the two last jobs with one that runs once a day and handles rooms which policy's `max_lifetime` is greater than 3 days). diff --git a/docs/postgres.md b/docs/postgres.md index 921bae9877..ae34f7689b 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -128,7 +128,7 @@ can read more about that [here](https://www.postgresql.org/docs/10/kernel-resour ### Overview The script `synapse_port_db` allows porting an existing synapse server -backed by SQLite to using PostgreSQL. This is done in as a two phase +backed by SQLite to using PostgreSQL. This is done as a two phase process: 1. Copy the existing SQLite database to a separate location and run diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 9126874d44..ed3e59c470 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -259,9 +259,9 @@ users, etc.) to the developers via the `--report-stats` argument. This command will generate you a config file that you can then customise, but it will also generate a set of keys for you. 
These keys will allow your homeserver to -identify itself to other homeserver, so don't lose or delete them. It would be +identify itself to other homeservers, so don't lose or delete them. It would be wise to back them up somewhere safe. (If, for whatever reason, you do need to -change your homeserver's keys, you may find that other homeserver have the +change your homeserver's keys, you may find that other homeservers have the old key cached. If you update the signing key, you should change the name of the key in the `.signing.key` file (the second word) to something different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management). diff --git a/docs/usage/administration/admin_api/background_updates.md b/docs/usage/administration/admin_api/background_updates.md index 9f6ac7d567..7b75ee5587 100644 --- a/docs/usage/administration/admin_api/background_updates.md +++ b/docs/usage/administration/admin_api/background_updates.md @@ -44,7 +44,7 @@ For each update: ## Enabled -This API allow pausing background updates. +This API allows pausing background updates. Background updates should *not* be paused for significant periods of time, as this can affect the performance of Synapse. diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 092dcc1c84..0d98f73fb1 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -241,7 +241,7 @@ in memory constrained environments, or increased if performance starts to degrade. However, degraded performance due to a low cache factor, common on -machines with slow disks, often leads to explosions in memory use due +machines with slow disks, often leads to explosions in memory use due to backlogged requests. In this case, reducing the cache factor will make things worse. Instead, try increasing it drastically. 2.0 is a good starting value. diff --git a/docs/user_directory.md b/docs/user_directory.md index 1271cfb862..be8664a016 100644 --- a/docs/user_directory.md +++ b/docs/user_directory.md @@ -86,9 +86,9 @@ The search term is then split into words: * If unavailable, then runs of ASCII characters, numbers, underscores, and hyphens are considered words. -The queries for PostgreSQL and SQLite are detailed below, by their overall goal +The queries for PostgreSQL and SQLite are detailed below, but their overall goal is to find matching users, preferring users who are "real" (e.g. not bots, -not deactivated). It is assumed that real users will have an display name and +not deactivated). It is assumed that real users will have a display name and avatar set. ### PostgreSQL diff --git a/docs/workers.md b/docs/workers.md index 9a0cc9f2f4..82f4bfc1d1 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -634,7 +634,7 @@ worker application type. #### Push Notifications -You can designate generic worker to sending push notifications to +You can designate generic workers to send push notifications to a [push gateway](https://spec.matrix.org/v1.5/push-gateway-api/) such as [sygnal](https://github.com/matrix-org/sygnal) and email.
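
As a rough sketch of the fallback word-splitting that the user_directory.md passage above describes for when ICU is unavailable — the regex here is illustrative, not Synapse's actual implementation:

```python
import re

# Illustrative fallback tokenisation: runs of ASCII letters, digits,
# underscores and hyphens are treated as words (per the doc text above).
WORD_RE = re.compile(r"[A-Za-z0-9_-]+")

print(WORD_RE.findall("Display-Name_1 (@alice:example.org)"))
# -> ['Display-Name_1', 'alice', 'example', 'org']
```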