diff --git a/CHANGES.md b/CHANGES.md index 9fedd579717d..168e29f1b235 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,145 @@ +# Synapse 1.104.0 (2024-04-02) + +### Bugfixes + +- Fix a regression when using an OIDC provider. Introduced in v1.104.0rc1. ([\#17031](https://github.com/element-hq/synapse/issues/17031)) + + +# Synapse 1.104.0rc1 (2024-03-26) + +### Features + +- Add an OIDC config option to specify extra parameters for the authorization grant URL. It can be useful to pass an ACR value, for example. ([\#16971](https://github.com/element-hq/synapse/issues/16971)) +- Add support for an OIDC provider returning a JWT. ([\#16972](https://github.com/element-hq/synapse/issues/16972), [\#17031](https://github.com/element-hq/synapse/issues/17031)) + +### Bugfixes + +- Fix a bug which meant that, under certain circumstances, we might never retry sending events or to-device messages over federation after a failure. ([\#16925](https://github.com/element-hq/synapse/issues/16925)) +- Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations. ([\#16949](https://github.com/element-hq/synapse/issues/16949)) +- Fix a case in which the `m.fully_read` marker would not get updated. Contributed by @SpiritCroc. ([\#16990](https://github.com/element-hq/synapse/issues/16990)) +- Fix a bug which meant that a user's pending knocks on rooms were not retracted when their account was deactivated. Contributed by @hanadi92. ([\#17010](https://github.com/element-hq/synapse/issues/17010)) + +### Updates to the Docker image + +- Updated `start.py` to generate config using the correct user ID when running as root (fixes [\#16824](https://github.com/element-hq/synapse/issues/16824), [\#15202](https://github.com/element-hq/synapse/issues/15202)). ([\#16978](https://github.com/element-hq/synapse/issues/16978)) + +### Improved Documentation + +- Add a query to force a refresh of a remote user's device list to the "Useful SQL for Admins" documentation page. ([\#16892](https://github.com/element-hq/synapse/issues/16892)) +- Minor grammatical corrections to the upgrade documentation. ([\#16965](https://github.com/element-hq/synapse/issues/16965)) +- Fix the sort order for the documentation version picker, so that newer releases appear above older ones. ([\#16966](https://github.com/element-hq/synapse/issues/16966)) +- Remove recommendation for a specific poetry version from contributing guide. ([\#17002](https://github.com/element-hq/synapse/issues/17002)) + +### Internal Changes + +- Improve lock performance when a lot of locks are all waiting for a single lock to be released. ([\#16840](https://github.com/element-hq/synapse/issues/16840)) +- Update power level default for public rooms. ([\#16907](https://github.com/element-hq/synapse/issues/16907)) +- Improve event validation. ([\#16908](https://github.com/element-hq/synapse/issues/16908)) +- Multi-worker Docker container: disable log buffering. ([\#16919](https://github.com/element-hq/synapse/issues/16919)) +- Refactor state delta calculation in `/sync` handler. ([\#16929](https://github.com/element-hq/synapse/issues/16929)) +- Clarify docs for some room state functions. ([\#16950](https://github.com/element-hq/synapse/issues/16950)) +- Specify IP subnets in canonical form. ([\#16953](https://github.com/element-hq/synapse/issues/16953)) +- Pass the module API to the OIDC mapping provider, as is already done for the SAML mapping provider, so that mappers can implement more complex logic.
([\#16974](https://github.com/element-hq/synapse/issues/16974)) +- Allow containers building on top of Synapse's Complement container to use the included PostgreSQL cluster. ([\#16985](https://github.com/element-hq/synapse/issues/16985)) +- Raise poetry-core version cap to 1.9.0. ([\#16986](https://github.com/element-hq/synapse/issues/16986)) +- Patch the db conn pool sooner in tests. ([\#17017](https://github.com/element-hq/synapse/issues/17017)) + + + +### Updates to locked dependencies + +* Bump anyhow from 1.0.80 to 1.0.81. ([\#17009](https://github.com/element-hq/synapse/issues/17009)) +* Bump black from 23.10.1 to 24.2.0. ([\#16936](https://github.com/element-hq/synapse/issues/16936)) +* Bump cryptography from 41.0.7 to 42.0.5. ([\#16958](https://github.com/element-hq/synapse/issues/16958)) +* Bump dawidd6/action-download-artifact from 3.1.1 to 3.1.2. ([\#16960](https://github.com/element-hq/synapse/issues/16960)) +* Bump dawidd6/action-download-artifact from 3.1.2 to 3.1.4. ([\#17008](https://github.com/element-hq/synapse/issues/17008)) +* Bump jinja2 from 3.1.2 to 3.1.3. ([\#17005](https://github.com/element-hq/synapse/issues/17005)) +* Bump log from 0.4.20 to 0.4.21. ([\#16977](https://github.com/element-hq/synapse/issues/16977)) +* Bump mypy from 1.5.1 to 1.8.0. ([\#16901](https://github.com/element-hq/synapse/issues/16901)) +* Bump netaddr from 0.9.0 to 1.2.1. ([\#17006](https://github.com/element-hq/synapse/issues/17006)) +* Bump pydantic from 2.6.0 to 2.6.4. ([\#17004](https://github.com/element-hq/synapse/issues/17004)) +* Bump pyo3 from 0.20.2 to 0.20.3. ([\#16962](https://github.com/element-hq/synapse/issues/16962)) +* Bump ruff from 0.1.14 to 0.3.2. ([\#16994](https://github.com/element-hq/synapse/issues/16994)) +* Bump serde from 1.0.196 to 1.0.197. ([\#16963](https://github.com/element-hq/synapse/issues/16963)) +* Bump serde_json from 1.0.113 to 1.0.114. ([\#16961](https://github.com/element-hq/synapse/issues/16961)) +* Bump types-jsonschema from 4.21.0.20240118 to 4.21.0.20240311. ([\#17007](https://github.com/element-hq/synapse/issues/17007)) +* Bump types-psycopg2 from 2.9.21.16 to 2.9.21.20240311. ([\#16995](https://github.com/element-hq/synapse/issues/16995)) +* Bump types-pyopenssl from 23.3.0.0 to 24.0.0.20240311. ([\#17003](https://github.com/element-hq/synapse/issues/17003)) + +# Synapse 1.103.0 (2024-03-19) + +No significant changes since 1.103.0rc1. + + + + +# Synapse 1.103.0rc1 (2024-03-12) + +### Features + +- Add a new [List Accounts v3](https://element-hq.github.io/synapse/v1.103/admin_api/user_admin_api.html#list-accounts-v3) Admin API with improved deactivated user filtering capabilities. ([\#16874](https://github.com/element-hq/synapse/issues/16874)) +- Include `Retry-After` header by default per [MSC4041](https://github.com/matrix-org/matrix-spec-proposals/pull/4041). Contributed by @clokep. ([\#16947](https://github.com/element-hq/synapse/issues/16947)) + +### Bugfixes + +- Fix joining remote rooms when a module uses the `on_new_event` callback. This callback may now pass partial state events instead of the full state for remote rooms. Introduced in v1.76.0. ([\#16973](https://github.com/element-hq/synapse/issues/16973)) +- Fix a performance issue when joining very large rooms that could cause the server to lock up. Introduced in v1.100.0. Contributed by @ggogel. ([\#16968](https://github.com/element-hq/synapse/issues/16968)) + +### Improved Documentation + +- Add HAProxy example for single port operation to reverse proxy documentation.
Contributed by Georg Pfuetzenreuter (@tacerus). ([\#16768](https://github.com/element-hq/synapse/issues/16768)) +- Improve the documentation around running Complement tests with new configuration parameters. ([\#16946](https://github.com/element-hq/synapse/issues/16946)) +- Add docs on upgrading from a very old version. ([\#16951](https://github.com/element-hq/synapse/issues/16951)) + + +### Updates to locked dependencies + +* Bump JasonEtco/create-an-issue from 2.9.1 to 2.9.2. ([\#16934](https://github.com/element-hq/synapse/issues/16934)) +* Bump anyhow from 1.0.79 to 1.0.80. ([\#16935](https://github.com/element-hq/synapse/issues/16935)) +* Bump dawidd6/action-download-artifact from 3.0.0 to 3.1.1. ([\#16933](https://github.com/element-hq/synapse/issues/16933)) +* Bump furo from 2023.9.10 to 2024.1.29. ([\#16939](https://github.com/element-hq/synapse/issues/16939)) +* Bump pyopenssl from 23.3.0 to 24.0.0. ([\#16937](https://github.com/element-hq/synapse/issues/16937)) +* Bump types-netaddr from 0.10.0.20240106 to 1.2.0.20240219. ([\#16938](https://github.com/element-hq/synapse/issues/16938)) + +# Synapse 1.102.0 (2024-03-05) + +### Bugfixes + +- Revert https://github.com/element-hq/synapse/pull/16756, which caused incorrect notification counts on mobile clients since v1.100.0. ([\#16979](https://github.com/element-hq/synapse/issues/16979)) + +# Synapse 1.102.0rc1 (2024-02-20) + +### Features + +- Add a metric for emails sent by Synapse, broken down by type: `synapse_emails_sent_total`. Contributed by Remi Rampin. ([\#16881](https://github.com/element-hq/synapse/issues/16881)) + +### Bugfixes + +- Do not send multiple concurrent requests for keys for the same server. ([\#16894](https://github.com/element-hq/synapse/issues/16894)) +- Fix a performance issue when joining very large rooms that could cause the server to lock up. Introduced in v1.100.0. ([\#16903](https://github.com/element-hq/synapse/issues/16903)) +- Always prefer the unthreaded receipt when more than one exists ([MSC4102](https://github.com/matrix-org/matrix-spec-proposals/pull/4102)). ([\#16927](https://github.com/element-hq/synapse/issues/16927)) + +### Improved Documentation + +- Fix a small typo in the Rooms section of the Admin API documentation. Contributed by @RainerZufall187. ([\#16857](https://github.com/element-hq/synapse/issues/16857)) + +### Internal Changes + +- Don't invalidate the entire event cache when we purge history. ([\#16905](https://github.com/element-hq/synapse/issues/16905)) +- Add experimental config option to not send device list updates for specific users. ([\#16909](https://github.com/element-hq/synapse/issues/16909)) +- Fix incorrect Docker Hub link in release script. ([\#16910](https://github.com/element-hq/synapse/issues/16910)) + + + +### Updates to locked dependencies + +* Bump attrs from 23.1.0 to 23.2.0. ([\#16899](https://github.com/element-hq/synapse/issues/16899)) +* Bump bcrypt from 4.0.1 to 4.1.2. ([\#16900](https://github.com/element-hq/synapse/issues/16900)) +* Bump pygithub from 2.1.1 to 2.2.0. ([\#16902](https://github.com/element-hq/synapse/issues/16902)) +* Bump sentry-sdk from 1.40.0 to 1.40.3.
([\#16898](https://github.com/element-hq/synapse/issues/16898)) + # Synapse 1.101.0 (2024-02-13) ### Bugfixes diff --git a/Cargo.lock b/Cargo.lock index b1568dc7d80c..b2af0440548d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "arc-swap" @@ -138,9 +138,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "memchr" @@ -186,6 +186,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "proc-macro2" version = "1.0.76" @@ -197,9 +203,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" +checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" dependencies = [ "anyhow", "cfg-if", @@ -207,6 +213,7 @@ dependencies = [ "libc", "memoffset", "parking_lot", + "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", @@ -215,9 +222,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" +checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" dependencies = [ "once_cell", "target-lexicon", @@ -225,9 +232,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" +checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" dependencies = [ "libc", "pyo3-build-config", @@ -246,9 +253,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" +checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -258,12 +265,13 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" +checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" dependencies = [ "heck", "proc-macro2", + "pyo3-build-config", "quote", "syn", ] @@ -339,18 +347,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", @@ -359,9 +367,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", diff --git a/debian/changelog b/debian/changelog index 33a658b66c6e..28451044ab23 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,39 @@ +matrix-synapse-py3 (1.104.0) stable; urgency=medium + + * New Synapse release 1.104.0. + + -- Synapse Packaging team Tue, 02 Apr 2024 17:15:45 +0100 + +matrix-synapse-py3 (1.104.0~rc1) stable; urgency=medium + + * New Synapse release 1.104.0rc1. + + -- Synapse Packaging team Tue, 26 Mar 2024 11:48:58 +0000 + +matrix-synapse-py3 (1.103.0) stable; urgency=medium + + * New Synapse release 1.103.0. + + -- Synapse Packaging team Tue, 19 Mar 2024 12:24:36 +0000 + +matrix-synapse-py3 (1.103.0~rc1) stable; urgency=medium + + * New Synapse release 1.103.0rc1. + + -- Synapse Packaging team Tue, 12 Mar 2024 15:02:56 +0000 + +matrix-synapse-py3 (1.102.0) stable; urgency=medium + + * New Synapse release 1.102.0. + + -- Synapse Packaging team Tue, 05 Mar 2024 14:47:03 +0000 + +matrix-synapse-py3 (1.102.0~rc1) stable; urgency=medium + + * New Synapse release 1.102.0rc1. + + -- Synapse Packaging team Tue, 20 Feb 2024 15:50:36 +0000 + matrix-synapse-py3 (1.101.0) stable; urgency=medium * New Synapse release 1.101.0. diff --git a/docker/complement/README.md b/docker/complement/README.md index 62682219e847..1ce84128184d 100644 --- a/docker/complement/README.md +++ b/docker/complement/README.md @@ -30,3 +30,14 @@ Consult `scripts-dev/complement.sh` in the repository root for a real example. [complement]: https://github.com/matrix-org/complement [complementEnv]: https://github.com/matrix-org/complement/pull/382 + +## How to modify homeserver.yaml for Complement tests + +It's common for MSCs to be gated behind a feature flag like this: +```yaml +experimental_features: + faster_joins: true +``` +To modify this for the Complement image, modify `./conf/workers-shared-extra.yaml.j2`. Despite the name, +this will affect non-worker mode as well. Remember to _rebuild_ the image (so don't use `-e` if using +`complement.sh`). 
diff --git a/docker/complement/conf/postgres.supervisord.conf b/docker/complement/conf/postgres.supervisord.conf index b88bfc772e40..657845dfdbc4 100644 --- a/docker/complement/conf/postgres.supervisord.conf +++ b/docker/complement/conf/postgres.supervisord.conf @@ -1,7 +1,7 @@ [program:postgres] command=/usr/local/bin/prefix-log gosu postgres postgres -# Only start if START_POSTGRES=1 +# Only start if START_POSTGRES=true autostart=%(ENV_START_POSTGRES)s # Lower priority number = starts first diff --git a/docker/complement/conf/start_for_complement.sh b/docker/complement/conf/start_for_complement.sh index 7b012ce8abe3..cc798a321062 100755 --- a/docker/complement/conf/start_for_complement.sh +++ b/docker/complement/conf/start_for_complement.sh @@ -32,8 +32,9 @@ case "$SYNAPSE_COMPLEMENT_DATABASE" in ;; sqlite|"") - # Configure supervisord not to start Postgres, as we don't need it - export START_POSTGRES=false + # Set START_POSTGRES to false unless it has already been set + # (i.e. by another container image inheriting our own). + export START_POSTGRES=${START_POSTGRES:-false} ;; *) diff --git a/docker/prefix-log b/docker/prefix-log index 0e26a4f19d33..32dddbbfd4e0 100755 --- a/docker/prefix-log +++ b/docker/prefix-log @@ -7,6 +7,9 @@ # prefix-log command [args...] # -exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1) -exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2) +# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on +# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce +# confusion due to interleaving of the different processes. +exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1) +exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2) exec "$@" diff --git a/docker/start.py b/docker/start.py index 12c444da9a39..818a5355ca41 100755 --- a/docker/start.py +++ b/docker/start.py @@ -160,11 +160,6 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml") data_dir = environ.get("SYNAPSE_DATA_DIR", "/data") - if ownership is not None: - # make sure that synapse has perms to write to the data dir. - log(f"Setting ownership on {data_dir} to {ownership}") - subprocess.run(["chown", ownership, data_dir], check=True) - # create a suitable log config from our template log_config_file = "%s/%s.log.config" % (config_dir, server_name) if not os.path.exists(log_config_file): @@ -189,9 +184,15 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> "--generate-config", "--open-private-ports", ] + + if ownership is not None: + # make sure that synapse has perms to write to the data dir. + log(f"Setting ownership on {data_dir} to {ownership}") + subprocess.run(["chown", ownership, data_dir], check=True) + args = ["gosu", ownership] + args + # log("running %s" % (args, )) - flush_buffers() - os.execv(sys.executable, args) + subprocess.run(args, check=True) def main(args: List[str], environ: MutableMapping[str, str]) -> None: diff --git a/docs/admin_api/rooms.md b/docs/admin_api/rooms.md index ad011e5c36e6..6935ec4a45b8 100644 --- a/docs/admin_api/rooms.md +++ b/docs/admin_api/rooms.md @@ -913,7 +913,7 @@ With all that being said, if you still want to try and recover the room: them handle rejoining themselves. 4.
If `new_room_user_id` was given, a 'Content Violation' will have been - created. Consider whether you want to delete that roomm. + created. Consider whether you want to delete that room. # Make Room Admin API diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 9dc600b87537..9736fe302145 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -164,6 +164,7 @@ Body parameters: Other allowed options are: `bot` and `support`. ## List Accounts +### List Accounts (V2) This API returns all local user accounts. By default, the response is ordered by ascending user ID. @@ -287,6 +288,19 @@ The following fields are returned in the JSON response body: *Added in Synapse 1.93:* the `locked` query parameter and response field. +### List Accounts (V3) + +This API returns all local user accounts (see v2). In contrast to v2, the query parameter `deactivated` is handled differently. + +``` +GET /_synapse/admin/v3/users +``` + +**Parameters** +- `deactivated` - Optional flag to filter deactivated users. If `true`, only deactivated users are returned. + If `false`, deactivated users are excluded from the query. When the flag is absent (the default), + users are not filtered by deactivation status. + ## Query current sessions for a user This API returns information about the active sessions for a specific user. diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index df6451273aa9..ac8a7039d18e 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -68,7 +68,7 @@ Of their installation methods, we recommend ```shell pip install --user pipx -pipx install poetry==1.5.1 # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147 +pipx install poetry ``` but see poetry's [installation instructions](https://python-poetry.org/docs/#installation) diff --git a/docs/modules/third_party_rules_callbacks.md b/docs/modules/third_party_rules_callbacks.md index d06bff25eb1a..b97e28db110e 100644 --- a/docs/modules/third_party_rules_callbacks.md +++ b/docs/modules/third_party_rules_callbacks.md @@ -142,6 +142,10 @@ Called after sending an event into a room. The module is passed the event, as we as the state of the room _after_ the event. This means that if the event is a state event, it will be included in this state. +The state map may not be complete if Synapse hasn't yet loaded the full state +of the room. This can happen for events in rooms that were just joined from +a remote server. + Note that this callback is called when the event has already been processed and stored into the room, which means this callback cannot be used to deny persisting the event. To deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#check_event_for_spam) instead. diff --git a/docs/postgres.md b/docs/postgres.md index ad7c6a0738d3..921bae987754 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -182,7 +182,7 @@ synapse_port_db --sqlite-database homeserver.db.snapshot \ --postgres-config homeserver-postgres.yaml ``` -The flag `--curses` displays a coloured curses progress UI. +The flag `--curses` displays a coloured curses progress UI. 
(NOTE: if your terminal is too small the script will error out) If the script took a long time to complete, or time has otherwise passed since the original snapshot was taken, repeat the previous steps with a diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md index de72fbde9674..7128af114e96 100644 --- a/docs/reverse_proxy.md +++ b/docs/reverse_proxy.md @@ -186,6 +186,25 @@ Example configuration, if using a UNIX socket. The configuration lines regarding backend matrix server matrix unix@/run/synapse/main_public.sock ``` +Example configuration when using a single port for both client and federation traffic. +``` +frontend https + bind *:443,[::]:443 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1 + http-request set-header X-Forwarded-Proto https if { ssl_fc } + http-request set-header X-Forwarded-Proto http if !{ ssl_fc } + http-request set-header X-Forwarded-For %[src] + + acl matrix-host hdr(host) -i matrix.example.com matrix.example.com:443 + acl matrix-sni ssl_fc_sni matrix.example.com + acl matrix-path path_beg /_matrix + acl matrix-path path_beg /_synapse/client + + use_backend matrix if matrix-host matrix-path + use_backend matrix if matrix-sni + +backend matrix + server matrix 127.0.0.1:8008 +``` [Delegation](delegate.md) example: ``` diff --git a/docs/setup/installation.md b/docs/setup/installation.md index 324cdc67b2ff..9126874d445b 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -26,7 +26,7 @@ for most users. #### Docker images and Ansible playbooks There is an official synapse image available at - or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse) + or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse) which can be used with the docker-compose file available at [contrib/docker](https://github.com/element-hq/synapse/tree/develop/contrib/docker). Further information on this including configuration options is available in the README diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md index 77cc02c541f0..10c695029fb9 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md @@ -50,11 +50,13 @@ comment these options out and use those specified by the module instead. A custom mapping provider must specify the following methods: -* `def __init__(self, parsed_config)` +* `def __init__(self, parsed_config, module_api)` - Arguments: - `parsed_config` - A configuration object that is the return value of the `parse_config` method. You should set any configuration options needed by the module here. + - `module_api` - a `synapse.module_api.ModuleApi` object which provides the + stable API available for extension modules. * `def parse_config(config)` - This method should have the `@staticmethod` decoration. - Arguments: diff --git a/docs/upgrade.md b/docs/upgrade.md index 7f67ef88067c..e7247676d142 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -88,15 +88,35 @@ process, for example: dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb ``` -Generally Synapse database schemas are compatible across multiple versions, once -a version of Synapse is deployed you may not be able to rollback automatically. +Generally Synapse database schemas are compatible across multiple versions, but once +a version of Synapse is deployed you may not be able to roll back automatically. The following table gives the version ranges and the earliest version they can be rolled back to. E.g. 
Synapse versions v1.58.0 through v1.61.1 can be rolled -back safely to v1.57.0, but starting with v1.62.0 it is only safe to rollback to +back safely to v1.57.0, but starting with v1.62.0 it is only safe to roll back to v1.61.0. + +## Upgrading from a very old version + +You need to read all of the upgrade notes for each version between your current +version and the latest so that you can update your dependencies, environment, +config files, etc. if necessary. But you do not need to perform an +upgrade to each individual version that was missed. + +We do not have a list of which versions must be installed. Instead, we recommend +that you upgrade through each incompatible database schema version, which would +give you the ability to roll back the maximum number of versions should anything +go wrong. See [Rolling back to older versions](#rolling-back-to-older-versions) +above. + +Additionally, new versions of Synapse will occasionally run database migrations +and background updates to update the database. Synapse will not start until +database migrations are complete. You should wait until background updates from +each upgrade are complete before moving on to the next upgrade, to avoid +stacking them up. You can monitor the currently running background updates with +[the Admin API](usage/administration/admin_api/background_updates.html#status). + # Upgrading to v1.100.0 ## Minimum supported Rust version diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 5c9ee7d0aa6f..092dcc1c846c 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -120,6 +120,11 @@ for file in $source_directory/*; do done ``` +How do I upgrade from a very old version of Synapse to the latest? +--- +See [this](../../upgrade.html#upgrading-from-a-very-old-version) section in the +upgrade docs. + Manually resetting passwords --- Users can reset their password through their client. Alternatively, a server admin diff --git a/docs/usage/administration/useful_sql_for_admins.md b/docs/usage/administration/useful_sql_for_admins.md index 9f2cc9b95725..41755cd3b60b 100644 --- a/docs/usage/administration/useful_sql_for_admins.md +++ b/docs/usage/administration/useful_sql_for_admins.md @@ -205,3 +205,12 @@ SELECT user_id, device_id, user_agent, TO_TIMESTAMP(last_seen / 1000) AS "last_s FROM devices WHERE last_seen < DATE_PART('epoch', NOW() - INTERVAL '3 month') * 1000; ``` + +## Clear the cache of a remote user's device list + +Forces a resync of a remote user's device list: useful if you have somehow cached a bad state and the remote server +will not send out a device list update. +```sql +INSERT INTO device_lists_remote_resync +VALUES ('USER_ID', (EXTRACT(epoch FROM NOW()) * 1000)::BIGINT); +``` diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 638a459ed52b..985f90c8a16a 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3349,6 +3349,9 @@ Options for each entry include: not included in `scopes`. Set to `userinfo_endpoint` to always use the userinfo endpoint. + +* `additional_authorization_parameters`: String-to-string dictionary that will be passed as + additional parameters to the authorization grant URL. + +* `allow_existing_users`: set to true to allow a user logging in via OIDC to match a pre-existing account instead of failing. This could be used if switching from password logins to OIDC.
Defaults to false. @@ -3473,6 +3476,8 @@ oidc_providers: token_endpoint: "https://accounts.example.com/oauth2/token" userinfo_endpoint: "https://accounts.example.com/userinfo" jwks_uri: "https://accounts.example.com/.well-known/jwks.json" + additional_authorization_parameters: + acr_values: 2fa skip_verification: true enable_registration: true user_mapping_provider: diff --git a/docs/website_files/version-picker.js b/docs/website_files/version-picker.js index b6f35f29c7ab..3174b5d0bca8 100644 --- a/docs/website_files/version-picker.js +++ b/docs/website_files/version-picker.js @@ -100,10 +100,30 @@ function sortVersions(a, b) { if (a === 'develop' || a === 'latest') return -1; if (b === 'develop' || b === 'latest') return 1; - const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0]; - const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0]; + // If any of the versions do not conform to a semantic version string, they + // will be sorted behind a valid version. + const versionA = (a.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? ''; + const versionB = (b.match(/v(\d+(\.\d+)+)/) || [])[1]?.split('.') ?? ''; - return versionB.localeCompare(versionA); + for (let i = 0; i < Math.max(versionA.length, versionB.length); i++) { + if (versionB[i] === undefined) { + return -1; + } + if (versionA[i] === undefined) { + return 1; + } + + const partA = parseInt(versionA[i], 10); + const partB = parseInt(versionB[i], 10); + + if (partA > partB) { + return -1; + } else if (partB > partA) { + return 1; + } + } + + return 0; } /** diff --git a/poetry.lock b/poetry.lock index c9e5ad9e153f..5332774705be 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,21 +46,22 @@ wrapt = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "authlib" @@ -110,32 +111,38 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file =
"bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = 
"bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -162,29 +169,33 @@ lxml = ["lxml"] [[package]] name = "black" 
-version = "23.10.1" +version = "24.2.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, ] [package.dependencies] @@ -198,7 +209,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -454,47 +465,56 @@ files = [ [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - -[package.dependencies] -cffi = ">=1.12" + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = 
"cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -552,13 +572,13 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", [[package]] name = "furo" -version = "2023.9.10" +version = "2024.1.29" description = "A clean customisable Sphinx documentation theme." optional = false python-versions = ">=3.8" files = [ - {file = "furo-2023.9.10-py3-none-any.whl", hash = "sha256:513092538537dc5c596691da06e3c370714ec99bc438680edc1debffb73e5bfc"}, - {file = "furo-2023.9.10.tar.gz", hash = "sha256:5707530a476d2a63b8cad83b4f961f3739a69f4b058bcf38a03a39fa537195b2"}, + {file = "furo-2024.1.29-py3-none-any.whl", hash = "sha256:3548be2cef45a32f8cdc0272d415fcb3e5fa6a0eb4ddfe21df3ecf1fe45a13cf"}, + {file = "furo-2024.1.29.tar.gz", hash = "sha256:4d6b2fe3f10a6e36eb9cc24c1e7beb38d7a23fc7b3c382867503b7fcac8a1e02"}, ] [package.dependencies] @@ -981,13 +1001,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1452,38 +1472,38 @@ files = [ [[package]] name = "mypy" -version = "1.5.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, - {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, - {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, - {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, - {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, - {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, - {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, - {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, - {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, - {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, - {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, - {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, - {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, - {file = 
"mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, - {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, - {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, - {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, - {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, - {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = 
"mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -1494,6 +1514,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1554,15 +1575,18 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, [[package]] name = "netaddr" -version = "0.9.0" +version = "1.2.1" description = "A network address manipulation library for Python" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "netaddr-0.9.0-py3-none-any.whl", hash = "sha256:5148b1055679d2a1ec070c521b7db82137887fabd6d7e37f5199b44f775c3bb1"}, - {file = "netaddr-0.9.0.tar.gz", hash = "sha256:7b46fa9b1a2d71fd5de9e4a3784ef339700a53a08c8040f08baf5f1194da0128"}, + {file = "netaddr-1.2.1-py3-none-any.whl", hash = "sha256:bd9e9534b0d46af328cf64f0e5a23a5a43fca292df221c85580b27394793496e"}, + {file = "netaddr-1.2.1.tar.gz", hash = "sha256:6eb8fedf0412c6d294d06885c110de945cf4d22d2b510d0404f4e06950857987"}, ] +[package.extras] +nicer-shell = ["ipython"] + [[package]] name = "opentracing" version = "2.4.0" @@ -1849,18 +1873,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -1868,90 +1892,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.1" +version = "2.16.3" description = "" optional = false python-versions = ">=3.8" files = [ - 
{file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = 
"pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = 
"pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = 
"pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] @@ -1959,20 +1983,19 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygithub" -version = "2.1.1" +version = "2.2.0" description = "Use the full Github API v3" optional = false python-versions = ">=3.7" files = [ - {file = "PyGithub-2.1.1-py3-none-any.whl", hash = "sha256:4b528d5d6f35e991ea5fd3f942f58748f24938805cb7fcf24486546637917337"}, - {file = "PyGithub-2.1.1.tar.gz", hash = "sha256:ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c"}, + {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"}, + {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"}, ] [package.dependencies] Deprecated = "*" pyjwt = {version = ">=2.4.0", extras = ["crypto"]} pynacl = ">=1.4.0" -python-dateutil = "*" requests = ">=2.14.0" typing-extensions = ">=4.0.0" urllib3 = ">=1.26.0" @@ -2075,17 +2098,17 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "23.3.0" +version = "24.0.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, - {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, + {file = "pyOpenSSL-24.0.0-py3-none-any.whl", hash = "sha256:ba07553fb6fd6a7a2259adb9b84e12302a9a8a75c44046e8bb5d3e5ee887e3c3"}, + {file = "pyOpenSSL-24.0.0.tar.gz", hash = "sha256:6aa33039a93fffa4563e655b61d11364d01264be8ccb49906101e02a334530bf"}, ] [package.dependencies] -cryptography = ">=41.0.5,<42" +cryptography = ">=41.0.5,<43" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] @@ -2119,7 +2142,7 @@ s2repoze = ["paste", "repoze.who", "zope.interface"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, @@ -2421,28 +2444,28 @@ files = [ [[package]] name = "ruff" -version = "0.1.14" +version = "0.3.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"}, + {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"}, + {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"}, + {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"}, + {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"}, + {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"}, + {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"}, + {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"}, ] [[package]] @@ -2477,13 +2500,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.40.0" +version = "1.40.3" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.40.0.tar.gz", hash = "sha256:34ad8cfc9b877aaa2a8eb86bfe5296a467fffe0619b931a05b181c45f6da59bf"}, - {file = "sentry_sdk-1.40.0-py2.py3-none-any.whl", hash = "sha256:78575620331186d32f34b7ece6edea97ce751f58df822547d3ab85517881a27a"}, + {file = "sentry-sdk-1.40.3.tar.gz", hash = "sha256:3c2b027979bb400cd65a47970e64f8cef8acda86b288a27f42a98692505086cd"}, + {file = "sentry_sdk-1.40.3-py2.py3-none-any.whl", hash = "sha256:73383f28311ae55602bb6cc3b013830811135ba5521e41333a6e68f269413502"}, ] [package.dependencies] @@ -3050,13 +3073,13 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.21.0.20240118" +version = "4.21.0.20240311" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.21.0.20240118.tar.gz", hash = "sha256:31aae1b5adc0176c1155c2d4f58348b22d92ae64315e9cc83bd6902168839232"}, - {file = "types_jsonschema-4.21.0.20240118-py3-none-any.whl", hash = "sha256:77a4ac36b0be4f24274d5b9bf0b66208ee771c05f80e34c4641de7d63e8a872d"}, + {file = "types-jsonschema-4.21.0.20240311.tar.gz", hash = "sha256:f7165ce70abd91df490c73b089873afd2899c5e56430ee495b64f851ad01f287"}, + {file = "types_jsonschema-4.21.0.20240311-py3-none-any.whl", hash = "sha256:e872f5661513824edf9698f73a66c9c114713d93eab58699bd0532e7e6db5750"}, ] [package.dependencies] @@ -3064,13 +3087,13 @@ referencing = "*" [[package]] name = "types-netaddr" -version = "0.10.0.20240106" +version = "1.2.0.20240219" description = "Typing stubs for netaddr" optional = false python-versions = ">=3.8" files = [ - {file = "types-netaddr-0.10.0.20240106.tar.gz", hash = 
"sha256:7cc6c16bc76f57faf4a042184f748a05e9642b189caf7fe7e36c07cb87c057b3"}, - {file = "types_netaddr-0.10.0.20240106-py3-none-any.whl", hash = "sha256:0acd8116293b06abe66484cf033c2d597f039326c28e3df83b8abd5709f6c65d"}, + {file = "types-netaddr-1.2.0.20240219.tar.gz", hash = "sha256:984e70ad838218d3032f37f05a7e294f7b007fe274ec9d774265c8c06698395f"}, + {file = "types_netaddr-1.2.0.20240219-py3-none-any.whl", hash = "sha256:b26144e878acb8a1a9008e6997863714db04f8029a0f7f6bfe483c977d21b522"}, ] [[package]] @@ -3097,24 +3120,24 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.16" +version = "2.9.21.20240311" description = "Typing stubs for psycopg2" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"}, - {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"}, + {file = "types-psycopg2-2.9.21.20240311.tar.gz", hash = "sha256:722945dffa6a729bebc660f14137f37edfcead5a2c15eb234212a7d017ee8072"}, + {file = "types_psycopg2-2.9.21.20240311-py3-none-any.whl", hash = "sha256:2e137ae2b516ee0dbaab6f555086b6cfb723ba4389d67f551b0336adf4efcf1b"}, ] [[package]] name = "types-pyopenssl" -version = "23.3.0.0" +version = "24.0.0.20240311" description = "Typing stubs for pyOpenSSL" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, - {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, + {file = "types-pyOpenSSL-24.0.0.20240311.tar.gz", hash = "sha256:7bca00cfc4e7ef9c5d2663c6a1c068c35798e59670595439f6296e7ba3d58083"}, + {file = "types_pyOpenSSL-24.0.0.20240311-py3-none-any.whl", hash = "sha256:6e8e8bfad34924067333232c93f7fc4b369856d8bea0d5c9d1808cb290ab1972"}, ] [package.dependencies] @@ -3428,4 +3451,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "053bda662e95c273f4eda41d7ece8de0e404783ac66d54cdbedc396e196fb63a" +content-hash = "b510fa05f4ea33194bec079f5d04ebb3f9ffbb5c1ea96a0341d57ba770ef81e6" diff --git a/pyproject.toml b/pyproject.toml index cd8bb3942259..9a645079c30c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.101.0" +version = "1.104.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" @@ -321,7 +321,7 @@ all = [ # This helps prevents merge conflicts when running a batch of dependabot updates. isort = ">=5.10.1" black = ">=22.7.0" -ruff = "0.1.14" +ruff = "0.3.2" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" @@ -372,7 +372,7 @@ optional = true sphinx = {version = "^6.1", python = "^3.8"} sphinx-autodoc2 = {version = ">=0.4.2,<0.6.0", python = "^3.8"} myst-parser = {version = "^1.0.0", python = "^3.8"} -furo = ">=2022.12.7,<2024.0.0" +furo = ">=2022.12.7,<2025.0.0" [build-system] @@ -382,7 +382,7 @@ furo = ">=2022.12.7,<2024.0.0" # runtime errors caused by build system changes. # We are happy to raise these upper bounds upon request, # provided we check that it's safe to do so (i.e. that CI passes). 
-requires = ["poetry-core>=1.1.0,<=1.8.1", "setuptools_rust>=1.3,<=1.8.1"] +requires = ["poetry-core>=1.1.0,<=1.9.0", "setuptools_rust>=1.3,<=1.8.1"] build-backend = "poetry.core.masonry.api" diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 491848903b06..9ea9b41505ca 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -660,7 +660,7 @@ def _announce() -> None: Hi everyone. Synapse {current_version} has just been released. [notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \ -[docker](https://hub.docker.com/r/vectorim/synapse/tags?name={tag_name}) | \ +[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \ [debs](https://packages.matrix.org/debian/) | \ [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)""" ) diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 1dcc289df304..a533cad5ae79 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -1040,10 +1040,10 @@ async def _get_total_count_to_port( return done, remaining + done async def _setup_state_group_id_seq(self) -> None: - curr_id: Optional[ - int - ] = await self.sqlite_store.db_pool.simple_select_one_onecol( - table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True + curr_id: Optional[int] = ( + await self.sqlite_store.db_pool.simple_select_one_onecol( + table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True + ) ) if not curr_id: @@ -1132,13 +1132,13 @@ def r(txn: LoggingTransaction) -> None: ) async def _setup_auth_chain_sequence(self) -> None: - curr_chain_id: Optional[ - int - ] = await self.sqlite_store.db_pool.simple_select_one_onecol( - table="event_auth_chains", - keyvalues={}, - retcol="MAX(chain_id)", - allow_none=True, + curr_chain_id: Optional[int] = ( + await self.sqlite_store.db_pool.simple_select_one_onecol( + table="event_auth_chains", + keyvalues={}, + retcol="MAX(chain_id)", + allow_none=True, + ) ) def r(txn: LoggingTransaction) -> None: diff --git a/synapse/api/constants.py b/synapse/api/constants.py index f3d2c8073da7..98884b4967c3 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -43,7 +43,6 @@ class Membership: - """Represents the membership states of a user in a room.""" INVITE: Final = "invite" @@ -130,6 +129,8 @@ class EventTypes: Reaction: Final = "m.reaction" + CallInvite: Final = "m.call.invite" + class ToDeviceEventTypes: RoomKeyRequest: Final = "m.room_key_request" diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b44088f9b3e2..dd4a1ae70639 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -517,8 +517,6 @@ def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict": class LimitExceededError(SynapseError): """A client has sent too many requests and is being throttled.""" - include_retry_after_header = False - def __init__( self, limiter_name: str, @@ -526,9 +524,10 @@ def __init__( retry_after_ms: Optional[int] = None, errcode: str = Codes.LIMIT_EXCEEDED, ): + # Use HTTP header Retry-After to enable library-assisted retry handling. 
headers = ( {"Retry-After": str(math.ceil(retry_after_ms / 1000))} - if self.include_retry_after_header and retry_after_ms is not None + if retry_after_ms is not None else None ) super().__init__(code, "Too Many Requests", errcode, headers=headers) diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 7ff8ad2d5577..fbc1d58ecb1a 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -370,9 +370,11 @@ class RoomVersionCapability: MSC3244_CAPABILITIES = { cap.identifier: { - "preferred": cap.preferred_version.identifier - if cap.preferred_version is not None - else None, + "preferred": ( + cap.preferred_version.identifier + if cap.preferred_version is not None + else None + ), "support": [ v.identifier for v in KNOWN_ROOM_VERSIONS.values() diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b241dbf62789..8a545a86c1a7 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -188,9 +188,9 @@ def _configure_named_resource( PasswordResetSubmitTokenResource, ) - resources[ - "/_synapse/client/password_reset/email/submit_token" - ] = PasswordResetSubmitTokenResource(self) + resources["/_synapse/client/password_reset/email/submit_token"] = ( + PasswordResetSubmitTokenResource(self) + ) if name == "consent": from synapse.rest.consent.consent_resource import ConsentResource diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 34fa2bb655e9..19322471dc55 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -362,16 +362,16 @@ async def push_bulk( # TODO: Update to stable prefixes once MSC3202 completes FCP merge if service.msc3202_transaction_extensions: if one_time_keys_count: - body[ - "org.matrix.msc3202.device_one_time_key_counts" - ] = one_time_keys_count - body[ - "org.matrix.msc3202.device_one_time_keys_count" - ] = one_time_keys_count + body["org.matrix.msc3202.device_one_time_key_counts"] = ( + one_time_keys_count + ) + body["org.matrix.msc3202.device_one_time_keys_count"] = ( + one_time_keys_count + ) if unused_fallback_keys: - body[ - "org.matrix.msc3202.device_unused_fallback_key_types" - ] = unused_fallback_keys + body["org.matrix.msc3202.device_unused_fallback_key_types"] = ( + unused_fallback_keys + ) if device_list_summary: body["org.matrix.msc3202.device_lists"] = { "changed": list(device_list_summary.changed), diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d43d9da956ca..0bd3befdc2ec 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -25,7 +25,6 @@ import attr import attr.validators -from synapse.api.errors import LimitExceededError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.config import ConfigError from synapse.config._base import Config, RootConfig @@ -415,14 +414,6 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "msc4010_push_rules_account_data", False ) - # MSC4041: Use HTTP header Retry-After to enable library-assisted retry handling - # - # This is a bit hacky, but the most reasonable way to *alway* include the - # headers. 
- LimitExceededError.include_retry_after_header = experimental.get( - "msc4041_enabled", False - ) - self.msc4028_push_encrypted_events = experimental.get( "msc4028_push_encrypted_events", False ) diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 102dba0219b2..d0a03baf55d2 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -342,6 +342,9 @@ def _parse_oidc_config_dict( user_mapping_provider_config=user_mapping_provider_config, attribute_requirements=attribute_requirements, enable_registration=oidc_config.get("enable_registration", True), + additional_authorization_parameters=oidc_config.get( + "additional_authorization_parameters", {} + ), ) @@ -444,3 +447,6 @@ class OidcProviderConfig: # Whether automatic registrations are enabled in the ODIC flow. Defaults to True enable_registration: bool + + # Additional parameters that will be passed to the authorization grant URL + additional_authorization_parameters: Mapping[str, str] diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 9e2b1f3de1ae..c7f3e6d35ef3 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -171,9 +171,9 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: refreshable_access_token_lifetime = self.parse_duration( refreshable_access_token_lifetime ) - self.refreshable_access_token_lifetime: Optional[ - int - ] = refreshable_access_token_lifetime + self.refreshable_access_token_lifetime: Optional[int] = ( + refreshable_access_token_lifetime + ) if ( self.session_lifetime is not None @@ -237,6 +237,14 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.inhibit_user_in_use_error = config.get("inhibit_user_in_use_error", False) + # List of user IDs not to send out device list updates for when they + # register new devices. This is useful to handle bot accounts. + # + # Note: This will still send out device list updates if the device is + # later updated, e.g. end to end keys are added. + dont_notify_new_devices_for = config.get("dont_notify_new_devices_for", []) + self.dont_notify_new_devices_for = frozenset(dont_notify_new_devices_for) + def generate_config_section( self, generate_secrets: bool = False, **kwargs: Any ) -> str: diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 4655882b4b3b..164547049906 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -199,9 +199,9 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: provider_config["module"] == "file_system" or provider_config["module"] == "synapse.rest.media.v1.storage_provider" ): - provider_config[ - "module" - ] = "synapse.media.storage_provider.FileStorageProviderBackend" + provider_config["module"] = ( + "synapse.media.storage_provider.FileStorageProviderBackend" + ) provider_class, parsed_config = load_module( provider_config, ("media_storage_providers", "" % i) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 1e7e5f70fef0..8c301e077c29 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -839,11 +839,12 @@ async def _fetch_keys( Map from server_name -> key_id -> FetchKeyResult """ - results = {} + # We only need to do one request per server. 
+ servers_to_fetch = {k.server_name for k in keys_to_fetch} - async def get_keys(key_to_fetch_item: _FetchKeyRequest) -> None: - server_name = key_to_fetch_item.server_name + results = {} + async def get_keys(server_name: str) -> None: try: keys = await self.get_server_verify_keys_v2_direct(server_name) results[server_name] = keys @@ -852,7 +853,7 @@ async def get_keys(key_to_fetch_item: _FetchKeyRequest) -> None: except Exception: logger.exception("Error getting keys from %s", server_name) - await yieldable_gather_results(get_keys, keys_to_fetch) + await yieldable_gather_results(get_keys, servers_to_fetch) return results async def get_server_verify_keys_v2_direct( diff --git a/synapse/event_auth.py b/synapse/event_auth.py index d922c8dc35b5..f5abcde2dbe6 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -23,7 +23,20 @@ import collections.abc import logging import typing -from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple, Union +from typing import ( + Any, + ChainMap, + Dict, + Iterable, + List, + Mapping, + MutableMapping, + Optional, + Set, + Tuple, + Union, + cast, +) from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes @@ -75,8 +88,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, "EventBase"]: - ... + ) -> Dict[str, "EventBase"]: ... def validate_event_for_room_version(event: "EventBase") -> None: @@ -175,12 +187,22 @@ async def check_state_independent_auth_rules( return # 2. Reject if event has auth_events that: ... + auth_events: ChainMap[str, EventBase] = ChainMap() if batched_auth_events: - # Copy the batched auth events to avoid mutating them. - auth_events = dict(batched_auth_events) - needed_auth_event_ids = set(event.auth_event_ids()) - batched_auth_events.keys() + # batched_auth_events can become very large. To avoid repeatedly copying it, which + # would significantly impact performance, we use a ChainMap. + # batched_auth_events must be cast to MutableMapping because .new_child() requires + # this type. This casting is safe as the mapping is never mutated. + auth_events = auth_events.new_child( + cast(MutableMapping[str, "EventBase"], batched_auth_events) + ) + needed_auth_event_ids = [ + event_id + for event_id in event.auth_event_ids() + if event_id not in batched_auth_events + ] if needed_auth_event_ids: - auth_events.update( + auth_events = auth_events.new_child( await store.get_events( needed_auth_event_ids, redact_behaviour=EventRedactBehaviour.as_is, @@ -188,10 +210,12 @@ async def check_state_independent_auth_rules( ) ) else: - auth_events = await store.get_events( - event.auth_event_ids(), - redact_behaviour=EventRedactBehaviour.as_is, - allow_rejected=True, + auth_events = auth_events.new_child( + await store.get_events( + event.auth_event_ids(), + redact_behaviour=EventRedactBehaviour.as_is, + allow_rejected=True, + ) ) room_id = event.room_id diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 7ec696c6c017..36e0f47e5141 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -93,16 +93,14 @@ def __get__( self, instance: Literal[None], owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> "DictProperty": - ... + ) -> "DictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> T: - ... + ) -> T: ... 
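Why the `ChainMap` in `check_state_independent_auth_rules` above helps: `dict(batched_auth_events)` copied every entry on each call, whereas `ChainMap.new_child()` stacks another lookup layer in constant time. A standalone sketch with plain strings standing in for events:

```python
from collections import ChainMap
from typing import MutableMapping, cast

batched = {"$a": "event-a", "$b": "event-b"}  # potentially thousands of entries
fetched = {"$c": "event-c"}                   # the handful we had to load from the DB

auth_events: ChainMap[str, str] = ChainMap()
# No copy is made: the ChainMap just keeps a reference to `batched`.
auth_events = auth_events.new_child(cast(MutableMapping[str, str], batched))
auth_events = auth_events.new_child(fetched)

assert auth_events["$a"] == "event-a"  # falls through to the batched layer
assert auth_events["$c"] == "event-c"  # found in the most recent layer
```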
def __get__( self, @@ -161,16 +159,14 @@ def __get__( self, instance: Literal[None], owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> "DefaultDictProperty": - ... + ) -> "DefaultDictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, owner: Optional[Type[_DictPropertyInstance]] = None, - ) -> T: - ... + ) -> T: ... def __get__( self, diff --git a/synapse/events/utils.py b/synapse/events/utils.py index cc52d0d1e917..e0613d0dbc2d 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -612,9 +612,9 @@ async def _inject_bundled_aggregations( serialized_aggregations = {} if event_aggregations.references: - serialized_aggregations[ - RelationTypes.REFERENCE - ] = event_aggregations.references + serialized_aggregations[RelationTypes.REFERENCE] = ( + event_aggregations.references + ) if event_aggregations.replace: # Include information about it in the relations dict. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index dc8cd5ec9a97..65d3a661fe1e 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -169,9 +169,9 @@ def __init__(self, hs: "HomeServer"): # We cache responses to state queries, as they take a while and often # come in waves. - self._state_resp_cache: ResponseCache[ - Tuple[str, Optional[str]] - ] = ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( + ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + ) self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( hs.get_clock(), "state_ids_resp", timeout_ms=30000 ) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index e9a2386a5cb4..b5c9fcff7cfc 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -88,9 +88,9 @@ def __init__(self, hs: "HomeServer"): # Stores the destinations we need to explicitly send presence to about a # given user. # Stream position -> (user_id, destinations) - self.presence_destinations: SortedDict[ - int, Tuple[str, Iterable[str]] - ] = SortedDict() + self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = ( + SortedDict() + ) # (destination, key) -> EDU self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {} diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 9ed6fc98b515..188848088153 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -192,10 +192,9 @@ ) # Time (in s) to wait before trying to wake up destinations that have -# catch-up outstanding. This will also be the delay applied at startup -# before trying the same. +# catch-up outstanding. # Please note that rate limiting still applies, so while the loop is -# executed every X seconds the destinations may not be wake up because +# executed every X seconds the destinations may not be woken up because # they are being rate limited following previous attempt failures. 
WAKEUP_RETRY_PERIOD_SEC = 60 @@ -428,18 +427,17 @@ def __init__(self, hs: "HomeServer"): / hs.config.ratelimiting.federation_rr_transactions_per_room_per_second ) + self._external_cache = hs.get_external_cache() + self._destination_wakeup_queue = _DestinationWakeupQueue(self, self.clock) + # Regularly wake up destinations that have outstanding PDUs to be caught up - self.clock.looping_call( + self.clock.looping_call_now( run_as_background_process, WAKEUP_RETRY_PERIOD_SEC * 1000.0, "wake_destinations_needing_catchup", self._wake_destinations_needing_catchup, ) - self._external_cache = hs.get_external_cache() - - self._destination_wakeup_queue = _DestinationWakeupQueue(self, self.clock) - def _get_per_destination_queue(self, destination: str) -> PerDestinationQueue: """Get or create a PerDestinationQueue for the given destination diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py index 37cc3d3ff560..89e944bc1714 100644 --- a/synapse/handlers/account.py +++ b/synapse/handlers/account.py @@ -118,10 +118,10 @@ async def _get_local_account_status(self, user_id: UserID) -> JsonDict: } if self._use_account_validity_in_account_status: - status[ - "org.matrix.expired" - ] = await self._account_validity_handler.is_user_expired( - user_id.to_string() + status["org.matrix.expired"] = ( + await self._account_validity_handler.is_user_expired( + user_id.to_string() + ) ) return status diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f233f1b03475..a1fab99f6b70 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -2185,7 +2185,7 @@ async def check_auth( # result is always the right type, but as it is 3rd party code it might not be if not isinstance(result, tuple) or len(result) != 2: - logger.warning( + logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" " Optional[Tuple[str, Optional[Callable]]]", callback, @@ -2248,7 +2248,7 @@ async def check_3pid_auth( # result is always the right type, but as it is 3rd party code it might not be if not isinstance(result, tuple) or len(result) != 2: - logger.warning( + logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" " Optional[Tuple[str, Optional[Callable]]]", callback, diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index de5ed5d5b63e..b13c4b6cb930 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -18,9 +18,11 @@ # [This file includes modifications made by New Vector Limited] # # +import itertools import logging from typing import TYPE_CHECKING, Optional +from synapse.api.constants import Membership from synapse.api.errors import SynapseError from synapse.handlers.device import DeviceHandler from synapse.metrics.background_process_metrics import run_as_background_process @@ -168,9 +170,9 @@ async def deactivate_account( # parts users from rooms (if it isn't already running) self._start_user_parting() - # Reject all pending invites for the user, so that the user doesn't show up in the - # "invited" section of rooms' members list. - await self._reject_pending_invites_for_user(user_id) + # Reject all pending invites and knocks for the user, so that the + # user doesn't show up in the "invited" section of rooms' members list. + await self._reject_pending_invites_and_knocks_for_user(user_id) # Remove all information on the user from the account_validity table. 
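Stepping back to the federation sender hunk above: replacing `looping_call` with `looping_call_now` (and dropping the "delay applied at startup" sentence from the comment) means the catch-up check now also fires immediately at startup rather than only after the first 60-second interval. The distinction, sketched with Twisted's `LoopingCall` (illustrative helpers, not Synapse's actual `Clock` methods):

```python
from twisted.internet.task import LoopingCall

def looping_call(f, interval_s: float) -> LoopingCall:
    call = LoopingCall(f)
    call.start(interval_s, now=False)  # old behaviour: first run after one interval
    return call

def looping_call_now(f, interval_s: float) -> LoopingCall:
    call = LoopingCall(f)
    call.start(interval_s, now=True)   # new behaviour: run once immediately, then loop
    return call
```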
if self._account_validity_enabled: @@ -194,34 +196,37 @@ async def deactivate_account( return identity_server_supports_unbinding - async def _reject_pending_invites_for_user(self, user_id: str) -> None: - """Reject pending invites addressed to a given user ID. + async def _reject_pending_invites_and_knocks_for_user(self, user_id: str) -> None: + """Reject pending invites and knocks addressed to a given user ID. Args: - user_id: The user ID to reject pending invites for. + user_id: The user ID to reject pending invites and knocks for. """ user = UserID.from_string(user_id) pending_invites = await self.store.get_invited_rooms_for_local_user(user_id) + pending_knocks = await self.store.get_knocked_at_rooms_for_local_user(user_id) - for room in pending_invites: + for room in itertools.chain(pending_invites, pending_knocks): try: await self._room_member_handler.update_membership( create_requester(user, authenticated_entity=self._server_name), user, room.room_id, - "leave", + Membership.LEAVE, ratelimit=False, require_consent=False, ) logger.info( - "Rejected invite for deactivated user %r in room %r", + "Rejected %r for deactivated user %r in room %r", + room.membership, user_id, room.room_id, ) except Exception: logger.exception( - "Failed to reject invite for user %r in room %r:" + "Failed to reject %r for user %r in room %r:" " ignoring and continuing", + room.membership, user_id, room.room_id, ) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 9062fac91a47..67953a3ed92c 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -429,6 +429,10 @@ def __init__(self, hs: "HomeServer"): self._storage_controllers = hs.get_storage_controllers() self.db_pool = hs.get_datastores().main.db_pool + self._dont_notify_new_devices_for = ( + hs.config.registration.dont_notify_new_devices_for + ) + self.device_list_updater = DeviceListUpdater(hs, self) federation_registry = hs.get_federation_registry() @@ -505,6 +509,9 @@ async def check_device_registered( self._check_device_name_length(initial_device_display_name) + # Check if we should send out device lists updates for this new device. 
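The comment closing the hunk above introduces the suppression check added just below it: device-list broadcasts are skipped for user IDs in the new `dont_notify_new_devices_for` setting, which is aimed at bot accounts that register devices in bulk. Condensed into a runnable toy (the storage and notify helpers are stand-ins):

```python
from typing import FrozenSet, Set

_seen: Set[str] = set()

def store_device(user_id: str, device_id: str) -> bool:
    """Stand-in for the datastore call: returns True only for a new device."""
    key = f"{user_id}/{device_id}"
    if key in _seen:
        return False
    _seen.add(key)
    return True

def register_device(user_id: str, device_id: str, dont_notify: FrozenSet[str]) -> None:
    new_device = store_device(user_id, device_id)
    # Suppress the device-list broadcast for opted-out users; later updates
    # (e.g. uploading end-to-end keys) would still notify as usual.
    if new_device and user_id not in dont_notify:
        print(f"broadcasting device-list update for {user_id}")

bots = frozenset({"@bot:example.org"})
register_device("@bot:example.org", "DEV1", bots)    # silent
register_device("@alice:example.org", "DEV1", bots)  # broadcasts
```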
+ notify = user_id not in self._dont_notify_new_devices_for + if device_id is not None: new_device = await self.store.store_device( user_id=user_id, @@ -514,7 +521,8 @@ async def check_device_registered( auth_provider_session_id=auth_provider_session_id, ) if new_device: - await self.notify_device_update(user_id, [device_id]) + if notify: + await self.notify_device_update(user_id, [device_id]) return device_id # if the device id is not specified, we'll autogen one, but loop a few @@ -530,7 +538,8 @@ async def check_device_registered( auth_provider_session_id=auth_provider_session_id, ) if new_device: - await self.notify_device_update(user_id, [new_device_id]) + if notify: + await self.notify_device_update(user_id, [new_device_id]) return new_device_id attempts += 1 diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 5f3dc30b6315..ad2b0f5fcc97 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -265,9 +265,9 @@ async def _delete_association(self, room_alias: RoomAlias) -> Optional[str]: async def get_association(self, room_alias: RoomAlias) -> JsonDict: room_id = None if self.hs.is_mine(room_alias): - result: Optional[ - RoomAliasMapping - ] = await self.get_association_from_room_alias(room_alias) + result: Optional[RoomAliasMapping] = ( + await self.get_association_from_room_alias(room_alias) + ) if result: room_id = result.room_id diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2b7aad5b5816..299588e47681 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1001,11 +1001,11 @@ async def on_make_join_request( ) if include_auth_user_id: - event_content[ - EventContentFields.AUTHORISING_USER - ] = await self._event_auth_handler.get_user_which_could_invite( - room_id, - state_ids, + event_content[EventContentFields.AUTHORISING_USER] = ( + await self._event_auth_handler.get_user_which_could_invite( + room_id, + state_ids, + ) ) builder = self.event_builder_factory.for_room_version( diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index bde45308d40b..c85deaed5621 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -1367,9 +1367,9 @@ async def _get_state_ids_after_missing_prev_event( ) if remote_event.is_state() and remote_event.rejected_reason is None: - state_map[ - (remote_event.type, remote_event.state_key) - ] = remote_event.event_id + state_map[(remote_event.type, remote_event.state_key)] = ( + remote_event.event_id + ) return state_map @@ -1757,17 +1757,25 @@ async def prep(event: EventBase) -> None: events_and_contexts_to_persist.append((event, context)) - for event in sorted_auth_events: + for i, event in enumerate(sorted_auth_events): await prep(event) - await self.persist_events_and_notify( - room_id, - events_and_contexts_to_persist, - # Mark these events backfilled as they're historic events that will - # eventually be backfilled. For example, missing events we fetch - # during backfill should be marked as backfilled as well. - backfilled=True, - ) + # The above function is typically not async, and so won't yield to + # the reactor. For large rooms let's yield to the reactor + # occasionally to ensure we don't block other work. + if (i + 1) % 1000 == 0: + await self._clock.sleep(0) + + # Also persist the new event in batches for similar reasons as above. 
+ for batch in batch_iter(events_and_contexts_to_persist, 1000): + await self.persist_events_and_notify( + room_id, + batch, + # Mark these events as backfilled as they're historic events that will + # eventually be backfilled. For example, missing events we fetch + # during backfill should be marked as backfilled as well. + backfilled=True, + ) @trace async def _check_event_auth( diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7e5bb97f2a4a..ccaa5508ff0e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -34,6 +34,7 @@ EventTypes, GuestAccess, HistoryVisibility, + JoinRules, Membership, RelationTypes, UserTypes, @@ -1325,6 +1326,18 @@ async def create_new_client_event( self.validator.validate_new(event, self.config) await self._validate_event_relation(event) + + if event.type == EventTypes.CallInvite: + room_id = event.room_id + room_info = await self.store.get_room_with_stats(room_id) + assert room_info is not None + + if room_info.join_rules == JoinRules.PUBLIC: + raise SynapseError( + 403, + "Call invites are not allowed in public rooms.", + Codes.FORBIDDEN, + ) logger.debug("Created event %s", event.event_id) return event, context @@ -1654,9 +1667,9 @@ async def cache_joined_hosts_for_events( expiry_ms=60 * 60 * 1000, ) - self._external_cache_joined_hosts_updates[ - state_entry.state_group - ] = None + self._external_cache_joined_hosts_updates[state_entry.state_group] = ( + None + ) async def _validate_canonical_alias( self, diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index fe13d82b669d..22b59829fa87 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -65,6 +65,7 @@ from synapse.http.servlet import parse_string from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable +from synapse.module_api import ModuleApi from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart from synapse.util import Clock, json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall @@ -421,9 +422,19 @@ def __init__( # from the IdP's jwks_uri, if required. 
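Both `federation_event.py` changes above serve the same goal: keeping the reactor responsive while a very large backfilled room is persisted. A self-contained sketch of the two techniques, with a stand-in `batch_iter` (the real helper lives in `synapse.util.iterutils`) and `asyncio` in place of Synapse's clock:

```python
import asyncio
from itertools import islice
from typing import Iterable, Iterator, List, Tuple, TypeVar

T = TypeVar("T")

def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
    """Yield successive fixed-size chunks of `iterable`."""
    it = iter(iterable)
    while batch := tuple(islice(it, size)):
        yield batch

async def persist_all(events: List[int]) -> None:
    prepared = []
    for i, event in enumerate(events):
        prepared.append(event)      # stands in for CPU-bound prep that never awaits
        if (i + 1) % 1000 == 0:
            await asyncio.sleep(0)  # yield to the event loop occasionally
    # Persist in bounded batches instead of one enormous write.
    for batch in batch_iter(prepared, 1000):
        await asyncio.sleep(0)      # stand-in for the actual persist call

asyncio.run(persist_all(list(range(2500))))
```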
self._jwks = RetryOnExceptionCachedCall(self._load_jwks) - self._user_mapping_provider = provider.user_mapping_provider_class( - provider.user_mapping_provider_config + user_mapping_provider_init_method = ( + provider.user_mapping_provider_class.__init__ ) + if len(inspect.signature(user_mapping_provider_init_method).parameters) == 3: + self._user_mapping_provider = provider.user_mapping_provider_class( + provider.user_mapping_provider_config, + ModuleApi(hs, hs.get_auth_handler()), + ) + else: + self._user_mapping_provider = provider.user_mapping_provider_class( + provider.user_mapping_provider_config, + ) + self._skip_verification = provider.skip_verification self._allow_existing_users = provider.allow_existing_users @@ -442,6 +453,10 @@ def __init__( # optional brand identifier for this auth provider self.idp_brand = provider.idp_brand + self.additional_authorization_parameters = ( + provider.additional_authorization_parameters + ) + self._sso_handler = hs.get_sso_handler() self._device_handler = hs.get_device_handler() @@ -818,14 +833,38 @@ async def _fetch_userinfo(self, token: Token) -> UserInfo: logger.debug("Using the OAuth2 access_token to request userinfo") metadata = await self.load_metadata() - resp = await self._http_client.get_json( + resp = await self._http_client.request( + "GET", metadata["userinfo_endpoint"], - headers={"Authorization": ["Bearer {}".format(token["access_token"])]}, + headers=Headers( + {"Authorization": ["Bearer {}".format(token["access_token"])]} + ), ) - logger.debug("Retrieved user info from userinfo endpoint: %r", resp) + body = await readBody(resp) + + content_type_headers = resp.headers.getRawHeaders("Content-Type") + assert content_type_headers + # We use `startswith` because the header value can contain the `charset` parameter + # even if it is useless, and Twisted doesn't take care of that for us. + if content_type_headers[0].startswith("application/jwt"): + alg_values = metadata.get( + "id_token_signing_alg_values_supported", ["RS256"] + ) + jwt = JsonWebToken(alg_values) + jwk_set = await self.load_jwks() + try: + decoded_resp = jwt.decode(body, key=jwk_set) + except ValueError: + logger.info("Reloading JWKS after decode error") + jwk_set = await self.load_jwks(force=True) # try reloading the jwks + decoded_resp = jwt.decode(body, key=jwk_set) + else: + decoded_resp = json_decoder.decode(body.decode("utf-8")) + + logger.debug("Retrieved user info from userinfo endpoint: %r", decoded_resp) - return UserInfo(resp) + return UserInfo(decoded_resp) async def _verify_jwt( self, @@ -971,17 +1010,21 @@ async def handle_redirect_request( metadata = await self.load_metadata() + additional_authorization_parameters = dict( + self.additional_authorization_parameters + ) # Automatically enable PKCE if it is supported. - extra_grant_values = {} if metadata.get("code_challenge_methods_supported"): code_verifier = generate_token(48) # Note that we verified the server supports S256 earlier (in # OidcProvider._validate_metadata). 
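The provider-construction change above mirrors what the SAML mapping provider already does: pass a `ModuleApi` to OIDC mapping providers that can accept it, without breaking older two-argument providers. The arity check, condensed into a runnable sketch with toy classes:

```python
import inspect

class LegacyProvider:
    def __init__(self, config):  # old two-argument form (self + config)
        self.config = config

class ModernProvider:
    def __init__(self, config, module_api):  # opts in to the module API
        self.config, self.module_api = config, module_api

def build_provider(cls, config, module_api):
    # Three parameters on __init__ (self, config, module_api) means the
    # provider wants the ModuleApi; anything else gets the legacy call.
    if len(inspect.signature(cls.__init__).parameters) == 3:
        return cls(config, module_api)
    return cls(config)

assert isinstance(build_provider(LegacyProvider, {}, object()), LegacyProvider)
assert isinstance(build_provider(ModernProvider, {}, object()), ModernProvider)
```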
- extra_grant_values = { - "code_challenge_method": "S256", - "code_challenge": create_s256_code_challenge(code_verifier), - } + additional_authorization_parameters.update( + { + "code_challenge_method": "S256", + "code_challenge": create_s256_code_challenge(code_verifier), + } + ) cookie = self._macaroon_generaton.generate_oidc_session_token( state=state, @@ -1020,7 +1063,7 @@ async def handle_redirect_request( scope=self._scopes, state=state, nonce=nonce, - **extra_grant_values, + **additional_authorization_parameters, ) async def handle_oidc_callback( @@ -1583,7 +1626,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]): This is the default mapping provider. """ - def __init__(self, config: JinjaOidcMappingConfig): + def __init__(self, config: JinjaOidcMappingConfig, module_api: ModuleApi): self._config = config @staticmethod diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 21d3c71d8e29..37ee625f7177 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -493,9 +493,9 @@ def __init__(self, hs: "HomeServer"): # The number of ongoing syncs on this process, by (user ID, device ID). # Empty if _presence_enabled is false. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int - ] = {} + self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = ( + {} + ) self.notifier = hs.get_notifier() self.instance_id = hs.get_instance_id() @@ -818,9 +818,9 @@ def __init__(self, hs: "HomeServer"): # Keeps track of the number of *ongoing* syncs on this process. While # this is non zero a user will never go offline. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int - ] = {} + self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = ( + {} + ) # Keeps track of the number of *ongoing* syncs on other processes. # diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 279d393a5a12..e51e282a9f78 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -320,9 +320,9 @@ async def check_avatar_size_and_mime_type(self, mxc: str) -> bool: server_name = host if self._is_mine_server_name(server_name): - media_info: Optional[ - Union[LocalMedia, RemoteMedia] - ] = await self.store.get_local_media(media_id) + media_info: Optional[Union[LocalMedia, RemoteMedia]] = ( + await self.store.get_local_media(media_id) + ) else: media_info = await self.store.get_cached_remote_media(server_name, media_id) diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 135a662267cb..fb39c8e04b5d 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -55,12 +55,12 @@ async def received_client_read_marker( should_update = True # Get event ordering, this also ensures we know about the event - event_ordering = await self.store.get_event_ordering(event_id) + event_ordering = await self.store.get_event_ordering(event_id, room_id) if existing_read_marker: try: old_event_ordering = await self.store.get_event_ordering( - existing_read_marker["event_id"] + existing_read_marker["event_id"], room_id ) except SynapseError: # Old event no longer exists, assume new is ahead. 
This may diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index 828a4b4cbd19..931ac0c81331 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -188,13 +188,13 @@ async def get_relations( if include_original_event: # Do not bundle aggregations when retrieving the original event because # we want the content before relations are applied to it. - return_value[ - "original_event" - ] = await self._event_serializer.serialize_event( - event, - now, - bundle_aggregations=None, - config=serialize_options, + return_value["original_event"] = ( + await self._event_serializer.serialize_event( + event, + now, + bundle_aggregations=None, + config=serialize_options, + ) ) if next_token: diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 6b116dce8cea..8b5ffb135e30 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -151,7 +151,7 @@ def __init__(self, hs: "HomeServer"): "history_visibility": HistoryVisibility.SHARED, "original_invitees_have_ops": False, "guest_can_join": False, - "power_level_content_override": {}, + "power_level_content_override": {EventTypes.CallInvite: 50}, }, } @@ -538,10 +538,10 @@ async def clone_existing_room( # deep-copy the power-levels event before we start modifying it # note that if frozen_dicts are enabled, `power_levels` will be a frozen # dict so we can't just copy.deepcopy it. - initial_state[ - (EventTypes.PowerLevels, "") - ] = power_levels = copy_and_fixup_power_levels_contents( - initial_state[(EventTypes.PowerLevels, "")] + initial_state[(EventTypes.PowerLevels, "")] = power_levels = ( + copy_and_fixup_power_levels_contents( + initial_state[(EventTypes.PowerLevels, "")] + ) ) # Resolve the minimum power level required to send any state event @@ -1362,9 +1362,11 @@ def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: visibility = room_config.get("visibility", "private") preset_name = room_config.get( "preset", - RoomCreationPreset.PRIVATE_CHAT - if visibility == "private" - else RoomCreationPreset.PUBLIC_CHAT, + ( + RoomCreationPreset.PRIVATE_CHAT + if visibility == "private" + else RoomCreationPreset.PUBLIC_CHAT + ), ) try: preset_config = self._presets_dict[preset_name] diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index d238c40bcf74..9e9f6cd06258 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -1236,11 +1236,11 @@ async def _should_perform_remote_join( # If this is going to be a local join, additional information must # be included in the event content in order to efficiently validate # the event. - content[ - EventContentFields.AUTHORISING_USER - ] = await self.event_auth_handler.get_user_which_could_invite( - room_id, - state_before_join, + content[EventContentFields.AUTHORISING_USER] = ( + await self.event_auth_handler.get_user_which_could_invite( + room_id, + state_before_join, + ) ) return False, [] diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 437cb5509c6a..8e39e76c97a0 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -150,7 +150,7 @@ class UserAttributes: display_name: Optional[str] = None picture: Optional[str] = None # mypy thinks these are incompatible for some reason. 
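Tying the call-invite changes together (the 403 in `message.py` earlier, the preset override in `room.py` above, and the `/sync` filtering below): setting `m.call.invite` to power level 50 in the public-chat preset stops ordinary members of public rooms from ringing every participant. The underlying power-level rule, as a toy checker rather than Synapse's event-auth code:

```python
from typing import Dict

def can_send_event(sender_level: int, event_type: str,
                   events: Dict[str, int], events_default: int = 0) -> bool:
    """A sender may send an event iff their level meets the event's required level."""
    return sender_level >= events.get(event_type, events_default)

public_room_override = {"m.call.invite": 50}

assert not can_send_event(0, "m.call.invite", public_room_override)  # ordinary member
assert can_send_event(50, "m.call.invite", public_room_override)     # moderator
assert can_send_event(0, "m.room.message", public_room_override)     # chat unaffected
```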
- emails: StrCollection = attr.Factory(list) # type: ignore[assignment] + emails: StrCollection = attr.Factory(list) @attr.s(slots=True, auto_attribs=True) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 9122a79b4c05..3aa2e2b7ba51 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -41,6 +41,7 @@ AccountDataTypes, EventContentFields, EventTypes, + JoinRules, Membership, ) from synapse.api.filtering import FilterCollection @@ -675,13 +676,22 @@ async def _load_filtered_recents( ) ) - loaded_recents = await filter_events_for_client( + filtered_recents = await filter_events_for_client( self._storage_controllers, sync_config.user.to_string(), loaded_recents, always_include_ids=current_state_ids, ) + loaded_recents = [] + for event in filtered_recents: + if event.type == EventTypes.CallInvite: + room_info = await self.store.get_room_with_stats(event.room_id) + assert room_info is not None + if room_info.join_rules == JoinRules.PUBLIC: + continue + loaded_recents.append(event) + log_kv({"loaded_recents_after_client_filtering": len(loaded_recents)}) loaded_recents.extend(recents) @@ -1014,30 +1024,6 @@ async def compute_state_delta( if event.is_state(): timeline_state[(event.type, event.state_key)] = event.event_id - if full_state: - # always make sure we LL ourselves so we know we're in the room - # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209 - # We only need apply this on full state syncs given we disabled - # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840. - # We don't insert ourselves into `members_to_fetch`, because in some - # rare cases (an empty event batch with a now_token after the user's - # leave in a partial state room which another local user has - # joined), the room state will be missing our membership and there - # is no guarantee that our membership will be in the auth events of - # timeline events when the room is partial stated. - state_filter = StateFilter.from_lazy_load_member_list( - members_to_fetch.union((sync_config.user.to_string(),)) - ) - else: - state_filter = StateFilter.from_lazy_load_member_list( - members_to_fetch - ) - - # We are happy to use partial state to compute the `/sync` response. - # Since partial state may not include the lazy-loaded memberships we - # require, we fix up the state response afterwards with memberships from - # auth events. - await_full_state = False else: timeline_state = { (event.type, event.state_key): event.event_id @@ -1045,9 +1031,6 @@ async def compute_state_delta( if event.is_state() } - state_filter = StateFilter.all() - await_full_state = True - # Now calculate the state to return in the sync response for the room. # This is more or less the change in state between the end of the previous # sync's timeline and the start of the current sync's timeline. @@ -1057,131 +1040,28 @@ async def compute_state_delta( # whether the room is partial stated *before* fetching it. 
is_partial_state_room = await self.store.is_partial_state_room(room_id) if full_state: - if batch: - state_at_timeline_end = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[-1].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - - state_at_timeline_start = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - - else: - state_at_timeline_end = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - state_at_timeline_start = state_at_timeline_end - - state_ids = _calculate_state( - timeline_contains=timeline_state, - timeline_start=state_at_timeline_start, - timeline_end=state_at_timeline_end, - previous_timeline_end={}, - lazy_load_members=lazy_load_members, + state_ids = await self._compute_state_delta_for_full_sync( + room_id, + sync_config.user, + batch, + now_token, + members_to_fetch, + timeline_state, ) - elif batch.limited: - if batch: - state_at_timeline_start = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - else: - # We can get here if the user has ignored the senders of all - # the recent events. - state_at_timeline_start = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - # for now, we disable LL for gappy syncs - see - # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346 - # N.B. this slows down incr syncs as we are now processing way - # more state in the server than if we were LLing. - # - # We still have to filter timeline_start to LL entries (above) in order - # for _calculate_state's LL logic to work, as we have to include LL - # members for timeline senders in case they weren't loaded in the initial - # sync. We do this by (counterintuitively) by filtering timeline_start - # members to just be ones which were timeline senders, which then ensures - # all of the rest get included in the state block (if we need to know - # about them). - state_filter = StateFilter.all() - + else: # If this is an initial sync then full_state should be set, and # that case is handled above. We assert here to ensure that this # is indeed the case. assert since_token is not None - state_at_previous_sync = await self.get_state_at( - room_id, - stream_position=since_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - if batch: - state_at_timeline_end = ( - await self._state_storage_controller.get_state_ids_for_event( - batch.events[-1].event_id, - state_filter=state_filter, - await_full_state=await_full_state, - ) - ) - else: - # We can get here if the user has ignored the senders of all - # the recent events. 
- state_at_timeline_end = await self.get_state_at( - room_id, - stream_position=now_token, - state_filter=state_filter, - await_full_state=await_full_state, - ) - - state_ids = _calculate_state( - timeline_contains=timeline_state, - timeline_start=state_at_timeline_start, - timeline_end=state_at_timeline_end, - previous_timeline_end=state_at_previous_sync, - # we have to include LL members in case LL initial sync missed them - lazy_load_members=lazy_load_members, + state_ids = await self._compute_state_delta_for_incremental_sync( + room_id, + batch, + since_token, + now_token, + members_to_fetch, + timeline_state, ) - else: - state_ids = {} - if lazy_load_members: - if members_to_fetch and batch.events: - # We're returning an incremental sync, with no - # "gap" since the previous sync, so normally there would be - # no state to return. - # But we're lazy-loading, so the client might need some more - # member events to understand the events in this timeline. - # So we fish out all the member events corresponding to the - # timeline here, and then dedupe any redundant ones below. - - state_ids = await self._state_storage_controller.get_state_ids_for_event( - batch.events[0].event_id, - # we only want members! - state_filter=StateFilter.from_types( - (EventTypes.Member, member) - for member in members_to_fetch - ), - await_full_state=False, - ) # If we only have partial state for the room, `state_ids` may be missing the # memberships we wanted. We attempt to find some by digging through the auth @@ -1245,6 +1125,227 @@ async def compute_state_delta( if e.type != EventTypes.Aliases # until MSC2261 or alternative solution } + async def _compute_state_delta_for_full_sync( + self, + room_id: str, + syncing_user: UserID, + batch: TimelineBatch, + now_token: StreamToken, + members_to_fetch: Optional[Set[str]], + timeline_state: StateMap[str], + ) -> StateMap[str]: + """Calculate the state events to be included in a full sync response. + + As with `_compute_state_delta_for_incremental_sync`, the result will include + the membership events for the senders of each event in `members_to_fetch`. + + Args: + room_id: The room we are calculating for. + syncing_user: The user that is calling `/sync`. + batch: The timeline batch for the room that will be sent to the user. + now_token: Token of the end of the current batch. + members_to_fetch: If lazy-loading is enabled, the memberships needed for + events in the timeline. + timeline_state: The contribution to the room state from state events in + `batch`. Only contains the last event for any given state key. + + Returns: + A map from (type, state_key) to event_id, for each event that we believe + should be included in the `state` part of the sync response. + """ + if members_to_fetch is not None: + # Lazy-loading of membership events is enabled. + # + # Always make sure we load our own membership event so we know if + # we're in the room, to fix https://github.com/vector-im/riot-web/issues/7209. + # + # We only need to apply this on full state syncs given we disabled + # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840. + # + # We don't insert ourselves into `members_to_fetch`, because in some + # rare cases (an empty event batch with a now_token after the user's + # leave in a partial state room which another local user has + # joined), the room state will be missing our membership and there + # is no guarantee that our membership will be in the auth events of + # timeline events when the room is partial stated.
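The long comment above motivates the filter constructed at the start of the next hunk. Reduced to plain data (hypothetical values, and a list of `(type, state_key)` pairs standing in for a real `StateFilter`), the full-sync versus incremental-sync difference is:

```python
from typing import List, Optional, Set, Tuple

def member_filter(members_to_fetch: Optional[Set[str]], syncing_user: str,
                  full_state: bool) -> Optional[List[Tuple[str, str]]]:
    """Which (type, state_key) pairs to request; None means "all room state".

    Lazy-loading restricts state to the timeline senders; a *full* sync
    additionally includes the syncing user, so the client can always tell
    whether it is still in the room (riot-web issue 7209).
    """
    if members_to_fetch is None:
        return None  # lazy-loading disabled
    wanted = set(members_to_fetch)
    if full_state:
        wanted.add(syncing_user)
    return [("m.room.member", user) for user in sorted(wanted)]

senders = {"@alice:example.org"}
assert member_filter(senders, "@me:example.org", full_state=True) == [
    ("m.room.member", "@alice:example.org"),
    ("m.room.member", "@me:example.org"),
]
```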
+ state_filter = StateFilter.from_lazy_load_member_list( + members_to_fetch.union((syncing_user.to_string(),)) + ) + + # We are happy to use partial state to compute the `/sync` response. + # Since partial state may not include the lazy-loaded memberships we + # require, we fix up the state response afterwards with memberships from + # auth events. + await_full_state = False + lazy_load_members = True + else: + state_filter = StateFilter.all() + await_full_state = True + lazy_load_members = False + + if batch: + state_at_timeline_end = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[-1].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + + state_at_timeline_start = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + state_at_timeline_end = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + state_at_timeline_start = state_at_timeline_end + + state_ids = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state_at_timeline_start, + timeline_end=state_at_timeline_end, + previous_timeline_end={}, + lazy_load_members=lazy_load_members, + ) + return state_ids + + async def _compute_state_delta_for_incremental_sync( + self, + room_id: str, + batch: TimelineBatch, + since_token: StreamToken, + now_token: StreamToken, + members_to_fetch: Optional[Set[str]], + timeline_state: StateMap[str], + ) -> StateMap[str]: + """Calculate the state events to be included in an incremental sync response. + + If lazy-loading of membership events is enabled (as indicated by + `members_to_fetch` being not-`None`), the result will include the membership + events for each member in `members_to_fetch`. The caller + (`compute_state_delta`) is responsible for keeping track of which membership + events we have already sent to the client, and hence ripping them out. + + Args: + room_id: The room we are calculating for. + batch: The timeline batch for the room that will be sent to the user. + since_token: Token of the end of the previous batch. + now_token: Token of the end of the current batch. + members_to_fetch: If lazy-loading is enabled, the memberships needed for + events in the timeline. Otherwise, `None`. + timeline_state: The contribution to the room state from state events in + `batch`. Only contains the last event for any given state key. + + Returns: + A map from (type, state_key) to event_id, for each event that we believe + should be included in the `state` part of the sync response. + """ + if members_to_fetch is not None: + # Lazy-loading is enabled. Only return the state that is needed. + state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch) + await_full_state = False + lazy_load_members = True + else: + state_filter = StateFilter.all() + await_full_state = True + lazy_load_members = False + + if batch.limited: + if batch: + state_at_timeline_start = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + # We can get here if the user has ignored the senders of all + # the recent events. 
+ state_at_timeline_start = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + # for now, we disable LL for gappy syncs - see + # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346 + # N.B. this slows down incr syncs as we are now processing way + # more state in the server than if we were LLing. + # + # We still have to filter timeline_start to LL entries (above) in order + # for _calculate_state's LL logic to work, as we have to include LL + # members for timeline senders in case they weren't loaded in the initial + # sync. We do this (counterintuitively) by filtering timeline_start + # members to just be ones which were timeline senders, which then ensures + # all of the rest get included in the state block (if we need to know + # about them). + state_filter = StateFilter.all() + + state_at_previous_sync = await self.get_state_at( + room_id, + stream_position=since_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + if batch: + state_at_timeline_end = ( + await self._state_storage_controller.get_state_ids_for_event( + batch.events[-1].event_id, + state_filter=state_filter, + await_full_state=await_full_state, + ) + ) + else: + # We can get here if the user has ignored the senders of all + # the recent events. + state_at_timeline_end = await self.get_state_at( + room_id, + stream_position=now_token, + state_filter=state_filter, + await_full_state=await_full_state, + ) + + state_ids = _calculate_state( + timeline_contains=timeline_state, + timeline_start=state_at_timeline_start, + timeline_end=state_at_timeline_end, + previous_timeline_end=state_at_previous_sync, + lazy_load_members=lazy_load_members, + ) + else: + state_ids = {} + if lazy_load_members: + if members_to_fetch and batch.events: + # We're returning an incremental sync, with no + # "gap" since the previous sync, so normally there would be + # no state to return. + # But we're lazy-loading, so the client might need some more + # member events to understand the events in this timeline. + # So we fish out all the member events corresponding to the + # timeline here. The caller will then dedupe any redundant ones. + + state_ids = await self._state_storage_controller.get_state_ids_for_event( + batch.events[0].event_id, + # we only want members!
+ state_filter=StateFilter.from_types( + (EventTypes.Member, member) for member in members_to_fetch + ), + await_full_state=False, + ) + return state_ids + async def _find_missing_partial_state_memberships( self, room_id: str, @@ -1333,9 +1434,9 @@ async def _find_missing_partial_state_memberships( and auth_event.state_key == member ): missing_members.discard(member) - additional_state_ids[ - (EventTypes.Member, member) - ] = auth_event.event_id + additional_state_ids[(EventTypes.Member, member)] = ( + auth_event.event_id + ) break if missing_members: @@ -2746,7 +2847,7 @@ def calculate_user_changes(self) -> Tuple[AbstractSet[str], AbstractSet[str]]: if self.since_token: for joined_sync in self.joined: it = itertools.chain( - joined_sync.timeline.events, joined_sync.state.values() + joined_sync.state.values(), joined_sync.timeline.events ) for event in it: if event.type == EventTypes.Member: @@ -2758,13 +2859,20 @@ def calculate_user_changes(self) -> Tuple[AbstractSet[str], AbstractSet[str]]: newly_joined_or_invited_or_knocked_users.add( event.state_key ) + # If the user left and rejoined in the same batch, they + # count as a newly-joined user, *not* a newly-left user. + newly_left_users.discard(event.state_key) else: prev_content = event.unsigned.get("prev_content", {}) prev_membership = prev_content.get("membership", None) if prev_membership == Membership.JOIN: newly_left_users.add(event.state_key) + # If the user joined and left in the same batch, they + # count as a newly-left user, not a newly-joined user. + newly_joined_or_invited_or_knocked_users.discard( + event.state_key + ) - newly_left_users -= newly_joined_or_invited_or_knocked_users return newly_joined_or_invited_or_knocked_users, newly_left_users diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py index a870fd11249d..7e578cf46208 100644 --- a/synapse/handlers/worker_lock.py +++ b/synapse/handlers/worker_lock.py @@ -182,12 +182,15 @@ def _on_lock_released( if not locks: return - def _wake_deferred(deferred: defer.Deferred) -> None: - if not deferred.called: - deferred.callback(None) - - for lock in locks: - self._clock.call_later(0, _wake_deferred, lock.deferred) + def _wake_all_locks( + locks: Collection[Union[WaitingLock, WaitingMultiLock]] + ) -> None: + for lock in locks: + deferred = lock.deferred + if not deferred.called: + deferred.callback(None) + + self._clock.call_later(0, _wake_all_locks, locks) @wrap_as_background_process("_cleanup_locks") async def _cleanup_locks(self) -> None: diff --git a/synapse/http/client.py b/synapse/http/client.py index 08a8634dae6d..4718517c979e 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -390,6 +390,13 @@ async def request( cooperator=self._cooperator, ) + # Always make sure we add a user agent to the request + if headers is None: + headers = Headers() + + if not headers.hasHeader("User-Agent"): + headers.addRawHeader("User-Agent", self.user_agent) + request_deferred: defer.Deferred = treq.request( method, uri, diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 884ecdacdda8..c73a589e6c6a 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -931,8 +931,7 @@ async def put_json( try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, backoff_on_all_error_codes: bool = False, - ) -> JsonDict: - ... + ) -> JsonDict: ... 
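A note on the `calculate_user_changes` fix above: state events are now processed before timeline events, and each join or leave `discard()`s the user from the opposite set, so a leave-then-rejoin within one batch nets out as newly joined (and a join-then-leave as newly left) instead of the old blanket `newly_left -= newly_joined`. A simplified reproduction (joins only; the real code also counts invites and knocks):

```python
from typing import List, Set, Tuple

def classify(events: List[Tuple[str, str, str]]) -> Tuple[Set[str], Set[str]]:
    """events: (user, new_membership, previous_membership), in batch order."""
    newly_joined: Set[str] = set()
    newly_left: Set[str] = set()
    for user, membership, prev in events:
        if membership == "join":
            newly_joined.add(user)
            newly_left.discard(user)    # leave + rejoin in one batch => joined
        elif prev == "join":
            newly_left.add(user)
            newly_joined.discard(user)  # join + leave in one batch => left
    return newly_joined, newly_left

# Alice left and rejoined within the same batch: she counts as newly joined.
joined, left = classify([("@a:x", "leave", "join"), ("@a:x", "join", "leave")])
assert joined == {"@a:x"} and left == set()
```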
@overload async def put_json( @@ -949,8 +948,7 @@ async def put_json( try_trailing_slash_on_400: bool = False, parser: Optional[ByteParser[T]] = None, backoff_on_all_error_codes: bool = False, - ) -> T: - ... + ) -> T: ... async def put_json( self, @@ -1140,8 +1138,7 @@ async def get_json( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> JsonDict: - ... + ) -> JsonDict: ... @overload async def get_json( @@ -1154,8 +1151,7 @@ async def get_json( ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> T: - ... + ) -> T: ... async def get_json( self, @@ -1236,8 +1232,7 @@ async def get_json_with_headers( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: - ... + ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: ... @overload async def get_json_with_headers( @@ -1250,8 +1245,7 @@ async def get_json_with_headers( ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> Tuple[T, Dict[bytes, List[bytes]]]: - ... + ) -> Tuple[T, Dict[bytes, List[bytes]]]: ... async def get_json_with_headers( self, diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index b22eb727b15d..b73d06f1d3f8 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -61,20 +61,17 @@ @overload -def parse_integer(request: Request, name: str, default: int) -> int: - ... +def parse_integer(request: Request, name: str, default: int) -> int: ... @overload -def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: - ... +def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ... @overload def parse_integer( request: Request, name: str, default: Optional[int] = None, required: bool = False -) -> Optional[int]: - ... +) -> Optional[int]: ... def parse_integer( @@ -105,8 +102,7 @@ def parse_integer_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: Optional[int] = None, -) -> Optional[int]: - ... +) -> Optional[int]: ... @overload @@ -115,8 +111,7 @@ def parse_integer_from_args( name: str, *, required: Literal[True], -) -> int: - ... +) -> int: ... @overload @@ -125,8 +120,7 @@ def parse_integer_from_args( name: str, default: Optional[int] = None, required: bool = False, -) -> Optional[int]: - ... +) -> Optional[int]: ... def parse_integer_from_args( @@ -172,20 +166,17 @@ def parse_integer_from_args( @overload -def parse_boolean(request: Request, name: str, default: bool) -> bool: - ... +def parse_boolean(request: Request, name: str, default: bool) -> bool: ... @overload -def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: - ... +def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: ... @overload def parse_boolean( request: Request, name: str, default: Optional[bool] = None, required: bool = False -) -> Optional[bool]: - ... +) -> Optional[bool]: ... def parse_boolean( @@ -216,8 +207,7 @@ def parse_boolean_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: bool, -) -> bool: - ... +) -> bool: ... @overload @@ -226,8 +216,7 @@ def parse_boolean_from_args( name: str, *, required: Literal[True], -) -> bool: - ... +) -> bool: ... @overload @@ -236,8 +225,7 @@ def parse_boolean_from_args( name: str, default: Optional[bool] = None, required: bool = False, -) -> Optional[bool]: - ... +) -> Optional[bool]: ... 
def parse_boolean_from_args( @@ -289,8 +277,7 @@ def parse_bytes_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, default: Optional[bytes] = None, -) -> Optional[bytes]: - ... +) -> Optional[bytes]: ... @overload @@ -300,8 +287,7 @@ def parse_bytes_from_args( default: Literal[None] = None, *, required: Literal[True], -) -> bytes: - ... +) -> bytes: ... @overload @@ -310,8 +296,7 @@ def parse_bytes_from_args( name: str, default: Optional[bytes] = None, required: bool = False, -) -> Optional[bytes]: - ... +) -> Optional[bytes]: ... def parse_bytes_from_args( @@ -355,8 +340,7 @@ def parse_string( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... @overload @@ -367,8 +351,7 @@ def parse_string( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... @overload @@ -380,8 +363,7 @@ def parse_string( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... def parse_string( @@ -437,8 +419,7 @@ def parse_enum( name: str, E: Type[EnumT], default: EnumT, -) -> EnumT: - ... +) -> EnumT: ... @overload @@ -448,8 +429,7 @@ def parse_enum( E: Type[EnumT], *, required: Literal[True], -) -> EnumT: - ... +) -> EnumT: ... def parse_enum( @@ -526,8 +506,7 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: - ... +) -> Optional[List[str]]: ... @overload @@ -538,8 +517,7 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: - ... +) -> List[str]: ... @overload @@ -550,8 +528,7 @@ def parse_strings_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: - ... +) -> List[str]: ... @overload @@ -563,8 +540,7 @@ def parse_strings_from_args( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: - ... +) -> Optional[List[str]]: ... def parse_strings_from_args( @@ -625,8 +601,7 @@ def parse_string_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... @overload @@ -638,8 +613,7 @@ def parse_string_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> str: - ... +) -> str: ... @overload @@ -650,8 +624,7 @@ def parse_string_from_args( required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[str]: - ... +) -> Optional[str]: ... def parse_string_from_args( @@ -704,22 +677,19 @@ def parse_string_from_args( @overload -def parse_json_value_from_request(request: Request) -> JsonDict: - ... +def parse_json_value_from_request(request: Request) -> JsonDict: ... @overload def parse_json_value_from_request( request: Request, allow_empty_body: Literal[False] -) -> JsonDict: - ... +) -> JsonDict: ... @overload def parse_json_value_from_request( request: Request, allow_empty_body: bool = False -) -> Optional[JsonDict]: - ... +) -> Optional[JsonDict]: ... def parse_json_value_from_request( @@ -847,7 +817,6 @@ def assert_params_in_dict(body: JsonDict, required: StrCollection) -> None: class RestServlet: - """A Synapse REST Servlet. 
An implementing class can either provide its own custom 'register' method, diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 548d255b69d0..4650b60962d6 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -744,8 +744,7 @@ def preserve_fn( @overload -def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: - ... +def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: ... def preserve_fn( @@ -774,15 +773,10 @@ def run_in_background( @overload def run_in_background( f: Callable[P, R], *args: P.args, **kwargs: P.kwargs -) -> "defer.Deferred[R]": - ... +) -> "defer.Deferred[R]": ... -def run_in_background( # type: ignore[misc] - # The `type: ignore[misc]` above suppresses - # "Overloaded function implementation does not accept all possible arguments of signature 1" - # "Overloaded function implementation does not accept all possible arguments of signature 2" - # which seems like a bug in mypy. +def run_in_background( f: Union[ Callable[P, R], Callable[P, Awaitable[R]], diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 78b9fffbfbf5..7a3c805cc5e4 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -388,15 +388,13 @@ def _only_if_tracing_inner(*args: P.args, **kwargs: P.kwargs) -> Optional[R]: @overload def ensure_active_span( message: str, -) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: - ... +) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: ... @overload def ensure_active_span( message: str, ret: T -) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: - ... +) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: ... def ensure_active_span( diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index 52859ed4904c..0e875132f6fb 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -1002,9 +1002,9 @@ async def _generate_thumbnails( ) t_width = min(m_width, t_width) t_height = min(m_height, t_height) - thumbnails[ - (t_width, t_height, requirement.media_type) - ] = requirement.method + thumbnails[(t_width, t_height, requirement.media_type)] = ( + requirement.method + ) # Now we generate the thumbnails for each dimension, store it for (t_width, t_height, t_type), t_method in thumbnails.items(): diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py index 6b4c64f7a59a..bd25985686d2 100644 --- a/synapse/metrics/jemalloc.py +++ b/synapse/metrics/jemalloc.py @@ -42,14 +42,12 @@ class JemallocStats: @overload def _mallctl( self, name: str, read: Literal[True] = True, write: Optional[int] = None - ) -> int: - ... + ) -> int: ... @overload def _mallctl( self, name: str, read: Literal[False], write: Optional[int] = None - ) -> None: - ... + ) -> None: ... def _mallctl( self, name: str, read: bool = True, write: Optional[int] = None diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 6ec56a7f1436..17079ff781cf 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -455,7 +455,7 @@ async def check_event_for_spam( # mypy complains that we can't reach this code because of the # return type in CHECK_EVENT_FOR_SPAM_CALLBACK, but we don't know # for sure that the module actually returns it. 
- logger.warning( + logger.warning( # type: ignore[unreachable] "Module returned invalid value, rejecting message as spam" ) res = "This message has been rejected as probable spam" diff --git a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py index 7a3255aeef6d..9f7a04372deb 100644 --- a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py +++ b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py @@ -366,7 +366,7 @@ async def check_threepid_can_be_invited( if len(self._check_threepid_can_be_invited_callbacks) == 0: return True - state_events = await self._get_state_map_for_room(room_id) + state_events = await self._storage_controllers.state.get_current_state(room_id) for callback in self._check_threepid_can_be_invited_callbacks: try: @@ -399,7 +399,7 @@ async def check_visibility_can_be_modified( if len(self._check_visibility_can_be_modified_callbacks) == 0: return True - state_events = await self._get_state_map_for_room(room_id) + state_events = await self._storage_controllers.state.get_current_state(room_id) for callback in self._check_visibility_can_be_modified_callbacks: try: @@ -427,7 +427,13 @@ async def on_new_event(self, event_id: str) -> None: return event = await self.store.get_event(event_id) - state_events = await self._get_state_map_for_room(event.room_id) + + # We *don't* want to wait for the full state here, because waiting for full + # state will persist event, which in turn will call this method. + # This would end up in a deadlock. + state_events = await self._storage_controllers.state.get_current_state( + event.room_id, await_full_state=False + ) for callback in self._on_new_event_callbacks: try: @@ -490,17 +496,6 @@ async def check_can_deactivate_user( ) return True - async def _get_state_map_for_room(self, room_id: str) -> StateMap[EventBase]: - """Given a room ID, return the state events of that room. - - Args: - room_id: The ID of the room. - - Returns: - A dict mapping (event type, state key) to state event. - """ - return await self._storage_controllers.state.get_current_state(room_id) - async def on_profile_update( self, user_id: str, new_profile: ProfileInfo, by_admin: bool, deactivation: bool ) -> None: diff --git a/synapse/notifier.py b/synapse/notifier.py index 62d954298c58..e87333a80aa3 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -469,8 +469,7 @@ def on_new_event( new_token: RoomStreamToken, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... @overload def on_new_event( @@ -479,8 +478,7 @@ def on_new_event( new_token: MultiWriterStreamToken, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... @overload def on_new_event( @@ -497,8 +495,7 @@ def on_new_event( new_token: int, users: Optional[Collection[Union[str, UserID]]] = None, rooms: Optional[StrCollection] = None, - ) -> None: - ... + ) -> None: ... 
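The `# type: ignore[unreachable]` annotations above (in `auth.py` and the spam-checker callbacks) all cover the same pattern: mypy considers the branch dead because of the callback's declared return type, but third-party module code can return anything, so the runtime guard stays. The shape of that guard, generically:

```python
import logging
from typing import Any, Callable, Optional, Tuple

logger = logging.getLogger(__name__)

def validate_callback_result(result: Any) -> Optional[Tuple[str, Optional[Callable]]]:
    # Declared as Optional[Tuple[str, Optional[Callable]]], but a misbehaving
    # module can hand back anything, so check before trusting it.
    if result is None:
        return None
    if not isinstance(result, tuple) or len(result) != 2:
        logger.warning("Wrong type returned by module API callback: %r", result)
        return None
    return result

assert validate_callback_result(("@user:example.org", None)) is not None
assert validate_callback_result("not-a-tuple") is None
```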
def on_new_event( self, diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 60161e86f2cd..f1ffc8115f5b 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -26,6 +26,7 @@ import bleach import jinja2 from markupsafe import Markup +from prometheus_client import Counter from synapse.api.constants import EventTypes, Membership, RoomTypes from synapse.api.errors import StoreError @@ -56,6 +57,12 @@ T = TypeVar("T") +emails_sent_counter = Counter( + "synapse_emails_sent_total", + "Emails sent by type", + ["type"], +) + CONTEXT_BEFORE = 1 CONTEXT_AFTER = 1 @@ -130,6 +137,8 @@ def __init__( logger.info("Created Mailer for app_name %s" % app_name) + emails_sent_counter.labels("password_reset") + async def send_password_reset_mail( self, email_address: str, token: str, client_secret: str, sid: str ) -> None: @@ -153,6 +162,8 @@ async def send_password_reset_mail( template_vars: TemplateVars = {"link": link} + emails_sent_counter.labels("password_reset").inc() + await self.send_email( email_address, self.email_subjects.password_reset @@ -160,6 +171,8 @@ async def send_password_reset_mail( template_vars, ) + emails_sent_counter.labels("registration") + async def send_registration_mail( self, email_address: str, token: str, client_secret: str, sid: str ) -> None: @@ -183,6 +196,8 @@ async def send_registration_mail( template_vars: TemplateVars = {"link": link} + emails_sent_counter.labels("registration").inc() + await self.send_email( email_address, self.email_subjects.email_validation @@ -190,6 +205,8 @@ async def send_registration_mail( template_vars, ) + emails_sent_counter.labels("add_threepid") + async def send_add_threepid_mail( self, email_address: str, token: str, client_secret: str, sid: str ) -> None: @@ -214,6 +231,8 @@ async def send_add_threepid_mail( template_vars: TemplateVars = {"link": link} + emails_sent_counter.labels("add_threepid").inc() + await self.send_email( email_address, self.email_subjects.email_validation @@ -221,6 +240,8 @@ async def send_add_threepid_mail( template_vars, ) + emails_sent_counter.labels("notification") + async def send_notification_mail( self, app_id: str, @@ -315,6 +336,8 @@ async def _fetch_room_state(room_id: str) -> None: "reason": reason, } + emails_sent_counter.labels("notification").inc() + await self.send_email( email_address, summary_text, template_vars, unsubscribe_link ) @@ -354,12 +377,14 @@ async def send_email( # # Note that many email clients will not render the unsubscribe link # unless DKIM, etc. is properly setup. 
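The `mailer.py` hunks add a labelled Prometheus counter and call `.labels(...)` once per email type at construction time, without `.inc()`. With `prometheus_client`, touching a label set pre-registers the child series so the metric is exported at 0 from startup rather than appearing only after the first increment. A minimal sketch with a hypothetical metric name:

```python
from prometheus_client import Counter, generate_latest

emails_sent_counter = Counter(
    "myapp_emails_sent_total",  # hypothetical name for this sketch
    "Emails sent by type",
    ["type"],
)

# Touching .labels() without .inc() pre-registers the child series, so
# the counter is exported at 0 before the first email is ever sent.
emails_sent_counter.labels("password_reset")

def send_password_reset_mail() -> None:
    emails_sent_counter.labels("password_reset").inc()

print(generate_latest().decode())  # includes ...{type="password_reset"} 0.0
```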
- additional_headers={ - "List-Unsubscribe-Post": "List-Unsubscribe=One-Click", - "List-Unsubscribe": f"<{unsubscribe_link}>", - } - if unsubscribe_link - else None, + additional_headers=( + { + "List-Unsubscribe-Post": "List-Unsubscribe=One-Click", + "List-Unsubscribe": f"<{unsubscribe_link}>", + } + if unsubscribe_link + else None + ), ) async def _get_room_vars( diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index 76c7ab647732..1ef881f70247 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -29,11 +29,17 @@ async def get_badge_count(store: DataStore, user_id: str, group_by_room: bool) -> int: invites = await store.get_invited_rooms_for_local_user(user_id) + joins = await store.get_rooms_for_user(user_id) badge = len(invites) room_to_count = await store.get_unread_counts_by_room_for_user(user_id) - for _room_id, notify_count in room_to_count.items(): + for room_id, notify_count in room_to_count.items(): + # room_to_count may include rooms which the user has left, + # ignore those. + if room_id not in joins: + continue + if notify_count == 0: continue diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index a82ad49e013c..9aa8d90bfe30 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -259,9 +259,9 @@ async def send_request( url_args.append(txn_id) if cls.METHOD == "POST": - request_func: Callable[ - ..., Awaitable[Any] - ] = client.post_json_get_json + request_func: Callable[..., Awaitable[Any]] = ( + client.post_json_get_json + ) elif cls.METHOD == "PUT": request_func = client.put_json elif cls.METHOD == "GET": diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py index ce47d8035c76..a95771b5f607 100644 --- a/synapse/replication/tcp/external_cache.py +++ b/synapse/replication/tcp/external_cache.py @@ -70,9 +70,9 @@ class ExternalCache: def __init__(self, hs: "HomeServer"): if hs.config.redis.redis_enabled: - self._redis_connection: Optional[ - "ConnectionHandler" - ] = hs.get_outbound_redis_connection() + self._redis_connection: Optional["ConnectionHandler"] = ( + hs.get_outbound_redis_connection() + ) else: self._redis_connection = None diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 0e413f02ab09..6da1d7916862 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -109,6 +109,7 @@ UserReplaceMasterCrossSigningKeyRestServlet, UserRestServletV2, UsersRestServletV2, + UsersRestServletV3, UserTokenRestServlet, WhoisRestServlet, ) @@ -236,10 +237,12 @@ async def on_GET( raise NotFoundError("purge id '%s' not found" % purge_id) result: JsonDict = { - "status": purge_task.status - if purge_task.status == TaskStatus.COMPLETE - or purge_task.status == TaskStatus.FAILED - else "active", + "status": ( + purge_task.status + if purge_task.status == TaskStatus.COMPLETE + or purge_task.status == TaskStatus.FAILED + else "active" + ), } if purge_task.error: result["error"] = purge_task.error @@ -289,6 +292,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: UserTokenRestServlet(hs).register(http_server) UserRestServletV2(hs).register(http_server) UsersRestServletV2(hs).register(http_server) + UsersRestServletV3(hs).register(http_server) UserMediaStatisticsRestServlet(hs).register(http_server) LargestRoomsStatistics(hs).register(http_server) EventReportDetailRestServlet(hs).register(http_server) diff --git a/synapse/rest/admin/users.py 
b/synapse/rest/admin/users.py index 0229e87f4311..4e34e4651285 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -23,7 +23,7 @@ import logging import secrets from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union import attr @@ -118,7 +118,8 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: errcode=Codes.INVALID_PARAM, ) - deactivated = parse_boolean(request, "deactivated", default=False) + deactivated = self._parse_parameter_deactivated(request) + locked = parse_boolean(request, "locked", default=False) admins = parse_boolean(request, "admins") @@ -182,6 +183,22 @@ def _filter(a: attr.Attribute) -> bool: return HTTPStatus.OK, ret + def _parse_parameter_deactivated(self, request: SynapseRequest) -> Optional[bool]: + """ + Return None (no filtering) if `deactivated` is `true`, otherwise return `False` + (exclude deactivated users from the results). + """ + return None if parse_boolean(request, "deactivated") else False + + +class UsersRestServletV3(UsersRestServletV2): + PATTERNS = admin_patterns("/users$", "v3") + + def _parse_parameter_deactivated( + self, request: SynapseRequest + ) -> Union[bool, None]: + return parse_boolean(request, "deactivated") + class UserRestServletV2(RestServlet): PATTERNS = admin_patterns("/users/(?P[^/]*)$", "v2") @@ -1167,12 +1184,14 @@ async def on_GET( # convert `null` to `0` for consistency # both values do the same in retelimit handler ret = { - "messages_per_second": 0 - if ratelimit.messages_per_second is None - else ratelimit.messages_per_second, - "burst_count": 0 - if ratelimit.burst_count is None - else ratelimit.burst_count, + "messages_per_second": ( + 0 + if ratelimit.messages_per_second is None + else ratelimit.messages_per_second + ), + "burst_count": ( + 0 if ratelimit.burst_count is None else ratelimit.burst_count + ), } else: ret = {} diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 0cdc4cc4f787..12ffca984f82 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -112,9 +112,9 @@ async def on_GET( self._hs.config.experimental.msc4010_push_rules_account_data and account_data_type == AccountDataTypes.PUSH_RULES ): - account_data: Optional[ - JsonMapping - ] = await self._push_rules_handler.push_rules_for_user(requester.user) + account_data: Optional[JsonMapping] = ( + await self._push_rules_handler.push_rules_for_user(requester.user) + ) else: account_data = await self.store.get_global_account_data_by_type_for_user( user_id, account_data_type diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 3af2b7dfd9aa..2b103ca6a873 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -313,12 +313,12 @@ async def encode_response( # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md # states that this field should always be included, as long as the server supports the feature. 
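The `users.py` hunk above versions the List Accounts admin API by subclassing: `UsersRestServletV3` inherits everything from v2 and overrides only the `deactivated`-parameter hook. A stripped-down sketch of the pattern, with hypothetical class names and the request parsing reduced to a plain boolean:

```python
from typing import Optional

class UsersServletV2:
    """v2 semantics: deactivated=true means 'also include deactivated'."""

    def _parse_deactivated(self, raw: bool) -> Optional[bool]:
        return None if raw else False  # None = no filter, False = active only

class UsersServletV3(UsersServletV2):
    """v3 semantics: the parameter is a straight tri-state filter."""

    def _parse_deactivated(self, raw: bool) -> Optional[bool]:
        return raw

assert UsersServletV2()._parse_deactivated(True) is None
assert UsersServletV3()._parse_deactivated(True) is True
```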
- response[ - "org.matrix.msc2732.device_unused_fallback_key_types" - ] = sync_result.device_unused_fallback_key_types - response[ - "device_unused_fallback_key_types" - ] = sync_result.device_unused_fallback_key_types + response["org.matrix.msc2732.device_unused_fallback_key_types"] = ( + sync_result.device_unused_fallback_key_types + ) + response["device_unused_fallback_key_types"] = ( + sync_result.device_unused_fallback_key_types + ) if joined: response["rooms"][Membership.JOIN] = joined @@ -543,9 +543,9 @@ async def encode_room( if room.unread_thread_notifications: result["unread_thread_notifications"] = room.unread_thread_notifications if self._msc3773_enabled: - result[ - "org.matrix.msc3773.unread_thread_notifications" - ] = room.unread_thread_notifications + result["org.matrix.msc3773.unread_thread_notifications"] = ( + room.unread_thread_notifications + ) result["summary"] = room.summary if self._msc2654_enabled: result["org.matrix.msc2654.unread_count"] = room.unread_count diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 6afe4a7bcc96..dc7325fc57db 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -191,10 +191,10 @@ async def query_keys( server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {} for server_name, key_ids in query.items(): if key_ids: - results: Mapping[ - str, Optional[FetchKeyResultForRemote] - ] = await self.store.get_server_keys_json_for_remote( - server_name, key_ids + results: Mapping[str, Optional[FetchKeyResultForRemote]] = ( + await self.store.get_server_keys_json_for_remote( + server_name, key_ids + ) ) else: results = await self.store.get_all_server_keys_json_for_remote( diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 015e49ab81e2..72b291889bb1 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -603,15 +603,15 @@ def __init__(self, hs: "HomeServer"): self.resolve_linearizer = Linearizer(name="state_resolve_lock") # dict of set of event_ids -> _StateCacheEntry. - self._state_cache: ExpiringCache[ - FrozenSet[int], _StateCacheEntry - ] = ExpiringCache( - cache_name="state_cache", - clock=self.clock, - max_len=100000, - expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, - iterable=True, - reset_expiry_on_get=True, + self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( + ExpiringCache( + cache_name="state_cache", + clock=self.clock, + max_len=100000, + expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, + iterable=True, + reset_expiry_on_get=True, + ) ) # diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 8de16db1d02a..da926ad14646 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -52,8 +52,7 @@ class Clock(Protocol): # This is usually synapse.util.Clock, but it's replaced with a FakeClock in tests. # We only ever sleep(0) though, so that other async functions can make forward # progress without waiting for stateres to complete. - def sleep(self, duration_ms: float) -> Awaitable[None]: - ... + def sleep(self, duration_ms: float) -> Awaitable[None]: ... class StateResolutionStore(Protocol): @@ -61,13 +60,11 @@ class StateResolutionStore(Protocol): # TestStateResolutionStore in tests. def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: - ... + ) -> Awaitable[Dict[str, EventBase]]: ... def get_auth_chain_difference( self, room_id: str, state_sets: List[Set[str]] - ) -> Awaitable[Set[str]]: - ... 
+ ) -> Awaitable[Set[str]]: ... # We want to await to the reactor occasionally during state res when dealing @@ -742,8 +739,7 @@ async def _get_event( event_map: Dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[False] = False, -) -> EventBase: - ... +) -> EventBase: ... @overload @@ -753,8 +749,7 @@ async def _get_event( event_map: Dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[True], -) -> Optional[EventBase]: - ... +) -> Optional[EventBase]: ... async def _get_event( diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 9df4edee381d..f4732940702f 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -836,9 +836,9 @@ def create_index_sqlite(conn: "LoggingDatabaseConnection") -> None: c.execute(sql) if isinstance(self.db_pool.engine, engines.PostgresEngine): - runner: Optional[ - Callable[[LoggingDatabaseConnection], None] - ] = create_index_psql + runner: Optional[Callable[[LoggingDatabaseConnection], None]] = ( + create_index_psql + ) elif psql_only: runner = None else: diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py index 69d5999c0a2f..84699a2ee115 100644 --- a/synapse/storage/controllers/persist_events.py +++ b/synapse/storage/controllers/persist_events.py @@ -773,9 +773,9 @@ async def _calculate_new_extremities( ) # Remove any events which are prev_events of any existing events. - existing_prevs: Collection[ - str - ] = await self.persist_events_store._get_events_which_are_prevs(result) + existing_prevs: Collection[str] = ( + await self.persist_events_store._get_events_which_are_prevs(result) + ) result.difference_update(existing_prevs) # Finally handle the case where the new events have soft-failed prev diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 99666d79a9b3..f9eced23bffe 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -273,8 +273,10 @@ async def get_state_ids_for_events( await_full_state: bool = True, ) -> Dict[str, StateMap[str]]: """ - Get the state dicts corresponding to a list of events, containing the event_ids - of the state events (as opposed to the events themselves) + Get the room states after each of a list of events. + + For each event in `event_ids`, the result contains a map from state tuple + to the event_ids of the state event (as opposed to the events themselves). 
Args: event_ids: events whose state should be returned @@ -347,7 +349,7 @@ async def get_state_ids_for_event( await_full_state: bool = True, ) -> StateMap[str]: """ - Get the state dict corresponding to a particular event + Get the state dict corresponding to the state after a particular event Args: event_id: event whose state should be returned @@ -562,10 +564,15 @@ async def get_current_state_deltas( @trace @tag_args async def get_current_state( - self, room_id: str, state_filter: Optional[StateFilter] = None + self, + room_id: str, + state_filter: Optional[StateFilter] = None, + await_full_state: bool = True, ) -> StateMap[EventBase]: """Same as `get_current_state_ids` but also fetches the events""" - state_map_ids = await self.get_current_state_ids(room_id, state_filter) + state_map_ids = await self.get_current_state_ids( + room_id, state_filter, await_full_state + ) event_map = await self.stores.main.get_events(list(state_map_ids.values())) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 8dc9080842f6..d9c85e411e33 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -111,8 +111,7 @@ class _PoolConnection(Connection): A Connection from twisted.enterprise.adbapi.Connection. """ - def reconnect(self) -> None: - ... + def reconnect(self) -> None: ... def make_pool( @@ -914,9 +913,9 @@ async def _runInteraction() -> R: try: with opentracing.start_active_span(f"db.{desc}"): - result = await self.runWithConnection( + result: R = await self.runWithConnection( # mypy seems to have an issue with this, maybe a bug? - self.new_transaction, # type: ignore[arg-type] + self.new_transaction, desc, after_callbacks, async_after_callbacks, @@ -935,7 +934,7 @@ async def _runInteraction() -> R: await async_callback(*async_args, **async_kwargs) for after_callback, after_args, after_kwargs in after_callbacks: after_callback(*after_args, **after_kwargs) - return cast(R, result) + return result except Exception: for exception_callback, after_args, after_kwargs in exception_callbacks: exception_callback(*after_args, **after_kwargs) @@ -1603,8 +1602,7 @@ async def simple_select_one( retcols: Collection[str], allow_none: Literal[False] = False, desc: str = "simple_select_one", - ) -> Tuple[Any, ...]: - ... + ) -> Tuple[Any, ...]: ... @overload async def simple_select_one( @@ -1614,8 +1612,7 @@ async def simple_select_one( retcols: Collection[str], allow_none: Literal[True] = True, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: - ... + ) -> Optional[Tuple[Any, ...]]: ... async def simple_select_one( self, @@ -1654,8 +1651,7 @@ async def simple_select_one_onecol( retcol: str, allow_none: Literal[False] = False, desc: str = "simple_select_one_onecol", - ) -> Any: - ... + ) -> Any: ... @overload async def simple_select_one_onecol( @@ -1665,8 +1661,7 @@ async def simple_select_one_onecol( retcol: str, allow_none: Literal[True] = True, desc: str = "simple_select_one_onecol", - ) -> Optional[Any]: - ... + ) -> Optional[Any]: ... async def simple_select_one_onecol( self, @@ -1706,8 +1701,7 @@ def simple_select_one_onecol_txn( keyvalues: Dict[str, Any], retcol: str, allow_none: Literal[False] = False, - ) -> Any: - ... + ) -> Any: ... @overload @classmethod @@ -1718,8 +1712,7 @@ def simple_select_one_onecol_txn( keyvalues: Dict[str, Any], retcol: str, allow_none: Literal[True] = True, - ) -> Optional[Any]: - ... + ) -> Optional[Any]: ... 
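Several of the `database.py` overloads above use `Literal[False]`/`Literal[True]` on `allow_none` so that mypy narrows the return type to `Optional` only when the caller opts in. The pattern in isolation, with a hypothetical `select_one` standing in for `simple_select_one`:

```python
from typing import Literal, Optional, overload

@overload
def select_one(allow_none: Literal[False] = False) -> int: ...
@overload
def select_one(allow_none: Literal[True] = True) -> Optional[int]: ...
def select_one(allow_none: bool = False) -> Optional[int]:
    row: Optional[int] = None  # pretend the query matched no rows
    if row is None and not allow_none:
        raise RuntimeError("expected exactly one row")
    return row

print(select_one(allow_none=True))  # typed Optional[int]; prints None
```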
@classmethod def simple_select_one_onecol_txn( @@ -2501,8 +2494,7 @@ def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, columns: Tuple[str, str], iterable: Collection[Tuple[Any, Any]], -) -> Tuple[str, list]: - ... +) -> Tuple[str, list]: ... def make_tuple_in_list_sql_clause( diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 394985d87f53..bf779587d953 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -176,7 +176,7 @@ async def get_users_paginate( user_id: Optional[str] = None, name: Optional[str] = None, guests: bool = True, - deactivated: bool = False, + deactivated: Optional[bool] = None, admins: Optional[bool] = None, order_by: str = UserSortOrder.NAME.value, direction: Direction = Direction.FORWARDS, @@ -232,8 +232,11 @@ def get_users_paginate_txn( if not guests: filters.append("is_guest = 0") - if not deactivated: - filters.append("deactivated = 0") + if deactivated is not None: + if deactivated: + filters.append("deactivated = 1") + else: + filters.append("deactivated = 0") if not locked: filters.append("locked IS FALSE") diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 7314d87404b7..bfd492d95d33 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -373,7 +373,7 @@ def _invalidate_caches_for_room_events(self, room_id: str) -> None: deleted. """ - self._invalidate_local_get_event_cache_all() # type: ignore[attr-defined] + self._invalidate_local_get_event_cache_room_id(room_id) # type: ignore[attr-defined] self._attempt_to_invalidate_cache("have_seen_event", (room_id,)) self._attempt_to_invalidate_cache("get_latest_event_ids_in_room", (room_id,)) diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 3e011f334023..8dbcb3f5a058 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1701,9 +1701,9 @@ def __init__( # Map of (user_id, device_id) -> bool. If there is an entry that implies # the device exists. - self.device_id_exists_cache: LruCache[ - Tuple[str, str], Literal[True] - ] = LruCache(cache_name="device_id_exists", max_size=10000) + self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = ( + LruCache(cache_name="device_id_exists", max_size=10000) + ) async def store_device( self, diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index c96371a0d3c3..b219ea70ee22 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -256,8 +256,7 @@ async def get_e2e_device_keys_and_signatures( self, query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: - ... + ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( @@ -265,8 +264,7 @@ async def get_e2e_device_keys_and_signatures( query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: - ... + ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... 
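The `get_users_paginate` hunk above makes `deactivated` a tri-state `Optional[bool]`: `None` applies no filter, while `True`/`False` select one side. A small sketch of how such a tri-state maps onto SQL filter fragments (hypothetical helper, not the actual Synapse function):

```python
from typing import List, Optional

def build_user_filters(deactivated: Optional[bool]) -> List[str]:
    """Tri-state filter: None = include everyone, True = only
    deactivated accounts, False = only active ones."""
    filters: List[str] = []
    if deactivated is not None:
        filters.append("deactivated = 1" if deactivated else "deactivated = 0")
    return filters

assert build_user_filters(None) == []
assert build_user_filters(True) == ["deactivated = 1"]
assert build_user_filters(False) == ["deactivated = 0"]
```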
@overload async def get_e2e_device_keys_and_signatures( @@ -274,8 +272,7 @@ async def get_e2e_device_keys_and_signatures( query_list: Collection[Tuple[str, Optional[str]]], include_all_devices: Literal[True], include_deleted_devices: Literal[True], - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: - ... + ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ... @trace @cancellable diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index d7aa8a0ee0b7..3a5666cd9b0f 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -358,6 +358,10 @@ async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, in This function is intentionally not cached because it is called to calculate the unread badge for push notifications and thus the result is expected to change. + Note that this function assumes the user is a member of the room. Because + summary rows are not removed when a user leaves a room, the caller must + filter out those results from the result. + Returns: A map of room ID to notification counts for the given user. """ @@ -370,170 +374,127 @@ async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, in def _get_unread_counts_by_room_for_user_txn( self, txn: LoggingTransaction, user_id: str ) -> Dict[str, int]: - # To get the badge count of all rooms we need to make three queries: - # 1. Fetch all counts from `event_push_summary`, discarding any stale - # rooms. - # 2. Fetch all notifications from `event_push_actions` that haven't - # been rotated yet. - # 3. Fetch all notifications from `event_push_actions` for the stale - # rooms. - # - # The "stale room" scenario generally happens when there is a new read - # receipt that hasn't yet been processed to update the - # `event_push_summary` table. When that happens we ignore the - # `event_push_summary` table for that room and calculate the count - # manually from `event_push_actions`. - - # We need to only take into account read receipts of these types. - receipt_types_clause, receipt_types_args = make_in_list_sql_clause( + receipt_types_clause, args = make_in_list_sql_clause( self.database_engine, "receipt_type", (ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE), ) + args.extend([user_id, user_id]) + + receipts_cte = f""" + WITH all_receipts AS ( + SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering + FROM receipts_linearized + LEFT JOIN events USING (room_id, event_id) + WHERE + {receipt_types_clause} + AND user_id = ? + GROUP BY room_id, thread_id + ) + """ + + receipts_joins = """ + LEFT JOIN ( + SELECT room_id, thread_id, + max_receipt_stream_ordering AS threaded_receipt_stream_ordering + FROM all_receipts + WHERE thread_id IS NOT NULL + ) AS threaded_receipts USING (room_id, thread_id) + LEFT JOIN ( + SELECT room_id, thread_id, + max_receipt_stream_ordering AS unthreaded_receipt_stream_ordering + FROM all_receipts + WHERE thread_id IS NULL + ) AS unthreaded_receipts USING (room_id) + """ + + # First get summary counts by room / thread for the user. We use the max receipt + # stream ordering of both threaded & unthreaded receipts to compare against the + # summary table. + # + # PostgreSQL and SQLite differ in comparing scalar numerics. + if isinstance(self.database_engine, PostgresEngine): + # GREATEST ignores NULLs. 
+ max_clause = """GREATEST( + threaded_receipt_stream_ordering, + unthreaded_receipt_stream_ordering + )""" + else: + # MAX returns NULL if any are NULL, so COALESCE to 0 first. + max_clause = """MAX( + COALESCE(threaded_receipt_stream_ordering, 0), + COALESCE(unthreaded_receipt_stream_ordering, 0) + )""" - # Step 1, fetch all counts from `event_push_summary` for the user. This - # is slightly convoluted as we also need to pull out the stream ordering - # of the most recent receipt of the user in the room (either a thread - # aware receipt or thread unaware receipt) in order to determine - # whether the row in `event_push_summary` is stale. Hence the outer - # GROUP BY and odd join condition against `receipts_linearized`. sql = f""" - SELECT room_id, notif_count, stream_ordering, thread_id, last_receipt_stream_ordering, - MAX(receipt_stream_ordering) - FROM ( - SELECT e.room_id, notif_count, e.stream_ordering, e.thread_id, last_receipt_stream_ordering, - ev.stream_ordering AS receipt_stream_ordering - FROM event_push_summary AS e - INNER JOIN local_current_membership USING (user_id, room_id) - LEFT JOIN receipts_linearized AS r ON ( - e.user_id = r.user_id - AND e.room_id = r.room_id - AND (e.thread_id = r.thread_id OR r.thread_id IS NULL) - AND {receipt_types_clause} + {receipts_cte} + SELECT eps.room_id, eps.thread_id, notif_count + FROM event_push_summary AS eps + {receipts_joins} + WHERE user_id = ? + AND notif_count != 0 + AND ( + (last_receipt_stream_ordering IS NULL AND stream_ordering > {max_clause}) + OR last_receipt_stream_ordering = {max_clause} ) - LEFT JOIN events AS ev ON (r.event_id = ev.event_id) - WHERE e.user_id = ? and notif_count > 0 - ) AS es - GROUP BY room_id, notif_count, stream_ordering, thread_id, last_receipt_stream_ordering """ + txn.execute(sql, args) - txn.execute( - sql, - receipt_types_args - + [ - user_id, - ], - ) - + seen_thread_ids = set() room_to_count: Dict[str, int] = defaultdict(int) - stale_room_ids = set() - for row in txn: - room_id = row[0] - notif_count = row[1] - stream_ordering = row[2] - _thread_id = row[3] - last_receipt_stream_ordering = row[4] - receipt_stream_ordering = row[5] - - if last_receipt_stream_ordering is None: - if receipt_stream_ordering is None: - room_to_count[room_id] += notif_count - elif stream_ordering > receipt_stream_ordering: - room_to_count[room_id] += notif_count - else: - # The latest read receipt from the user is after all the rows for - # this room in `event_push_summary`. We ignore them, and - # calculate the count from `event_push_actions` in step 3. - pass - elif last_receipt_stream_ordering == receipt_stream_ordering: - room_to_count[room_id] += notif_count - else: - # The row is stale if `last_receipt_stream_ordering` is set and - # *doesn't* match the latest receipt from the user. - stale_room_ids.add(room_id) - # Discard any stale rooms from `room_to_count`, as we will recalculate - # them in step 3. - for room_id in stale_room_ids: - room_to_count.pop(room_id, None) + for room_id, thread_id, notif_count in txn: + room_to_count[room_id] += notif_count + seen_thread_ids.add(thread_id) - # Step 2, basically the same query, except against `event_push_actions` - # and only fetching rows inserted since the last rotation. 
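The `max_clause` branching above exists because PostgreSQL's `GREATEST()` ignores NULL arguments while SQLite's scalar `MAX()` returns NULL if any argument is NULL, hence the `COALESCE(..., 0)` wrapping on the SQLite side. The SQLite half can be checked directly:

```python
import sqlite3

conn = sqlite3.connect(":memory:")

# SQLite's scalar MAX() returns NULL if *any* argument is NULL...
print(conn.execute("SELECT MAX(3, NULL)").fetchone())  # (None,)

# ...hence the COALESCE-to-0 in the SQLite branch above. PostgreSQL's
# GREATEST() would simply ignore the NULL instead.
print(conn.execute("SELECT MAX(COALESCE(3, 0), COALESCE(NULL, 0))").fetchone())  # (3,)
```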
- rotated_upto_stream_ordering = self.db_pool.simple_select_one_onecol_txn( - txn, - table="event_push_summary_stream_ordering", - keyvalues={}, - retcol="stream_ordering", + # Now get any event push actions that haven't been rotated using the same OR + # join and filter by receipt and event push summary rotated up to stream ordering. + sql = f""" + {receipts_cte} + SELECT epa.room_id, epa.thread_id, COUNT(CASE WHEN epa.notif = 1 THEN 1 END) AS notif_count + FROM event_push_actions AS epa + {receipts_joins} + WHERE user_id = ? + AND epa.notif = 1 + AND stream_ordering > (SELECT stream_ordering FROM event_push_summary_stream_ordering) + AND (threaded_receipt_stream_ordering IS NULL OR stream_ordering > threaded_receipt_stream_ordering) + AND (unthreaded_receipt_stream_ordering IS NULL OR stream_ordering > unthreaded_receipt_stream_ordering) + GROUP BY epa.room_id, epa.thread_id + """ + txn.execute(sql, args) + + for room_id, thread_id, notif_count in txn: + # Note: only count push actions we have valid summaries for with up to date receipt. + if thread_id not in seen_thread_ids: + continue + room_to_count[room_id] += notif_count + + thread_id_clause, thread_ids_args = make_in_list_sql_clause( + self.database_engine, "epa.thread_id", seen_thread_ids ) + # Finally re-check event_push_actions for any rooms not in the summary, ignoring + # the rotated up-to position. This handles the case where a read receipt has arrived + # but not been rotated meaning the summary table is out of date, so we go back to + # the push actions table. sql = f""" - SELECT room_id, thread_id - FROM ( - SELECT e.room_id, e.stream_ordering, e.thread_id, - ev.stream_ordering AS receipt_stream_ordering - FROM event_push_actions AS e - INNER JOIN local_current_membership USING (user_id, room_id) - LEFT JOIN receipts_linearized AS r ON ( - e.user_id = r.user_id - AND e.room_id = r.room_id - AND (e.thread_id = r.thread_id OR r.thread_id IS NULL) - AND {receipt_types_clause} - ) - LEFT JOIN events AS ev ON (r.event_id = ev.event_id) - WHERE e.user_id = ? and notif > 0 - AND e.stream_ordering > ? - ) AS es - GROUP BY room_id, stream_ordering, thread_id - HAVING stream_ordering > COALESCE(MAX(receipt_stream_ordering), 0) + {receipts_cte} + SELECT epa.room_id, COUNT(CASE WHEN epa.notif = 1 THEN 1 END) AS notif_count + FROM event_push_actions AS epa + {receipts_joins} + WHERE user_id = ? + AND NOT {thread_id_clause} + AND epa.notif = 1 + AND (threaded_receipt_stream_ordering IS NULL OR stream_ordering > threaded_receipt_stream_ordering) + AND (unthreaded_receipt_stream_ordering IS NULL OR stream_ordering > unthreaded_receipt_stream_ordering) + GROUP BY epa.room_id """ - txn.execute( - sql, - receipt_types_args + [user_id, rotated_upto_stream_ordering], - ) - for room_id, _thread_id in txn: - # Again, we ignore any stale rooms. - if room_id not in stale_room_ids: - # For event push actions it is one notification per row. - room_to_count[room_id] += 1 - - # Step 3, if we have stale rooms then we need to recalculate the counts - # from `event_push_actions`. Again, this is basically the same query as - # above except without a lower bound on stream ordering and only against - # a specific set of rooms. 
- if stale_room_ids: - room_id_clause, room_id_args = make_in_list_sql_clause( - self.database_engine, - "e.room_id", - stale_room_ids, - ) + args.extend(thread_ids_args) + txn.execute(sql, args) - sql = f""" - SELECT room_id, thread_id - FROM ( - SELECT e.room_id, e.stream_ordering, e.thread_id, - ev.stream_ordering AS receipt_stream_ordering - FROM event_push_actions AS e - INNER JOIN local_current_membership USING (user_id, room_id) - LEFT JOIN receipts_linearized AS r ON ( - e.user_id = r.user_id - AND e.room_id = r.room_id - AND (e.thread_id = r.thread_id OR r.thread_id IS NULL) - AND {receipt_types_clause} - ) - LEFT JOIN events AS ev ON (r.event_id = ev.event_id) - WHERE e.user_id = ? and notif > 0 - AND {room_id_clause} - ) AS es - GROUP BY room_id, stream_ordering, thread_id - HAVING stream_ordering > COALESCE(MAX(receipt_stream_ordering), 0) - """ - txn.execute( - sql, - receipt_types_args + [user_id] + room_id_args, - ) - for room_id, _ in txn: - room_to_count[room_id] += 1 + for room_id, notif_count in txn: + room_to_count[room_id] += notif_count return room_to_count diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index d5942a10b27d..a6fda3f43c86 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1292,9 +1292,9 @@ def _filter_events_and_contexts_for_duplicates( Returns: filtered list """ - new_events_and_contexts: OrderedDict[ - str, Tuple[EventBase, EventContext] - ] = OrderedDict() + new_events_and_contexts: OrderedDict[str, Tuple[EventBase, EventContext]] = ( + OrderedDict() + ) for event, context in events_and_contexts: prev_event_context = new_events_and_contexts.get(event.event_id) if prev_event_context: diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 1fd458b51024..e39d4b962426 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -263,11 +263,13 @@ def __init__( 5 * 60 * 1000, ) - self._get_event_cache: AsyncLruCache[ - Tuple[str], EventCacheEntry - ] = AsyncLruCache( - cache_name="*getEvent*", - max_size=hs.config.caches.event_cache_size, + self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( + AsyncLruCache( + cache_name="*getEvent*", + max_size=hs.config.caches.event_cache_size, + # `extra_index_cb` Returns a tuple as that is the key type + extra_index_cb=lambda _, v: (v.event.room_id,), + ) ) # Map from event ID to a deferred that will result in a map from event @@ -457,8 +459,7 @@ async def get_event( allow_rejected: bool = ..., allow_none: Literal[False] = ..., check_room_id: Optional[str] = ..., - ) -> EventBase: - ... + ) -> EventBase: ... @overload async def get_event( @@ -469,8 +470,7 @@ async def get_event( allow_rejected: bool = ..., allow_none: Literal[True] = ..., check_room_id: Optional[str] = ..., - ) -> Optional[EventBase]: - ... + ) -> Optional[EventBase]: ... @cancellable async def get_event( @@ -782,9 +782,9 @@ async def get_unredacted_events_from_cache_or_db( if missing_events_ids: - async def get_missing_events_from_cache_or_db() -> Dict[ - str, EventCacheEntry - ]: + async def get_missing_events_from_cache_or_db() -> ( + Dict[str, EventCacheEntry] + ): """Fetches the events in `missing_event_ids` from the database. 
Also creates entries in `self._current_event_fetches` to allow @@ -798,9 +798,9 @@ async def get_missing_events_from_cache_or_db() -> Dict[ # to all the events we pulled from the DB (this will result in this # function returning more events than requested, but that can happen # already due to `_get_events_from_db`). - fetching_deferred: ObservableDeferred[ - Dict[str, EventCacheEntry] - ] = ObservableDeferred(defer.Deferred(), consumeErrors=True) + fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = ( + ObservableDeferred(defer.Deferred(), consumeErrors=True) + ) for event_id in missing_events_ids: self._current_event_fetches[event_id] = fetching_deferred @@ -910,12 +910,12 @@ def _invalidate_local_get_event_cache(self, event_id: str) -> None: self._event_ref.pop(event_id, None) self._current_event_fetches.pop(event_id, None) - def _invalidate_local_get_event_cache_all(self) -> None: - """Clears the in-memory get event caches. + def _invalidate_local_get_event_cache_room_id(self, room_id: str) -> None: + """Clears the in-memory get event caches for a room. Used when we purge room history. """ - self._get_event_cache.clear() + self._get_event_cache.invalidate_on_extra_index_local((room_id,)) self._event_ref.clear() self._current_event_fetches.clear() @@ -1869,14 +1869,14 @@ def get_all_new_backfill_event_rows( " LIMIT ?" ) txn.execute(sql, (-last_id, -current_id, instance_name, limit)) - new_event_updates: List[ - Tuple[int, Tuple[str, str, str, str, str, str]] - ] = [] + new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = ( + [] + ) row: Tuple[int, str, str, str, str, str, str] # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. - for row in txn: # type: ignore[assignment] + for row in txn: new_event_updates.append((row[0], row[1:])) limited = False @@ -1903,7 +1903,7 @@ def get_all_new_backfill_event_rows( # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. - for row in txn: # type: ignore[assignment] + for row in txn: new_event_updates.append((row[0], row[1:])) if len(new_event_updates) >= limit: @@ -1995,16 +1995,18 @@ def get_deltas_for_stream_id_txn( return rows, to_token, True @cached(max_entries=5000) - async def get_event_ordering(self, event_id: str) -> Tuple[int, int]: + async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, int]: res = await self.db_pool.simple_select_one( table="events", retcols=["topological_ordering", "stream_ordering"], - keyvalues={"event_id": event_id}, + keyvalues={"event_id": event_id, "room_id": room_id}, allow_none=True, ) if not res: - raise SynapseError(404, "Could not find event %s" % (event_id,)) + raise SynapseError( + 404, "Could not find event %s in room %s" % (event_id, room_id) + ) return int(res[0]), int(res[1]) diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index 0794cc6d251a..8277ad8c33c3 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -79,9 +79,9 @@ def __init__( # A map from `(lock_name, lock_key)` to lock that we think we # currently hold. 
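The `events_worker.py` hunks above replace clear-the-world invalidation with a per-room purge by giving the event cache a secondary index keyed on `room_id` (the new `extra_index_cb`). A toy cache showing the idea; the class and its API are illustrative, not Synapse's `AsyncLruCache`:

```python
from collections import defaultdict
from typing import Callable, Dict, Generic, Hashable, Set, TypeVar

K = TypeVar("K", bound=Hashable)
V = TypeVar("V")

class IndexedCache(Generic[K, V]):
    """Toy cache with a secondary index, in the spirit of the
    extra_index_cb added to AsyncLruCache above."""

    def __init__(self, extra_index_cb: Callable[[K, V], Hashable]) -> None:
        self._data: Dict[K, V] = {}
        self._index: Dict[Hashable, Set[K]] = defaultdict(set)
        self._extra_index_cb = extra_index_cb

    def set(self, key: K, value: V) -> None:
        self._data[key] = value
        self._index[self._extra_index_cb(key, value)].add(key)

    def invalidate_on_extra_index(self, index_key: Hashable) -> None:
        # Drop only the entries tied to this secondary key, e.g. all
        # cached events for one room, instead of clearing everything.
        for key in self._index.pop(index_key, set()):
            self._data.pop(key, None)

cache: "IndexedCache[str, dict]" = IndexedCache(lambda _eid, ev: ev["room_id"])
cache.set("$e1", {"room_id": "!a"})
cache.set("$e2", {"room_id": "!a"})
cache.invalidate_on_extra_index("!a")
assert not cache._data
```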
- self._live_lock_tokens: WeakValueDictionary[ - Tuple[str, str], Lock - ] = WeakValueDictionary() + self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = ( + WeakValueDictionary() + ) # A map from `(lock_name, lock_key, token)` to read/write lock that we # think we currently hold. For a given lock_name/lock_key, there can be diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index b5ed1bf9c839..6128332af8ba 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -158,9 +158,9 @@ def __init__( ) if hs.config.media.can_load_media_repo: - self.unused_expiration_time: Optional[ - int - ] = hs.config.media.unused_expiration_time + self.unused_expiration_time: Optional[int] = ( + hs.config.media.unused_expiration_time + ) else: self.unused_expiration_time = None diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 3c7708f5f358..d513c4253012 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -394,9 +394,9 @@ def f(txn: LoggingTransaction) -> List[Tuple[str, str, str, str]]: content: JsonDict = {} for receipt_type, user_id, event_id, data in rows: - content.setdefault(event_id, {}).setdefault(receipt_type, {})[ - user_id - ] = db_to_json(data) + content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = ( + db_to_json(data) + ) return [{"type": EduTypes.RECEIPT, "room_id": room_id, "content": content}] @@ -472,9 +472,24 @@ def f( event_entry = room_event["content"].setdefault(event_id, {}) receipt_type_dict = event_entry.setdefault(receipt_type, {}) - receipt_type_dict[user_id] = db_to_json(data) - if thread_id: - receipt_type_dict[user_id]["thread_id"] = thread_id + # MSC4102: always replace threaded receipts with unthreaded ones if there is a clash. + # Specifically: + # - if there is no existing receipt, great, set the data. + # - if there is an existing receipt, is it threaded (thread_id present)? + # YES: replace if this receipt has no thread id. NO: do not replace. + # This means we will drop some receipts, but MSC4102 is designed to drop semantically + # meaningless receipts, so this is okay. Previously, we would drop meaningful data! + receipt_data = db_to_json(data) + if user_id in receipt_type_dict: # existing receipt + # is the existing receipt threaded and we are currently processing an unthreaded one? + if "thread_id" in receipt_type_dict[user_id] and not thread_id: + receipt_type_dict[user_id] = ( + receipt_data # replace with unthreaded one + ) + else: # receipt does not exist, just set it + receipt_type_dict[user_id] = receipt_data + if thread_id: + receipt_type_dict[user_id]["thread_id"] = thread_id results = { room_id: [results[room_id]] if room_id in results else [] diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 5b0daffa46b5..5d5150259506 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -369,6 +369,22 @@ async def get_invited_rooms_for_local_user( user_id, [Membership.INVITE] ) + async def get_knocked_at_rooms_for_local_user( + self, user_id: str + ) -> Sequence[RoomsForUser]: + """Get all the rooms the *local* user has knocked at. + + Args: + user_id: The user ID. + + Returns: + A list of RoomsForUser. 
+ """ + + return await self.get_rooms_for_local_user_where_membership_is( + user_id, [Membership.KNOCK] + ) + async def get_invite_for_local_user_in_room( self, user_id: str, room_id: str ) -> Optional[RoomsForUser]: diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index 3220d515d994..b2a67aff8955 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -768,12 +768,10 @@ def __getitem__(self, key: StateKey) -> str: return super().__getitem__(key) @overload - def get(self, key: Tuple[str, str]) -> Optional[str]: - ... + def get(self, key: Tuple[str, str]) -> Optional[str]: ... @overload - def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: - ... + def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: ... def get( self, key: StateKey, default: Union[str, _T, None] = None diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 19041cc35b6f..7ab6003f61e3 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -988,8 +988,7 @@ def get_stream_id_for_event_txn( txn: LoggingTransaction, event_id: str, allow_none: Literal[False] = False, - ) -> int: - ... + ) -> int: ... @overload def get_stream_id_for_event_txn( @@ -997,8 +996,7 @@ def get_stream_id_for_event_txn( txn: LoggingTransaction, event_id: str, allow_none: bool = False, - ) -> Optional[int]: - ... + ) -> Optional[int]: ... def get_stream_id_for_event_txn( self, @@ -1476,12 +1474,12 @@ def _paginate_room_events_txn( _EventDictReturn(event_id, topological_ordering, stream_ordering) for event_id, instance_name, topological_ordering, stream_ordering in txn if _filter_results( - lower_token=to_token - if direction == Direction.BACKWARDS - else from_token, - upper_token=from_token - if direction == Direction.BACKWARDS - else to_token, + lower_token=( + to_token if direction == Direction.BACKWARDS else from_token + ), + upper_token=( + from_token if direction == Direction.BACKWARDS else to_token + ), instance_name=instance_name, topological_ordering=topological_ordering, stream_ordering=stream_ordering, diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py index 7b9561643202..4956870b1afd 100644 --- a/synapse/storage/databases/main/task_scheduler.py +++ b/synapse/storage/databases/main/task_scheduler.py @@ -136,12 +136,12 @@ async def insert_scheduled_task(self, task: ScheduledTask) -> None: "status": task.status, "timestamp": task.timestamp, "resource_id": task.resource_id, - "params": None - if task.params is None - else json_encoder.encode(task.params), - "result": None - if task.result is None - else json_encoder.encode(task.result), + "params": ( + None if task.params is None else json_encoder.encode(task.params) + ), + "result": ( + None if task.result is None else json_encoder.encode(task.result) + ), "error": task.error, }, desc="insert_scheduled_task", diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index c91c44818f37..08e0241f6839 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -423,8 +423,11 @@ async def get_catch_up_outstanding_destinations( self, after_destination: Optional[str] ) -> List[str]: """ - Gets at most 25 destinations which have outstanding PDUs to be caught up, - and are not being backed off from + 
Get a list of destinations we should retry transaction sending to. + + Returns up to 25 destinations which have outstanding PDUs or to-device messages, + and are not subject to a backoff. + Args: after_destination: If provided, all destinations must be lexicographically greater @@ -448,30 +451,86 @@ async def get_catch_up_outstanding_destinations( def _get_catch_up_outstanding_destinations_txn( txn: LoggingTransaction, now_time_ms: int, after_destination: Optional[str] ) -> List[str]: + # We're looking for destinations which satisfy either of the following + # conditions: + # + # * There is at least one room where we have an event that we have not yet + # sent to them, indicated by a row in `destination_rooms` with a + # `stream_ordering` older than the `last_successful_stream_ordering` + # (if any) in `destinations`, or: + # + # * There is at least one to-device message outstanding for the destination, + # indicated by a row in `device_federation_outbox`. + # + # Of course, that may produce destinations where we are already busy sending + # the relevant PDU or to-device message, but in that case, waking up the + # sender will just be a no-op. + # + # From those two lists, we need to *exclude* destinations which are subject + # to a backoff (ie, where `destinations.retry_last_ts + destinations.retry_interval` + # is in the future). There is also an edge-case where, if the server was + # previously shut down in the middle of the first send attempt to a given + # destination, there may be no row in `destinations` at all; we need to include + # such rows in the output, which means we need to left-join rather than + # inner-join against `destinations`. + # + # The two sources of destinations (`destination_rooms` and + # `device_federation_outbox`) are queried separately and UNIONed; but the list + # may be very long, and we don't want to return all the rows at once. We + # therefore sort the output and just return the first 25 rows. Obviously that + # means there is no point in either of the inner queries returning more than + # 25 results, since any further results are certain to be dropped by the outer + # LIMIT. In order to help the query-optimiser understand that, we *also* sort + # and limit the *inner* queries, hence we express them as CTEs rather than + # sub-queries. + # + # (NB: we make sure to do the top-level sort and limit on the database, rather + # than making two queries and combining the result in Python. We could otherwise + # suffer from slight differences in sort order between Python and the database, + # which would make the `after_destination` condition unreliable.) + q = """ - SELECT DISTINCT destination FROM destinations - INNER JOIN destination_rooms USING (destination) - WHERE - stream_ordering > last_successful_stream_ordering - AND destination > ? - AND ( - retry_last_ts IS NULL OR - retry_last_ts + retry_interval < ? - ) - ORDER BY destination - LIMIT 25 + WITH pdu_destinations AS ( + SELECT DISTINCT destination FROM destination_rooms + LEFT JOIN destinations USING (destination) + WHERE + destination > ? + AND destination_rooms.stream_ordering > COALESCE(destinations.last_successful_stream_ordering, 0) + AND ( + destinations.retry_last_ts IS NULL OR + destinations.retry_last_ts + destinations.retry_interval < ? + ) + ORDER BY destination + LIMIT 25 + ), to_device_destinations AS ( + SELECT DISTINCT destination FROM device_federation_outbox + LEFT JOIN destinations USING (destination) + WHERE + destination > ? 
+ AND ( + destinations.retry_last_ts IS NULL OR + destinations.retry_last_ts + destinations.retry_interval < ? + ) + ORDER BY destination + LIMIT 25 + ) + + SELECT destination FROM pdu_destinations + UNION SELECT destination FROM to_device_destinations + ORDER BY destination + LIMIT 25 """ + + # everything is lexicographically greater than "" so this gives + # us the first batch of up to 25. + after_destination = after_destination or "" + txn.execute( q, - ( - # everything is lexicographically greater than "" so this gives - # us the first batch of up to 25. - after_destination or "", - now_time_ms, - ), + (after_destination, now_time_ms, after_destination, now_time_ms), ) - destinations = [row[0] for row in txn] + return destinations async def get_destinations_paginate( diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index a1c4b8c6c3e1..0513e7dc06ef 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -745,9 +745,11 @@ def _update_profiles_in_user_dir_txn( p.user_id, get_localpart_from_id(p.user_id), get_domain_from_id(p.user_id), - _filter_text_for_index(p.display_name) - if p.display_name - else None, + ( + _filter_text_for_index(p.display_name) + if p.display_name + else None + ), ) for p in profiles ], diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index e64495ba8d5a..d4ac74c1ee52 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -120,11 +120,11 @@ def __init__( # TODO: this hasn't been tuned yet 50000, ) - self._state_group_members_cache: DictionaryCache[ - int, StateKey, str - ] = DictionaryCache( - "*stateGroupMembersCache*", - 500000, + self._state_group_members_cache: DictionaryCache[int, StateKey, str] = ( + DictionaryCache( + "*stateGroupMembersCache*", + 500000, + ) ) def get_max_state_group_txn(txn: Cursor) -> int: diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index 8c29236b5939..ad222e7e2d66 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -48,8 +48,7 @@ def __init__(self, module: DBAPI2Module, config: Mapping[str, Any]): @property @abc.abstractmethod - def single_threaded(self) -> bool: - ... + def single_threaded(self) -> bool: ... @property @abc.abstractmethod @@ -68,8 +67,7 @@ def supports_returning(self) -> bool: @abc.abstractmethod def check_database( self, db_conn: ConnectionType, allow_outdated_version: bool = False - ) -> None: - ... + ) -> None: ... @abc.abstractmethod def check_new_database(self, txn: CursorType) -> None: @@ -79,27 +77,22 @@ def check_new_database(self, txn: CursorType) -> None: ... @abc.abstractmethod - def convert_param_style(self, sql: str) -> str: - ... + def convert_param_style(self, sql: str) -> str: ... # This method would ideally take a plain ConnectionType, but it seems that # the Sqlite engine expects to use LoggingDatabaseConnection.cursor # instead of sqlite3.Connection.cursor: only the former takes a txn_name. @abc.abstractmethod - def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: - ... + def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: ... @abc.abstractmethod - def is_deadlock(self, error: Exception) -> bool: - ... + def is_deadlock(self, error: Exception) -> bool: ... @abc.abstractmethod - def is_connection_closed(self, conn: ConnectionType) -> bool: - ... 
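The rewritten query above also keeps the old keyset-pagination idiom: `after_destination or ""` works because the empty string sorts before every destination name, so `None` yields the first page. A self-contained SQLite sketch of that idiom, with a hypothetical table and data:

```python
import sqlite3
from typing import List, Optional

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE dests (destination TEXT PRIMARY KEY)")
conn.executemany(
    "INSERT INTO dests VALUES (?)",
    [("a.example",), ("b.example",), ("c.example",)],
)

def next_batch(after: Optional[str], limit: int = 2) -> List[str]:
    # "" sorts before every destination, so after=None yields page one.
    rows = conn.execute(
        "SELECT destination FROM dests WHERE destination > ?"
        " ORDER BY destination LIMIT ?",
        (after or "", limit),
    )
    return [r[0] for r in rows]

print(next_batch(None))         # ['a.example', 'b.example']
print(next_batch("b.example"))  # ['c.example']
```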
+ def is_connection_closed(self, conn: ConnectionType) -> bool: ... @abc.abstractmethod - def lock_table(self, txn: Cursor, table: str) -> None: - ... + def lock_table(self, txn: Cursor, table: str) -> None: ... @property @abc.abstractmethod diff --git a/synapse/storage/types.py b/synapse/storage/types.py index b4e0a8f5762e..74f60cc59038 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -42,20 +42,17 @@ class Cursor(Protocol): - def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: - ... + def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ... - def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any: - ... + def executemany( + self, sql: str, parameters: Sequence[SQLQueryParameters] + ) -> Any: ... - def fetchone(self) -> Optional[Tuple]: - ... + def fetchone(self) -> Optional[Tuple]: ... - def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: - ... + def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ... - def fetchall(self) -> List[Tuple]: - ... + def fetchall(self) -> List[Tuple]: ... @property def description( @@ -70,36 +67,28 @@ def description( def rowcount(self) -> int: return 0 - def __iter__(self) -> Iterator[Tuple]: - ... + def __iter__(self) -> Iterator[Tuple]: ... - def close(self) -> None: - ... + def close(self) -> None: ... class Connection(Protocol): - def cursor(self) -> Cursor: - ... + def cursor(self) -> Cursor: ... - def close(self) -> None: - ... + def close(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def rollback(self) -> None: - ... + def rollback(self) -> None: ... - def __enter__(self) -> "Connection": - ... + def __enter__(self) -> "Connection": ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], - ) -> Optional[bool]: - ... + ) -> Optional[bool]: ... class DBAPI2Module(Protocol): @@ -129,24 +118,20 @@ class DBAPI2Module(Protocol): # explain why this is necessary for safety. TL;DR: we shouldn't be able to write # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 . @property - def Warning(self) -> Type[Exception]: - ... + def Warning(self) -> Type[Exception]: ... @property - def Error(self) -> Type[Exception]: - ... + def Error(self) -> Type[Exception]: ... # Errors are divided into `InterfaceError`s (something went wrong in the database # driver) and `DatabaseError`s (something went wrong in the database). These are # both subclasses of `Error`, but we can't currently express this in type # annotations due to https://github.com/python/mypy/issues/8397 @property - def InterfaceError(self) -> Type[Exception]: - ... + def InterfaceError(self) -> Type[Exception]: ... @property - def DatabaseError(self) -> Type[Exception]: - ... + def DatabaseError(self) -> Type[Exception]: ... # Everything below is a subclass of `DatabaseError`. @@ -155,8 +140,7 @@ def DatabaseError(self) -> Type[Exception]: # - An invalid date time was provided. # - A string contained a null code point. @property - def DataError(self) -> Type[Exception]: - ... + def DataError(self) -> Type[Exception]: ... # Roughly: something went wrong in the database, but it's not within the application # programmer's control. Examples: @@ -167,21 +151,18 @@ def DataError(self) -> Type[Exception]: # - The database ran out of resources, such as storage, memory, connections, etc. # - The database encountered an error from the operating system. 
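The `DBAPI2Module` protocol above declares each exception class as a read-only `@property` returning `Type[Exception]`, so callers can read but never reassign it. Roughly how that plays out against a real driver module, sketched with a cut-down protocol (mypy generally accepts a module as a protocol implementation):

```python
import sqlite3
from typing import Protocol, Type

class DBAPI2Like(Protocol):
    # Declaring the attribute as a read-only property means implementers
    # (here: a real DB-API module) may expose a plain class attribute,
    # while callers of the protocol can read it but never assign to it.
    @property
    def Error(self) -> Type[Exception]: ...

mod: DBAPI2Like = sqlite3  # sqlite3.Error satisfies the protocol member
print(mod.Error)           # <class 'sqlite3.Error'>
```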
@property - def OperationalError(self) -> Type[Exception]: - ... + def OperationalError(self) -> Type[Exception]: ... # Roughly: we've given the database data which breaks a rule we asked it to enforce. # Examples: # - Stop, criminal scum! You violated the foreign key constraint # - Also check constraints, non-null constraints, etc. @property - def IntegrityError(self) -> Type[Exception]: - ... + def IntegrityError(self) -> Type[Exception]: ... # Roughly: something went wrong within the database server itself. @property - def InternalError(self) -> Type[Exception]: - ... + def InternalError(self) -> Type[Exception]: ... # Roughly: the application did something silly that needs to be fixed. Examples: # - We don't have permissions to do something. @@ -189,13 +170,11 @@ def InternalError(self) -> Type[Exception]: # - We tried to use a reserved name. # - We referred to a column that doesn't exist. @property - def ProgrammingError(self) -> Type[Exception]: - ... + def ProgrammingError(self) -> Type[Exception]: ... # Roughly: we've tried to do something that this database doesn't support. @property - def NotSupportedError(self) -> Type[Exception]: - ... + def NotSupportedError(self) -> Type[Exception]: ... # We originally wrote # def connect(self, *args, **kwargs) -> Connection: ... @@ -204,8 +183,7 @@ def NotSupportedError(self) -> Type[Exception]: # psycopg2.connect doesn't have a mandatory positional argument. Instead, we use # the following slightly unusual workaround. @property - def connect(self) -> Callable[..., Connection]: - ... + def connect(self) -> Callable[..., Connection]: ... __all__ = ["Cursor", "Connection", "DBAPI2Module"] diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 7466488157a4..dd7401ac8e9b 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -57,12 +57,13 @@ def get_sources(self) -> Sequence[Tuple[StreamKeyType, EventSource]]: class EventSources: def __init__(self, hs: "HomeServer"): self.sources = _EventSourcesInner( - # mypy previously warned that attribute.type is `Optional`, but we know it's + # attribute.type is `Optional`, but we know it's # never `None` here since all the attributes of `_EventSourcesInner` are # annotated. - # As of the stubs in attrs 22.1.0, `attr.fields()` now returns Any, - # so the call to `attribute.type` is not checked. - *(attribute.type(hs) for attribute in attr.fields(_EventSourcesInner)) + *( + attribute.type(hs) # type: ignore[misc] + for attribute in attr.fields(_EventSourcesInner) + ) ) self.store = hs.get_datastores().main self._instance_name = hs.get_instance_name() diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index 423ede596903..69837617f551 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -56,7 +56,7 @@ class EventInternalMetadata: (Added in synapse 0.99.0, so may be unreliable for events received before that) """ - ... + def get_send_on_behalf_of(self) -> Optional[str]: """Whether this server should send the event on behalf of another server. This is used by the federation "send_join" API to forward the initial join @@ -64,7 +64,7 @@ class EventInternalMetadata: returns a str with the name of the server this event is sent on behalf of. """ - ... + def need_to_check_redaction(self) -> bool: """Whether the redaction event needs to be rechecked when fetching from the database. 
@@ -75,7 +75,7 @@ class EventInternalMetadata: If the sender of the redaction event is allowed to redact any event due to auth rules, then this will always return false. """ - ... + def is_soft_failed(self) -> bool: """Whether the event has been soft failed. @@ -85,7 +85,7 @@ class EventInternalMetadata: 2. They should not be added to the forward extremities (and therefore not to current state). """ - ... + def should_proactively_send(self) -> bool: """Whether the event, if ours, should be sent to other clients and servers. @@ -93,14 +93,13 @@ class EventInternalMetadata: This is used for sending dummy events internally. Servers and clients can still explicitly fetch the event. """ - ... + def is_redacted(self) -> bool: """Whether the event has been redacted. This is used for efficiently checking whether an event has been marked as redacted without needing to make another database call. """ - ... + def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" - ... diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index d3ee71837568..a88982a04c24 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -976,12 +976,12 @@ def copy_and_replace(self, key: StreamKeyType, new_value: Any) -> "StreamToken": return attr.evolve(self, **{key.value: new_value}) @overload - def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: - ... + def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: ... @overload - def get_field(self, key: Literal[StreamKeyType.RECEIPT]) -> MultiWriterStreamToken: - ... + def get_field( + self, key: Literal[StreamKeyType.RECEIPT] + ) -> MultiWriterStreamToken: ... @overload def get_field( @@ -995,14 +995,12 @@ def get_field( StreamKeyType.TYPING, StreamKeyType.UN_PARTIAL_STATED_ROOMS, ], - ) -> int: - ... + ) -> int: ... @overload def get_field( self, key: StreamKeyType - ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: - ... + ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: ... def get_field( self, key: StreamKeyType diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 9e374354ec36..e0d876e84bf3 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -117,7 +117,11 @@ def time_msec(self) -> int: return int(self.time() * 1000) def looping_call( - self, f: Callable[P, object], msec: float, *args: P.args, **kwargs: P.kwargs + self, + f: Callable[P, object], + msec: float, + *args: P.args, + **kwargs: P.kwargs, ) -> LoopingCall: """Call a function repeatedly. @@ -134,12 +138,46 @@ def looping_call( Args: f: The function to call repeatedly. msec: How long to wait between calls in milliseconds. - *args: Postional arguments to pass to function. + *args: Positional arguments to pass to function. **kwargs: Key arguments to pass to function. """ + return self._looping_call_common(f, msec, False, *args, **kwargs) + + def looping_call_now( + self, + f: Callable[P, object], + msec: float, + *args: P.args, + **kwargs: P.kwargs, + ) -> LoopingCall: + """Call a function immediately, and then repeatedly thereafter. + + As with `looping_call`: subsequent calls are not scheduled until after the + Awaitable returned by a previous call has finished. + + Also as with `looping_call`: the function is called with no logcontext and + you probably want to wrap it in `run_as_background_process`. + + Args: + f: The function to call repeatedly. + msec: How long to wait between calls in milliseconds. + *args: Positional arguments to pass to function.
+ **kwargs: Key arguments to pass to function. + """ + return self._looping_call_common(f, msec, True, *args, **kwargs) + + def _looping_call_common( + self, + f: Callable[P, object], + msec: float, + now: bool, + *args: P.args, + **kwargs: P.kwargs, + ) -> LoopingCall: + """Common functionality for `looping_call` and `looping_call_now`""" call = task.LoopingCall(f, *args, **kwargs) call.clock = self._reactor - d = call.start(msec / 1000.0, now=False) + d = call.start(msec / 1000.0, now=now) d.addErrback(log_failure, "Looping call died", consumeErrors=False) return call diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 914d4fd74785..70139beef298 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -284,15 +284,7 @@ async def yieldable_gather_results( try: return await make_deferred_yieldable( defer.gatherResults( - # type-ignore: mypy reports two errors: - # error: Argument 1 to "run_in_background" has incompatible type - # "Callable[[T, **P], Awaitable[R]]"; expected - # "Callable[[T, **P], Awaitable[R]]" [arg-type] - # error: Argument 2 to "run_in_background" has incompatible type - # "T"; expected "[T, **P.args]" [arg-type] - # The former looks like a mypy bug, and the latter looks like a - # false positive. - [run_in_background(func, item, *args, **kwargs) for item in iter], # type: ignore[arg-type] + [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, ) ) @@ -338,7 +330,7 @@ async def yieldable_gather_results_delaying_cancellation( return await make_deferred_yieldable( delay_cancellation( defer.gatherResults( - [run_in_background(func, item, *args, **kwargs) for item in iter], # type: ignore[arg-type] + [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, ) ) @@ -357,24 +349,21 @@ async def yieldable_gather_results_delaying_cancellation( @overload def gather_results( deferredList: Tuple[()], consumeErrors: bool = ... -) -> "defer.Deferred[Tuple[()]]": - ... +) -> "defer.Deferred[Tuple[()]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1]]": - ... +) -> "defer.Deferred[Tuple[T1]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2]]": - ... +) -> "defer.Deferred[Tuple[T1, T2]]": ... @overload @@ -383,8 +372,7 @@ def gather_results( "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]" ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3]]": ... @overload @@ -396,8 +384,7 @@ def gather_results( "defer.Deferred[T4]", ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ... def gather_results( # type: ignore[misc] @@ -782,18 +769,15 @@ def stop_cancellation(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]": @overload -def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: - ... 
+def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: ... def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index 4245b7289cb6..1e6696332f5c 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -229,7 +229,7 @@ def get( for dict_key in missing: # We explicitly add each dict key to the cache, so that cache hit # rates and LRU times for each key can be tracked separately. - value = entry.get(dict_key, _Sentinel.sentinel) # type: ignore[arg-type] + value = entry.get(dict_key, _Sentinel.sentinel) self.cache[(key, dict_key)] = _PerKeyValue(value) if value is not _Sentinel.sentinel: diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index a52ba59a34cd..8017c031ee08 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -142,7 +142,7 @@ def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: return default if self.iterable: - self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value)) # type: ignore[arg-type] + self.metrics.inc_evictions(EvictionReason.invalidation, len(value.value)) else: self.metrics.inc_evictions(EvictionReason.invalidation) @@ -152,12 +152,10 @@ def __contains__(self, key: KT) -> bool: return key in self._cache @overload - def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def get(self, key: KT, default: T) -> Union[VT, T]: - ... + def get(self, key: KT, default: T) -> Union[VT, T]: ... def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]: try: diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 6e8c1e84acf3..481a1a621e9f 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -35,6 +35,7 @@ Iterable, List, Optional, + Set, Tuple, Type, TypeVar, @@ -386,6 +387,7 @@ def __init__( apply_cache_factor_from_config: bool = True, clock: Optional[Clock] = None, prune_unread_entries: bool = True, + extra_index_cb: Optional[Callable[[KT, VT], KT]] = None, ): """ Args: @@ -416,6 +418,20 @@ def __init__( prune_unread_entries: If True, cache entries that haven't been read recently will be evicted from the cache in the background. Set to False to opt-out of this behaviour. + + extra_index_cb: If provided, the cache keeps a second index from a + (different) key to a cache entry based on the return value of + the callback. This can then be used to invalidate entries based + on the second type of key. + + For example, for the event cache this would be a callback that + maps an event to its room ID, allowing invalidation of all + events in a given room. + + Note: Though the two types of key have the same type, they are + in different namespaces. + + Note: The new key does not have to be unique. """ # Default `clock` to something sensible. Note that we rename it to # `real_clock` so that mypy doesn't think its still `Optional`. @@ -463,6 +479,8 @@ def __init__( lock = threading.Lock() + extra_index: Dict[KT, Set[KT]] = {} + def evict() -> None: while cache_len() > self.max_size: # Get the last node in the list (i.e. the oldest node). 
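The `extra_index_cb` parameter introduced above gives `LruCache` a secondary index. A sketch of the intended usage, extrapolated from the docstring and from the `invalidate_on_extra_index` tests near the end of this diff; the event-shaped keys and values are illustrative only:

```python
from synapse.util.caches.lrucache import LruCache

# Cache events by event ID, with a secondary index on room ID. The
# callback maps (key, value) -> index key, and need not be unique.
cache: LruCache[str, dict] = LruCache(
    max_size=1000,
    extra_index_cb=lambda event_id, event: event["room_id"],
)

cache["$event1"] = {"room_id": "!room:a", "type": "m.room.message"}
cache["$event2"] = {"room_id": "!room:a", "type": "m.room.member"}
cache["$event3"] = {"room_id": "!room:b", "type": "m.room.message"}

# Evict everything cached for !room:a in one call; !room:b is untouched.
cache.invalidate_on_extra_index("!room:a")
assert cache.get("$event1") is None
assert cache.get("$event3") is not None
```

Because the index key need not be unique, one invalidation can evict a whole group of entries at once, which is the point: the event cache can drop every event in a room without walking the full cache.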
@@ -521,6 +539,11 @@ def add_node( if size_callback: cached_cache_len[0] += size_callback(node.value) + if extra_index_cb: + index_key = extra_index_cb(node.key, node.value) + mapped_keys = extra_index.setdefault(index_key, set()) + mapped_keys.add(node.key) + if caches.TRACK_MEMORY_USAGE and metrics: metrics.inc_memory_usage(node.memory) @@ -537,6 +560,14 @@ def delete_node(node: _Node[KT, VT]) -> int: node.run_and_clear_callbacks() + if extra_index_cb: + index_key = extra_index_cb(node.key, node.value) + mapped_keys = extra_index.get(index_key) + if mapped_keys is not None: + mapped_keys.discard(node.key) + if not mapped_keys: + extra_index.pop(index_key, None) + if caches.TRACK_MEMORY_USAGE and metrics: metrics.dec_memory_usage(node.memory) @@ -549,8 +580,7 @@ def cache_get( callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Optional[VT]: - ... + ) -> Optional[VT]: ... @overload def cache_get( @@ -559,8 +589,7 @@ def cache_get( callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Union[T, VT]: - ... + ) -> Union[T, VT]: ... @synchronized def cache_get( @@ -603,16 +632,14 @@ def cache_get_multi( key: tuple, default: Literal[None] = None, update_metrics: bool = True, - ) -> Union[None, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[None, Iterable[Tuple[KT, VT]]]: ... @overload def cache_get_multi( key: tuple, default: T, update_metrics: bool = True, - ) -> Union[T, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[T, Iterable[Tuple[KT, VT]]]: ... @synchronized def cache_get_multi( @@ -697,12 +724,10 @@ def cache_set_default(key: KT, value: VT) -> VT: return value @overload - def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def cache_pop(key: KT, default: T) -> Union[T, VT]: - ... + def cache_pop(key: KT, default: T) -> Union[T, VT]: ... @synchronized def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]: @@ -748,6 +773,8 @@ def cache_clear() -> None: if size_callback: cached_cache_len[0] = 0 + extra_index.clear() + if caches.TRACK_MEMORY_USAGE and metrics: metrics.clear_memory_usage() @@ -755,6 +782,28 @@ def cache_clear() -> None: def cache_contains(key: KT) -> bool: return key in cache + @synchronized + def cache_invalidate_on_extra_index(index_key: KT) -> None: + """Invalidates all entries that match the given extra index key. + + This can only be called when `extra_index_cb` was specified. + """ + + assert extra_index_cb is not None + + keys = extra_index.pop(index_key, None) + if not keys: + return + + for key in keys: + node = cache.pop(key, None) + if not node: + continue + + evicted_len = delete_node(node) + if metrics: + metrics.inc_evictions(EvictionReason.invalidation, evicted_len) + # make sure that we clear out any excess entries after we get resized. self._on_resize = evict @@ -771,6 +820,7 @@ def cache_contains(key: KT) -> bool: self.len = synchronized(cache_len) self.contains = cache_contains self.clear = cache_clear + self.invalidate_on_extra_index = cache_invalidate_on_extra_index def __getitem__(self, key: KT) -> VT: result = self.get(key, _Sentinel.sentinel) @@ -864,6 +914,9 @@ async def invalidate(self, key: KT) -> None: # This method should invalidate any external cache and then invalidate the LruCache. 
return self._lru_cache.invalidate(key) + def invalidate_on_extra_index_local(self, index_key: KT) -> None: + self._lru_cache.invalidate_on_extra_index(index_key) + def invalidate_local(self, key: KT) -> None: """Remove an entry from the local cache diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 082ad8cedb38..b73f690b883f 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -50,8 +50,7 @@ class _SelfSlice(Sized, Protocol): returned. """ - def __getitem__(self: S, i: slice) -> S: - ... + def __getitem__(self: S, i: slice) -> S: ... def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]: diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index dc9bddb00d53..8ead72bb7a4f 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -177,9 +177,9 @@ def new_limiter() -> "_PerHostRatelimiter": clock=clock, config=config, metrics_name=metrics_name ) - self.ratelimiters: DefaultDict[ - str, "_PerHostRatelimiter" - ] = collections.defaultdict(new_limiter) + self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = ( + collections.defaultdict(new_limiter) + ) with _rate_limiter_instances_lock: _rate_limiter_instances.add(self) diff --git a/synapse/visibility.py b/synapse/visibility.py index e58f649aaf44..d1d478129fda 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -129,9 +129,9 @@ async def filter_events_for_client( retention_policies: Dict[str, RetentionPolicy] = {} for room_id in room_ids: - retention_policies[ - room_id - ] = await storage.main.get_retention_policy_for_room(room_id) + retention_policies[room_id] = ( + await storage.main.get_retention_policy_for_room(room_id) + ) def allowed(event: EventBase) -> Optional[EventBase]: return _check_client_allowed_to_see_event( diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index ce85ddf22aa9..bd229cf7e9c5 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -128,7 +128,7 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=IPSet(["192.168/16"]), + ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -147,7 +147,7 @@ def test_get_user_by_req_appservice_valid_token_bad_ip(self) -> None: token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=IPSet(["192.168/16"]), + ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) diff --git a/tests/api/test_errors.py b/tests/api/test_errors.py index 25fa93b9d86a..efa3addf0094 100644 --- a/tests/api/test_errors.py +++ b/tests/api/test_errors.py @@ -33,18 +33,14 @@ def test_key_appears_in_context_but_not_error_dict(self) -> None: self.assertIn("needle", err.debug_context) self.assertNotIn("needle", serialised) - # Create a sub-class to avoid mutating the class-level property. 
- class LimitExceededErrorHeaders(LimitExceededError): - include_retry_after_header = True - def test_limit_exceeded_header(self) -> None: - err = self.LimitExceededErrorHeaders(limiter_name="test", retry_after_ms=100) + err = LimitExceededError(limiter_name="test", retry_after_ms=100) self.assertEqual(err.error_dict(None).get("retry_after_ms"), 100) assert err.headers is not None self.assertEqual(err.headers.get("Retry-After"), "1") def test_limit_exceeded_rounding(self) -> None: - err = self.LimitExceededErrorHeaders(limiter_name="test", retry_after_ms=3001) + err = LimitExceededError(limiter_name="test", retry_after_ms=3001) self.assertEqual(err.error_dict(None).get("retry_after_ms"), 3001) assert err.headers is not None self.assertEqual(err.headers.get("Retry-After"), "4") diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py index 25ac68e6c5d2..b3f9e50f0f30 100644 --- a/tests/handlers/test_deactivate_account.py +++ b/tests/handlers/test_deactivate_account.py @@ -21,12 +21,13 @@ from twisted.test.proto_helpers import MemoryReactor -from synapse.api.constants import AccountDataTypes +from synapse.api.constants import AccountDataTypes, EventTypes, JoinRules, Membership from synapse.push.rulekinds import PRIORITY_CLASS_MAP from synapse.rest import admin -from synapse.rest.client import account, login +from synapse.rest.client import account, login, room from synapse.server import HomeServer from synapse.synapse_rust.push import PushRule +from synapse.types import UserID, create_requester from synapse.util import Clock from tests.unittest import HomeserverTestCase @@ -37,6 +38,7 @@ class DeactivateAccountTestCase(HomeserverTestCase): login.register_servlets, admin.register_servlets, account.register_servlets, + room.register_servlets, ] def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: @@ -44,6 +46,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user = self.register_user("user", "pass") self.token = self.login("user", "pass") + self.handler = self.hs.get_room_member_handler() def _deactivate_my_account(self) -> None: """ @@ -341,3 +344,83 @@ def test_deactivate_account_needs_auth(self) -> None: self.assertEqual(req.code, 401, req) self.assertEqual(req.json_body["flows"], [{"stages": ["m.login.password"]}]) + + def test_deactivate_account_rejects_invites(self) -> None: + """ + Tests that deactivating an account rejects its invite memberships + """ + # Create another user and room just for the invitation + another_user = self.register_user("another_user", "pass") + token = self.login("another_user", "pass") + room_id = self.helper.create_room_as(another_user, is_public=False, tok=token) + + # Invite user to the created room + invite_event, _ = self.get_success( + self.handler.update_membership( + requester=create_requester(another_user), + target=UserID.from_string(self.user), + room_id=room_id, + action=Membership.INVITE, + ) + ) + + # Check that the invite exists + invite = self.get_success( + self._store.get_invited_rooms_for_local_user(self.user) + ) + self.assertEqual(invite[0].event_id, invite_event) + + # Deactivate the user + self._deactivate_my_account() + + # Check that the deactivated user has no invites in the room + after_deactivate_invite = self.get_success( + self._store.get_invited_rooms_for_local_user(self.user) + ) + self.assertEqual(len(after_deactivate_invite), 0) + + def test_deactivate_account_rejects_knocks(self) -> None: + """ + Tests that 
deactivating an account rejects its knock memberships """ + # Create another user and room just for the knock + another_user = self.register_user("another_user", "pass") + token = self.login("another_user", "pass") + room_id = self.helper.create_room_as( + another_user, + is_public=False, + tok=token, + ) + + # Allow room to be knocked at + self.helper.send_state( + room_id, + EventTypes.JoinRules, + {"join_rule": JoinRules.KNOCK}, + tok=token, + ) + + # Knock user at the created room + knock_event, _ = self.get_success( + self.handler.update_membership( + requester=create_requester(self.user), + target=UserID.from_string(self.user), + room_id=room_id, + action=Membership.KNOCK, + ) + ) + + # Check that the knock exists + knocks = self.get_success( + self._store.get_knocked_at_rooms_for_local_user(self.user) + ) + self.assertEqual(knocks[0].event_id, knock_event) + + # Deactivate the user + self._deactivate_my_account() + + # Check that the deactivated user has no knocks + after_deactivate_knocks = self.get_success( + self._store.get_knocked_at_rooms_for_local_user(self.user) + ) + self.assertEqual(len(after_deactivate_knocks), 0) diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 0ee5eee38570..76ab83d1f775 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -24,6 +24,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import EventTypes +from synapse.api.errors import SynapseError from synapse.events import EventBase from synapse.events.snapshot import EventContext, UnpersistedEventContextBase from synapse.rest import admin @@ -51,11 +52,15 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: persistence = self.hs.get_storage_controllers().persistence assert persistence is not None self._persist_event_storage_controller = persistence + self.store = self.hs.get_datastores().main self.user_id = self.register_user("tester", "foobar") device_id = "dev-1" access_token = self.login("tester", "foobar", device_id=device_id) self.room_id = self.helper.create_room_as(self.user_id, tok=access_token) + self.private_room_id = self.helper.create_room_as( + self.user_id, tok=access_token, extra_content={"preset": "private_chat"} + ) self.requester = create_requester(self.user_id, device_id=device_id) @@ -285,6 +290,41 @@ def test_when_empty_prev_events_allowed_reject_event_with_empty_prev_events_and_ AssertionError, ) + def test_call_invite_event_creation_fails_in_public_room(self) -> None: + # get prev_events for room + prev_events = self.get_success( + self.store.get_prev_events_for_room(self.room_id) + ) + + # a call invite in a public room should fail + self.get_failure( + self.handler.create_event( + self.requester, + { + "type": EventTypes.CallInvite, + "room_id": self.room_id, + "sender": self.requester.user.to_string(), + }, + prev_event_ids=prev_events, + auth_event_ids=prev_events, + ), + SynapseError, + ) + + # but a call invite in a private room should succeed + self.get_success( + self.handler.create_event( + self.requester, + { + "type": EventTypes.CallInvite, + "room_id": self.private_room_id, + "sender": self.requester.user.to_string(), + }, + prev_event_ids=prev_events, + auth_event_ids=prev_events, + ) + ) + class ServerAclValidationTestCase(unittest.HomeserverTestCase): servlets = [ diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 37904926e301..1b36324b8fc5 100644 --- a/tests/handlers/test_sync.py +++
b/tests/handlers/test_sync.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Optional +from typing import Collection, List, Optional from unittest.mock import AsyncMock, Mock, patch from twisted.test.proto_helpers import MemoryReactor @@ -25,7 +25,10 @@ from synapse.api.constants import EventTypes, JoinRules from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import Filtering -from synapse.api.room_versions import RoomVersions +from synapse.api.room_versions import RoomVersion, RoomVersions +from synapse.events import EventBase +from synapse.events.snapshot import EventContext +from synapse.federation.federation_base import event_from_pdu_json from synapse.handlers.sync import SyncConfig, SyncResult from synapse.rest import admin from synapse.rest.client import knock, login, room @@ -285,6 +288,114 @@ def test_ban_wins_race_with_join(self) -> None: ) self.assertEqual(eve_initial_sync_after_join.joined, []) + def test_call_invite_in_public_room_not_returned(self) -> None: + user = self.register_user("alice", "password") + tok = self.login(user, "password") + room_id = self.helper.create_room_as(user, is_public=True, tok=tok) + self.handler = self.hs.get_federation_handler() + federation_event_handler = self.hs.get_federation_event_handler() + + async def _check_event_auth( + origin: Optional[str], event: EventBase, context: EventContext + ) -> None: + pass + + federation_event_handler._check_event_auth = _check_event_auth # type: ignore[method-assign] + self.client = self.hs.get_federation_client() + + async def _check_sigs_and_hash_for_pulled_events_and_fetch( + dest: str, pdus: Collection[EventBase], room_version: RoomVersion + ) -> List[EventBase]: + return list(pdus) + + self.client._check_sigs_and_hash_for_pulled_events_and_fetch = _check_sigs_and_hash_for_pulled_events_and_fetch # type: ignore[assignment] + + prev_events = self.get_success(self.store.get_prev_events_for_room(room_id)) + + # create a call invite event + call_event = event_from_pdu_json( + { + "type": EventTypes.CallInvite, + "content": {}, + "room_id": room_id, + "sender": user, + "depth": 32, + "prev_events": prev_events, + "auth_events": prev_events, + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V10, + ) + + self.assertEqual( + self.get_success( + federation_event_handler.on_receive_pdu("test.serv", call_event) + ), + None, + ) + + # check that it is in DB + recent_event = self.get_success(self.store.get_prev_events_for_room(room_id)) + self.assertIn(call_event.event_id, recent_event) + + # but that it does not come down /sync in public room + sync_result: SyncResult = self.get_success( + self.sync_handler.wait_for_sync_for_user( + create_requester(user), generate_sync_config(user) + ) + ) + event_ids = [] + for event in sync_result.joined[0].timeline.events: + event_ids.append(event.event_id) + self.assertNotIn(call_event.event_id, event_ids) + + # it will come down in a private room, though + user2 = self.register_user("bob", "password") + tok2 = self.login(user2, "password") + private_room_id = self.helper.create_room_as( + user2, is_public=False, tok=tok2, extra_content={"preset": "private_chat"} + ) + + priv_prev_events = self.get_success( + self.store.get_prev_events_for_room(private_room_id) + ) + private_call_event = event_from_pdu_json( + { + "type": EventTypes.CallInvite, + "content": {}, + "room_id": private_room_id, + "sender": user, + "depth": 32, + "prev_events": priv_prev_events, + 
"auth_events": priv_prev_events, + "origin_server_ts": self.clock.time_msec(), + }, + RoomVersions.V10, + ) + + self.assertEqual( + self.get_success( + federation_event_handler.on_receive_pdu("test.serv", private_call_event) + ), + None, + ) + + recent_events = self.get_success( + self.store.get_prev_events_for_room(private_room_id) + ) + self.assertIn(private_call_event.event_id, recent_events) + + private_sync_result: SyncResult = self.get_success( + self.sync_handler.wait_for_sync_for_user( + create_requester(user2), generate_sync_config(user2) + ) + ) + priv_event_ids = [] + for event in private_sync_result.joined[0].timeline.events: + priv_event_ids.append(event.event_id) + + self.assertIn(private_call_event.event_id, priv_event_ids) + _request_key = 0 diff --git a/tests/handlers/test_worker_lock.py b/tests/handlers/test_worker_lock.py index 3a4cf82094c2..6e9a15c8eee9 100644 --- a/tests/handlers/test_worker_lock.py +++ b/tests/handlers/test_worker_lock.py @@ -27,6 +27,7 @@ from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase +from tests.utils import test_timeout class WorkerLockTestCase(unittest.HomeserverTestCase): @@ -50,6 +51,28 @@ def test_wait_for_lock_locally(self) -> None: self.get_success(d2) self.get_success(lock2.__aexit__(None, None, None)) + def test_lock_contention(self) -> None: + """Test lock contention when a lot of locks wait on a single worker""" + + # It takes around 0.5s on a 5+ years old laptop + with test_timeout(5): + nb_locks = 500 + d = self._take_locks(nb_locks) + self.assertEqual(self.get_success(d), nb_locks) + + async def _take_locks(self, nb_locks: int) -> int: + locks = [ + self.hs.get_worker_locks_handler().acquire_lock("test_lock", "") + for _ in range(nb_locks) + ] + + nb_locks_taken = 0 + for lock in locks: + async with lock: + nb_locks_taken += 1 + + return nb_locks_taken + class WorkerLockWorkersTestCase(BaseMultiWorkerStreamTestCase): def prepare( diff --git a/tests/replication/_base.py b/tests/replication/_base.py index d2220f81950d..8437da1cdde1 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -495,9 +495,9 @@ class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" def __init__(self) -> None: - self._subscribers_by_channel: Dict[ - bytes, Set["FakeRedisPubSubProtocol"] - ] = defaultdict(set) + self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = ( + defaultdict(set) + ) def add_subscriber(self, conn: "FakeRedisPubSubProtocol", channel: bytes) -> None: """A connection has called SUBSCRIBE""" diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index 92658542235c..c5da1e968695 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -503,7 +503,7 @@ def test_all_users(self) -> None: channel = self.make_request( "GET", - self.url + "?deactivated=true", + f"{self.url}?deactivated=true", {}, access_token=self.admin_user_tok, ) @@ -982,6 +982,56 @@ def test_filter_admins(self) -> None: self.assertEqual(1, channel.json_body["total"]) self.assertFalse(channel.json_body["users"][0]["admin"]) + def test_filter_deactivated_users(self) -> None: + """ + Tests whether the various values of the query parameter `deactivated` lead to the + expected result set. + """ + users_url_v3 = self.url.replace("v2", "v3") + + # Register an additional non admin user + user_id = self.register_user("user", "pass", admin=False) + + # Deactivate that user, requesting erasure. 
+ deactivate_account_handler = self.hs.get_deactivate_account_handler() + self.get_success( + deactivate_account_handler.deactivate_account( + user_id, erase_data=True, requester=create_requester(user_id) + ) + ) + + # Query all users + channel = self.make_request( + "GET", + users_url_v3, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code, channel.result) + self.assertEqual(2, channel.json_body["total"]) + + # Query deactivated users + channel = self.make_request( + "GET", + f"{users_url_v3}?deactivated=true", + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code, channel.result) + self.assertEqual(1, channel.json_body["total"]) + self.assertEqual("@user:test", channel.json_body["users"][0]["name"]) + + # Query non-deactivated users + channel = self.make_request( + "GET", + f"{users_url_v3}?deactivated=false", + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code, channel.result) + self.assertEqual(1, channel.json_body["total"]) + self.assertEqual("@admin:test", channel.json_body["users"][0]["name"]) + @override_config( { "experimental_features": { @@ -1130,7 +1180,7 @@ def test_erasure_status(self) -> None: # They should appear in the list users API, marked as not erased. channel = self.make_request( "GET", - self.url + "?deactivated=true", + f"{self.url}?deactivated=true", access_token=self.admin_user_tok, ) users = {user["name"]: user for user in channel.json_body["users"]} @@ -1194,7 +1244,7 @@ def _order_test( dir: The direction of ordering to give the server """ - url = self.url + "?deactivated=true&" + url = f"{self.url}?deactivated=true&" if order_by is not None: url += "order_by=%s&" % (order_by,) if dir is not None and dir in ("b", "f"): diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py index 0a894ad081d0..9cfc6b224f76 100644 --- a/tests/rest/client/test_filter.py +++ b/tests/rest/client/test_filter.py @@ -72,7 +72,7 @@ def test_add_filter_for_other_user(self) -> None: def test_add_filter_non_local_user(self) -> None: _is_mine = self.hs.is_mine - self.hs.is_mine = lambda target_user: False # type: ignore[method-assign] + self.hs.is_mine = lambda target_user: False # type: ignore[assignment] channel = self.make_request( "POST", "/_matrix/client/r0/user/%s/filter" % (self.user_id), diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index 3d3a7b0aa711..3a1f150082e0 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -177,7 +177,6 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: # rc_login dict here, we need to set this manually as well "account": {"per_second": 10000, "burst_count": 10000}, }, - "experimental_features": {"msc4041_enabled": True}, } ) def test_POST_ratelimiting_per_address(self) -> None: @@ -229,7 +228,6 @@ def test_POST_ratelimiting_per_address(self) -> None: # rc_login dict here, we need to set this manually as well "address": {"per_second": 10000, "burst_count": 10000}, }, - "experimental_features": {"msc4041_enabled": True}, } ) def test_POST_ratelimiting_per_account(self) -> None: @@ -278,7 +276,6 @@ def test_POST_ratelimiting_per_account(self) -> None: "address": {"per_second": 10000, "burst_count": 10000}, "failed_attempts": {"per_second": 0.17, "burst_count": 5}, }, - "experimental_features": {"msc4041_enabled": True}, } ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: diff --git a/tests/rest/client/test_read_marker.py 
b/tests/rest/client/test_read_marker.py index 2fe350e1e857..0b4ad685b3cb 100644 --- a/tests/rest/client/test_read_marker.py +++ b/tests/rest/client/test_read_marker.py @@ -78,7 +78,7 @@ def send_message() -> str: channel = self.make_request( "POST", - "/rooms/!abc:beep/read_markers", + f"/rooms/{room_id}/read_markers", content={ "m.fully_read": event_id_1, }, @@ -90,7 +90,7 @@ def send_message() -> str: event_id_2 = send_message() channel = self.make_request( "POST", - "/rooms/!abc:beep/read_markers", + f"/rooms/{room_id}/read_markers", content={ "m.fully_read": event_id_2, }, @@ -123,7 +123,7 @@ def send_message() -> str: channel = self.make_request( "POST", - "/rooms/!abc:beep/read_markers", + f"/rooms/{room_id}/read_markers", content={ "m.fully_read": event_id_1, }, @@ -142,7 +142,7 @@ def send_message() -> str: event_id_2 = send_message() channel = self.make_request( "POST", - "/rooms/!abc:beep/read_markers", + f"/rooms/{room_id}/read_markers", content={ "m.fully_read": event_id_2, }, diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index b11a73e92be2..1364615085e6 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -93,6 +93,7 @@ class RoomPermissionsTestCase(RoomBase): rmcreator_id = "@notme:red" def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store_controllers = hs.get_storage_controllers() self.helper.auth_user_id = self.rmcreator_id # create some rooms under the name rmcreator_id self.uncreated_rmid = "!aa:test" @@ -482,6 +483,23 @@ def test_member_event_from_ban(self) -> None: expect_code=HTTPStatus.OK, ) + def test_default_call_invite_power_level(self) -> None: + pl_event = self.get_success( + self.store_controllers.state.get_current_state_event( + self.created_public_rmid, EventTypes.PowerLevels, "" + ) + ) + assert pl_event is not None + self.assertEqual(50, pl_event.content.get("m.call.invite")) + + private_pl_event = self.get_success( + self.store_controllers.state.get_current_state_event( + self.created_rmid, EventTypes.PowerLevels, "" + ) + ) + assert private_pl_event is not None + self.assertEqual(None, private_pl_event.content.get("m.call.invite")) + class RoomStateTestCase(RoomBase): """Tests /rooms/$room_id/state.""" @@ -1222,9 +1240,9 @@ def test_spam_checker_may_join_room(self) -> None: """ # Register a dummy callback. Make it allow all room joins for now. - return_value: Union[ - Literal["NOT_SPAM"], Tuple[Codes, dict], Codes - ] = synapse.module_api.NOT_SPAM + return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = ( + synapse.module_api.NOT_SPAM + ) async def user_may_join_room( userid: str, @@ -1664,9 +1682,9 @@ def test_spam_checker_check_event_for_spam( expected_fields: dict, ) -> None: class SpamCheck: - mock_return_value: Union[ - str, bool, Codes, Tuple[Codes, JsonDict], bool - ] = "NOT_SPAM" + mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict], bool] = ( + "NOT_SPAM" + ) mock_content: Optional[JsonDict] = None async def check_event_for_spam( diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py index 10cfe22d8ef8..daa68d78b931 100644 --- a/tests/rest/client/utils.py +++ b/tests/rest/client/utils.py @@ -87,8 +87,7 @@ def create_room_as( expect_code: Literal[200] = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> str: - ... + ) -> str: ... 
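For readers skimming the reflowed stubs above: `@overload` declarations exist only for the type checker, which is why their bodies are a bare `...`; the single undecorated definition that follows carries the runtime behaviour. A generic sketch of the pattern, with hypothetical names:

```python
from typing import Literal, Optional, overload

@overload
def fetch(key: str, strict: Literal[True]) -> str: ...
@overload
def fetch(key: str, strict: Literal[False] = ...) -> Optional[str]: ...

def fetch(key: str, strict: bool = False) -> Optional[str]:
    # Callers see a precise return type depending on `strict`; the
    # implementation itself is checked against every overload.
    value = {"a": "1"}.get(key)
    if strict and value is None:
        raise KeyError(key)
    return value
```

This mirrors `create_room_as`: with `expect_code` pinned to `200` the room ID is guaranteed, so that overload returns `str`, while the general form returns `Optional[str]`.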
@overload def create_room_as( @@ -100,8 +99,7 @@ def create_room_as( expect_code: int = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> Optional[str]: - ... + ) -> Optional[str]: ... def create_room_as( self, diff --git a/tests/server.py b/tests/server.py index f0cc4206b05a..4aaa91e956a6 100644 --- a/tests/server.py +++ b/tests/server.py @@ -47,7 +47,7 @@ Union, cast, ) -from unittest.mock import Mock +from unittest.mock import Mock, patch import attr from incremental import Version @@ -55,6 +55,7 @@ from zope.interface import implementer import twisted +from twisted.enterprise import adbapi from twisted.internet import address, tcp, threads, udp from twisted.internet._resolver import SimpleResolverComplexifier from twisted.internet.defer import Deferred, fail, maybeDeferred, succeed @@ -94,8 +95,8 @@ ) from synapse.server import HomeServer from synapse.storage import DataStore -from synapse.storage.database import LoggingDatabaseConnection -from synapse.storage.engines import create_engine +from synapse.storage.database import LoggingDatabaseConnection, make_pool +from synapse.storage.engines import BaseDatabaseEngine, create_engine from synapse.storage.prepare_database import prepare_database from synapse.types import ISynapseReactor, JsonDict from synapse.util import Clock @@ -670,6 +671,53 @@ def validate_connector(connector: tcp.Connector, expected_ip: str) -> None: ) +def make_fake_db_pool( + reactor: ISynapseReactor, + db_config: DatabaseConnectionConfig, + engine: BaseDatabaseEngine, +) -> adbapi.ConnectionPool: + """Wrapper for `make_pool` which builds a pool which runs db queries synchronously. + + For more deterministic testing, we don't use a regular db connection pool: instead + we run all db queries synchronously on the test reactor's main thread. This function + is a drop-in replacement for the normal `make_pool` which builds such a connection + pool. + """ + pool = make_pool(reactor, db_config, engine) + + def runWithConnection( + func: Callable[..., R], *args: Any, **kwargs: Any + ) -> Awaitable[R]: + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runWithConnection, + func, + *args, + **kwargs, + ) + + def runInteraction( + desc: str, func: Callable[..., R], *args: Any, **kwargs: Any + ) -> Awaitable[R]: + return threads.deferToThreadPool( + pool._reactor, + pool.threadpool, + pool._runInteraction, + desc, + func, + *args, + **kwargs, + ) + + pool.runWithConnection = runWithConnection # type: ignore[method-assign] + pool.runInteraction = runInteraction # type: ignore[assignment] + # Replace the thread pool with a threadless 'thread' pool + pool.threadpool = ThreadPool(reactor) + pool.running = True + return pool + + class ThreadPool: """ Threadless thread pool. @@ -706,52 +754,6 @@ def _(res: Any) -> None: return d -def _make_test_homeserver_synchronous(server: HomeServer) -> None: - """ - Make the given test homeserver's database interactions synchronous. 
- """ - - clock = server.get_clock() - - for database in server.get_datastores().databases: - pool = database._db_pool - - def runWithConnection( - func: Callable[..., R], *args: Any, **kwargs: Any - ) -> Awaitable[R]: - return threads.deferToThreadPool( - pool._reactor, - pool.threadpool, - pool._runWithConnection, - func, - *args, - **kwargs, - ) - - def runInteraction( - desc: str, func: Callable[..., R], *args: Any, **kwargs: Any - ) -> Awaitable[R]: - return threads.deferToThreadPool( - pool._reactor, - pool.threadpool, - pool._runInteraction, - desc, - func, - *args, - **kwargs, - ) - - pool.runWithConnection = runWithConnection # type: ignore[method-assign] - pool.runInteraction = runInteraction # type: ignore[assignment] - # Replace the thread pool with a threadless 'thread' pool - pool.threadpool = ThreadPool(clock._reactor) - pool.running = True - - # We've just changed the Databases to run DB transactions on the same - # thread, so we need to disable the dedicated thread behaviour. - server.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False - - def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]: clock = ThreadedMemoryReactorClock() hs_clock = Clock(clock) @@ -1067,7 +1069,14 @@ def setup_test_homeserver( # Mock TLS hs.tls_server_context_factory = Mock() - hs.setup() + # Patch `make_pool` before initialising the database, to make database transactions + # synchronous for testing. + with patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool): + hs.setup() + + # Since we've changed the databases to run DB transactions on the same + # thread, we need to stop the event fetcher hogging that one thread. + hs.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False if USE_POSTGRES_FOR_TESTS: database_pool = hs.get_datastores().databases[0] @@ -1137,9 +1146,6 @@ async def validate_hash(p: str, h: str) -> bool: hs.get_auth_handler().validate_hash = validate_hash # type: ignore[assignment] - # Make the threadpool and database transactions synchronous for testing. - _make_test_homeserver_synchronous(hs) - # Load any configured modules into the homeserver module_api = hs.get_module_api() for module, module_config in hs.config.modules.loaded_modules: diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index 249c6b39f732..d5b999628477 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -337,15 +337,15 @@ def test_expiry_logic(self) -> None: """Simple test to ensure that _expire_rooms_to_exclude_from_dummy_event_insertion() expires old entries correctly. 
""" - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "1" - ] = 100000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "2" - ] = 200000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "3" - ] = 300000 + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["1"] = ( + 100000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["2"] = ( + 200000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["3"] = ( + 300000 + ) self.event_creator_handler._expire_rooms_to_exclude_from_dummy_event_insertion() # All entries within time frame diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index 01c532480228..1eab89f140b1 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -328,9 +328,11 @@ def _check_test_cases( self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), @@ -346,9 +348,11 @@ def _check_test_cases( self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), diff --git a/tests/unittest.py b/tests/unittest.py index 33c9a384ea52..6fe0cd4a2dcd 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -109,8 +109,7 @@ class _TypedFailure(Generic[_ExcType], Protocol): """Extension to twisted.Failure, where the 'value' has a certain type.""" @property - def value(self) -> _ExcType: - ... + def value(self) -> _ExcType: ... def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]: diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index d4268bc2e273..7cbb1007dad8 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -34,8 +34,7 @@ class UnblockFunction(Protocol): - def __call__(self, pump_reactor: bool = True) -> None: - ... + def __call__(self, pump_reactor: bool = True) -> None: ... 
class LinearizerTestCase(unittest.TestCase): diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index dcc2b4be89df..3f0d8139f8e7 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -383,3 +383,34 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: # the items should still be in the cache self.assertEqual(cache.get("key1"), 1) self.assertEqual(cache.get("key2"), 2) + + +class ExtraIndexLruCacheTestCase(unittest.HomeserverTestCase): + def test_invalidate_simple(self) -> None: + cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache["key1"] = 1 + cache["key2"] = 2 + + cache.invalidate_on_extra_index("key1") + self.assertEqual(cache.get("key1"), 1) + self.assertEqual(cache.get("key2"), 2) + + cache.invalidate_on_extra_index("1") + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), 2) + + def test_invalidate_multi(self) -> None: + cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache["key1"] = 1 + cache["key2"] = 1 + cache["key3"] = 2 + + cache.invalidate_on_extra_index("key1") + self.assertEqual(cache.get("key1"), 1) + self.assertEqual(cache.get("key2"), 1) + self.assertEqual(cache.get("key3"), 2) + + cache.invalidate_on_extra_index("1") + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), None) + self.assertEqual(cache.get("key3"), 2) diff --git a/tests/utils.py b/tests/utils.py index b5dbd60a9c27..9fd26ef348ea 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,7 +21,20 @@ import atexit import os -from typing import Any, Callable, Dict, List, Tuple, Type, TypeVar, Union, overload +import signal +from types import FrameType, TracebackType +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) import attr from typing_extensions import Literal, ParamSpec @@ -121,13 +134,11 @@ def _cleanup() -> None: @overload -def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: - ... +def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: ... @overload -def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: - ... +def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: ... def default_config( @@ -381,3 +392,30 @@ def checked_cast(type: Type[T], x: object) -> T: """ assert isinstance(x, type) return x + + +class TestTimeout(Exception): + pass + + +class test_timeout: + def __init__(self, seconds: int, error_message: Optional[str] = None) -> None: + if error_message is None: + error_message = "test timed out after {}s.".format(seconds) + self.seconds = seconds + self.error_message = error_message + + def handle_timeout(self, signum: int, frame: Optional[FrameType]) -> None: + raise TestTimeout(self.error_message) + + def __enter__(self) -> None: + signal.signal(signal.SIGALRM, self.handle_timeout) + signal.alarm(self.seconds) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + signal.alarm(0)
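The `test_timeout` helper added to `tests/utils.py` above is a `SIGALRM`-based guard, so it only works on the main thread of Unix-like platforms, and `signal.alarm` limits it to whole-second granularity; `__exit__` calls `alarm(0)` to cancel the pending signal on a clean exit. A usage sketch mirroring `test_lock_contention` earlier in this diff (`slow_work` is a stand-in for whatever is under test):

```python
import time

from tests.utils import TestTimeout, test_timeout

def slow_work() -> None:
    # Stand-in workload; if it ran past the deadline, the SIGALRM
    # handler would raise TestTimeout out of this frame.
    time.sleep(1)

try:
    with test_timeout(5):
        slow_work()
except TestTimeout:
    raise AssertionError("work did not finish within 5s")
```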