Mirror of https://github.com/element-hq/synapse

Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes

Commit 56c0c711c1: 81 changed files with 1220 additions and 350 deletions
.git-blame-ignore-revs (new file, 8 lines)

@@ -0,0 +1,8 @@
+# Black reformatting (#5482).
+32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
+
+# Target Python 3.5 with black (#8664).
+aff1eb7c671b0a3813407321d2702ec46c71fa56
+
+# Update black to 20.8b1 (#9381).
+0a00b7ff14890987f09112a2ae696c61001e6cf1
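(Aside, not part of the diff: git does not consult an ignore-revs file automatically, so contributors have to wire it up themselves. A sketch of the usual setup, assuming git 2.23 or later:)

```
# one-off: skip the listed auto-formatting commits for a single blame
git blame --ignore-revs-file .git-blame-ignore-revs synapse/api/auth.py

# or configure it once per clone so every blame picks it up
git config blame.ignoreRevsFile .git-blame-ignore-revs
```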
CHANGES.md (21 lines changed)

@@ -1,3 +1,18 @@
+Removal warning
+---------------
+
+Note that this release deprecates the ability for appservices to call `POST /_matrix/client/r0/register` without the body parameter `type`. Appservice developers should use a `type` value of `m.login.application_service` as per the spec. In future releases, calling this endpoint with an access token but without a valid type will fail.
+
+
+Synapse 1.29.0 (2021-03-08)
+===========================
+
+Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
+
+
+No significant changes.
+
+
 Synapse 1.29.0rc1 (2021-03-04)
 ==============================
 
@@ -52,12 +67,6 @@ Internal Changes
 - Bump the versions of mypy and mypy-zope used for static type checking. ([\#9529](https://github.com/matrix-org/synapse/issues/9529))
 
 
-Synapse 1.xx.0
-==============
-
-Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
-
-
 Synapse 1.28.0 (2021-02-25)
 ===========================
 
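(Aside on the removal warning above, not part of the diff: once the deprecation takes effect, an appservice registering a user must include the `type` field in the request body. A sketch of the compliant request per the Matrix spec; the homeserver URL, token and username are placeholders:)

```
curl -X POST 'https://homeserver.example/_matrix/client/r0/register' \
  -H 'Authorization: Bearer <as_token>' \
  -H 'Content-Type: application/json' \
  -d '{"type": "m.login.application_service", "username": "_example_bot"}'
```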
MANIFEST.in

@@ -20,9 +20,10 @@ recursive-include scripts *
 recursive-include scripts-dev *
 recursive-include synapse *.pyi
 recursive-include tests *.py
-include tests/http/ca.crt
-include tests/http/ca.key
-include tests/http/server.key
+recursive-include tests *.pem
+recursive-include tests *.p8
+recursive-include tests *.crt
+recursive-include tests *.key
 
 recursive-include synapse/res *
 recursive-include synapse/static *.css
README.rst

@@ -183,8 +183,9 @@ Using a reverse proxy with Synapse
 It is recommended to put a reverse proxy such as
 `nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
 `Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
-`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
-`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
+`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
+`HAProxy <https://www.haproxy.org/>`_ or
+`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
 doing so is that it means that you can expose the default https port (443) to
 Matrix clients without needing to run Synapse with root privileges.
UPGRADE.rst (13 lines changed)

@@ -98,9 +98,9 @@ will log a warning on each received request.
 
 To avoid the warning, administrators using a reverse proxy should ensure that
 the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
-indicate the protocol used by the client. See the [reverse proxy
-documentation](docs/reverse_proxy.md), where the example configurations have
-been updated to show how to set this header.
+indicate the protocol used by the client. See the `reverse proxy documentation
+<docs/reverse_proxy.md>`_, where the example configurations have been updated to
+show how to set this header.
 
 (Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
 sets `X-Forwarded-Proto` by default.)

@@ -124,6 +124,13 @@ This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers
 need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
 "ACS location" (also known as "allowed callback URLs") at the identity provider.
 
+The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
+``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
+provider uses this property to validate or otherwise identify Synapse, its configuration
+will need to be updated to use the new URL. Alternatively you could create a new, separate
+"EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
+the existing "EntityDescriptor" as they were.
+
 Changes to HTML templates
 -------------------------
 
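(Aside on the upgrade note above, a sketch rather than part of this diff: with nginx, the header can be set by a single directive in the Synapse `location` block; `$scheme` expands to `http` or `https` to match how the client connected:)

```
location /_matrix {
    proxy_pass http://localhost:8008;
    proxy_set_header X-Forwarded-For $remote_addr;
    # tell Synapse which protocol the client used
    proxy_set_header X-Forwarded-Proto $scheme;
}
```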
changelog.d/9458.misc (new file)
@@ -0,0 +1 @@
+Add tests to ResponseCache.

changelog.d/9473.bugfix (new file)
@@ -0,0 +1 @@
+Fix long-standing bug when generating thumbnails for some images with transparency: `TypeError: cannot unpack non-iterable int object`.

changelog.d/9508.doc (new file)
@@ -0,0 +1 @@
+Add relayd entry to reverse proxy example configurations.

changelog.d/9510.feature (new file)
@@ -0,0 +1 @@
+Add prometheus metrics for number of users successfully registering and logging in.

changelog.d/9511.feature (new file)
@@ -0,0 +1 @@
+Add prometheus metrics for number of users successfully registering and logging in.

changelog.d/9520.misc (new file)
@@ -0,0 +1 @@
+Add type hints to purge room and server notice admin API.

changelog.d/9523.misc (new file)
@@ -0,0 +1 @@
+Add extra logging to ObservableDeferred when callbacks throw exceptions.

changelog.d/9528.misc (new file)
@@ -0,0 +1 @@
+Fix incorrect type hints.

changelog.d/9540.feature (new file)
@@ -0,0 +1 @@
+Add `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time` prometheus metrics, which monitor federation delays by reporting the timestamps of messages sent and received to a set of remote servers.

changelog.d/9540.removal (new file)
@@ -0,0 +1 @@
+The `synapse_federation_last_sent_pdu_age` and `synapse_federation_last_received_pdu_age` prometheus metrics have been removed. They are replaced by `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time`.

changelog.d/9541.misc (new file)
@@ -0,0 +1 @@
+Add an additional test for purging a room.

changelog.d/9543.misc (new file)
@@ -0,0 +1 @@
+Fix incorrect type hints.

changelog.d/9549.feature (new file)
@@ -0,0 +1 @@
+Add support for generating JSON Web Tokens dynamically for use as OIDC client secrets.

changelog.d/9550.doc (new file)
@@ -0,0 +1 @@
+Improve the SAML2 upgrade notes for 1.27.0.

changelog.d/9559.removal (new file)
@@ -0,0 +1 @@
+Registering an Application Service user without using the `m.login.application_service` login type will be unsupported in an upcoming Synapse release.

changelog.d/9560.misc (new file)
@@ -0,0 +1 @@
+Add a `.git-blame-ignore-revs` file with the hashes of auto-formatting.

changelog.d/9562.misc (new file)
@@ -0,0 +1 @@
+Fix spurious errors reported by the `config-lint.sh` script.

changelog.d/9567.bugfix (new file)
@@ -0,0 +1 @@
+Fix bug where federation requests were not correctly retried on 5xx responses.

changelog.d/9571.doc (new file)
@@ -0,0 +1 @@
+Link to the "List user's media" admin API from the media admin API docs.
debian/changelog (7 lines changed, vendored)

@@ -1,9 +1,12 @@
-matrix-synapse-py3 (1.29.0) UNRELEASED; urgency=medium
+matrix-synapse-py3 (1.29.0) stable; urgency=medium
 
   [ Jonathan de Jong ]
   * Remove the python -B flag (don't generate bytecode) in scripts and documentation.
 
- -- Synapse Packaging team <packages@matrix.org>  Fri, 26 Feb 2021 14:41:31 +0100
+  [ Synapse Packaging team ]
+  * New synapse release 1.29.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 08 Mar 2021 13:51:50 +0000
 
 matrix-synapse-py3 (1.28.0) stable; urgency=medium
 
docs/admin_api/media_admin_api.md

@@ -1,5 +1,7 @@
 # Contents
-- [List all media in a room](#list-all-media-in-a-room)
+- [Querying media](#querying-media)
+  * [List all media in a room](#list-all-media-in-a-room)
+  * [List all media uploaded by a user](#list-all-media-uploaded-by-a-user)
 - [Quarantine media](#quarantine-media)
   * [Quarantining media by ID](#quarantining-media-by-id)
   * [Quarantining media in a room](#quarantining-media-in-a-room)

@@ -10,7 +12,11 @@
   * [Delete local media by date or size](#delete-local-media-by-date-or-size)
 - [Purge Remote Media API](#purge-remote-media-api)
 
-# List all media in a room
+# Querying media
+
+These APIs allow extracting media information from the homeserver.
+
+## List all media in a room
 
 This API gets a list of known media in a room.
 However, it only shows media from unencrypted events or rooms.

@@ -36,6 +42,12 @@ The API returns a JSON body like the following:
 }
 ```
 
+## List all media uploaded by a user
+
+Listing all media that has been uploaded by a local user can be achieved through
+the use of the [List media of a user](user_admin_api.rst#list-media-of-a-user)
+Admin API.
+
 # Quarantine media
 
 Quarantining media means that it is marked as inaccessible by users. It applies
docs/openid.md

@@ -386,7 +386,7 @@ oidc_providers:
     config:
       subject_claim: "id"
      localpart_template: "{{ user.login }}"
-      display_name_template: "{{ user.full_name }}"
+      display_name_template: "{{ user.full_name }}"
 ```
 
 ### XWiki

@@ -401,8 +401,7 @@ oidc_providers:
     idp_name: "XWiki"
     issuer: "https://myxwikihost/xwiki/oidc/"
     client_id: "your-client-id" # TO BE FILLED
-    # Needed until https://github.com/matrix-org/synapse/issues/9212 is fixed
-    client_secret: "dontcare"
+    client_auth_method: none
     scopes: ["openid", "profile"]
     user_profile_method: "userinfo_endpoint"
     user_mapping_provider:

@@ -410,3 +409,40 @@ oidc_providers:
         localpart_template: "{{ user.preferred_username }}"
         display_name_template: "{{ user.name }}"
 ```
+
+## Apple
+
+Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account.
+
+You will need to create a new "Services ID" for SiWA, and create and download a
+private key with "SiWA" enabled.
+
+As well as the private key file, you will need:
+ * Client ID: the "identifier" you gave the "Services ID"
+ * Team ID: a 10-character ID associated with your developer account.
+ * Key ID: the 10-character identifier for the key.
+
+https://help.apple.com/developer-account/?lang=en#/dev77c875b7e has more
+documentation on setting up SiWA.
+
+The synapse config will look like this:
+
+```yaml
+  - idp_id: apple
+    idp_name: Apple
+    issuer: "https://appleid.apple.com"
+    client_id: "your-client-id" # Set to the "identifier" for your "Services ID"
+    client_auth_method: "client_secret_post"
+    client_secret_jwt_key:
+      key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file
+      jwt_header:
+        alg: ES256
+        kid: "KEYIDCODE" # Set to the 10-char Key ID
+      jwt_payload:
+        iss: TEAMIDCODE # Set to the 10-char Team ID
+    scopes: ["name", "email", "openid"]
+    authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post
+    user_mapping_provider:
+      config:
+        email_template: "{{ user.email }}"
+```
docs/reverse_proxy.md

@@ -3,8 +3,9 @@
 It is recommended to put a reverse proxy such as
 [nginx](https://nginx.org/en/docs/http/ngx_http_proxy_module.html),
 [Apache](https://httpd.apache.org/docs/current/mod/mod_proxy_http.html),
-[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy) or
-[HAProxy](https://www.haproxy.org/) in front of Synapse. One advantage
+[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy),
+[HAProxy](https://www.haproxy.org/) or
+[relayd](https://man.openbsd.org/relayd.8) in front of Synapse. One advantage
 of doing so is that it means that you can expose the default https port
 (443) to Matrix clients without needing to run Synapse with root
 privileges.

@@ -162,6 +163,52 @@ backend matrix
   server matrix 127.0.0.1:8008
 ```
 
+### Relayd
+
+```
+table <webserver> { 127.0.0.1 }
+table <matrixserver> { 127.0.0.1 }
+
+http protocol "https" {
+    tls { no tlsv1.0, ciphers "HIGH" }
+    tls keypair "example.com"
+    match header set "X-Forwarded-For" value "$REMOTE_ADDR"
+    match header set "X-Forwarded-Proto" value "https"
+
+    # set CORS header for .well-known/matrix/server, .well-known/matrix/client
+    # httpd does not support setting headers, so do it here
+    match request path "/.well-known/matrix/*" tag "matrix-cors"
+    match response tagged "matrix-cors" header set "Access-Control-Allow-Origin" value "*"
+
+    pass quick path "/_matrix/*" forward to <matrixserver>
+    pass quick path "/_synapse/client/*" forward to <matrixserver>
+
+    # pass on non-matrix traffic to webserver
+    pass forward to <webserver>
+}
+
+relay "https_traffic" {
+    listen on egress port 443 tls
+    protocol "https"
+    forward to <matrixserver> port 8008 check tcp
+    forward to <webserver> port 8080 check tcp
+}
+
+http protocol "matrix" {
+    tls { no tlsv1.0, ciphers "HIGH" }
+    tls keypair "example.com"
+    block
+    pass quick path "/_matrix/*" forward to <matrixserver>
+    pass quick path "/_synapse/client/*" forward to <matrixserver>
+}
+
+relay "matrix_federation" {
+    listen on egress port 8448 tls
+    protocol "matrix"
+    forward to <matrixserver> port 8008 check tcp
+}
+```
+
 ## Homeserver Configuration
 
 You will also want to set `bind_addresses: ['127.0.0.1']` and
docs/sample_config.yaml

@@ -89,8 +89,7 @@ pid_file: DATADIR/homeserver.pid
 # Whether to require authentication to retrieve profile data (avatars,
 # display names) of other users through the client API. Defaults to
 # 'false'. Note that profile data is also available via the federation
-# API, so this setting is of limited value if federation is enabled on
-# the server.
+# API, unless allow_profile_lookup_over_federation is set to false.
 #
 #require_auth_for_profile_requests: true
 

@@ -1780,7 +1779,26 @@ saml2_config:
 #
 #       client_id: Required. oauth2 client id to use.
 #
-#       client_secret: Required. oauth2 client secret to use.
+#       client_secret: oauth2 client secret to use. May be omitted if
+#           client_secret_jwt_key is given, or if client_auth_method is 'none'.
+#
+#       client_secret_jwt_key: Alternative to client_secret: details of a key used
+#           to create a JSON Web Token to be used as an OAuth2 client secret. If
+#           given, must be a dictionary with the following properties:
+#
+#           key: a pem-encoded signing key. Must be a suitable key for the
+#               algorithm specified. Required unless 'key_file' is given.
+#
+#           key_file: the path to file containing a pem-encoded signing key file.
+#               Required unless 'key' is given.
+#
+#           jwt_header: a dictionary giving properties to include in the JWT
+#               header. Must include the key 'alg', giving the algorithm used to
+#               sign the JWT, such as "ES256", using the JWA identifiers in
+#               RFC7518.
+#
+#           jwt_payload: an optional dictionary giving properties to include in
+#               the JWT payload. Normally this should include an 'iss' key.
+#
 #       client_auth_method: auth method to use when exchanging the token. Valid
 #           values are 'client_secret_basic' (default), 'client_secret_post' and
mypy.ini (1 line changed)

@@ -69,6 +69,7 @@ files =
     synapse/util/async_helpers.py,
     synapse/util/caches,
     synapse/util/metrics.py,
+    synapse/util/macaroons.py,
     synapse/util/stringutils.py,
     tests/replication,
     tests/test_utils,
scripts-dev/config-lint.sh

@@ -2,9 +2,14 @@
 # Find linting errors in Synapse's default config file.
 # Exits with 0 if there are no problems, or another code otherwise.
 
+# cd to the root of the repository
+cd `dirname $0`/..
+
+# Restore backup of sample config upon script exit
+trap "mv docs/sample_config.yaml.bak docs/sample_config.yaml" EXIT
+
 # Fix non-lowercase true/false values
 sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
-rm docs/sample_config.yaml.bak
 
 # Check if anything changed
-git diff --exit-code docs/sample_config.yaml
+diff docs/sample_config.yaml docs/sample_config.yaml.bak
setup.cfg

@@ -3,6 +3,7 @@ test_suite = tests
 
 [check-manifest]
 ignore =
+    .git-blame-ignore-revs
     contrib
     contrib/*
     docs/*
synapse/__init__.py

@@ -48,7 +48,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.29.0rc1"
+__version__ = "1.29.0"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
synapse/api/auth.py

@@ -39,6 +39,7 @@ from synapse.logging import opentracing as opentracing
 from synapse.storage.databases.main.registration import TokenLookupResult
 from synapse.types import StateMap, UserID
 from synapse.util.caches.lrucache import LruCache
+from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
 

@@ -408,7 +409,7 @@ class Auth:
             raise _InvalidMacaroonException()
 
         try:
-            user_id = self.get_user_id_from_macaroon(macaroon)
+            user_id = get_value_from_macaroon(macaroon, "user_id")
 
             guest = False
             for caveat in macaroon.caveats:

@@ -416,7 +417,12 @@ class Auth:
                     guest = True
 
             self.validate_macaroon(macaroon, rights, user_id=user_id)
-        except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
+        except (
+            pymacaroons.exceptions.MacaroonException,
+            KeyError,
+            TypeError,
+            ValueError,
+        ):
             raise InvalidClientTokenError("Invalid macaroon passed.")
 
         if rights == "access":

@@ -424,27 +430,6 @@ class Auth:
 
         return user_id, guest
 
-    def get_user_id_from_macaroon(self, macaroon):
-        """Retrieve the user_id given by the caveats on the macaroon.
-
-        Does *not* validate the macaroon.
-
-        Args:
-            macaroon (pymacaroons.Macaroon): The macaroon to validate
-
-        Returns:
-            (str) user id
-
-        Raises:
-            InvalidClientCredentialsError if there is no user_id caveat in the
-                macaroon
-        """
-        user_prefix = "user_id = "
-        for caveat in macaroon.caveats:
-            if caveat.caveat_id.startswith(user_prefix):
-                return caveat.caveat_id[len(user_prefix) :]
-        raise InvalidClientTokenError("No user caveat in macaroon")
-
     def validate_macaroon(self, macaroon, type_string, user_id):
         """
         validate that a Macaroon is understood by and was signed by this server.

@@ -465,21 +450,13 @@ class Auth:
         v.satisfy_exact("type = " + type_string)
         v.satisfy_exact("user_id = %s" % user_id)
         v.satisfy_exact("guest = true")
-        v.satisfy_general(self._verify_expiry)
+        satisfy_expiry(v, self.clock.time_msec)
 
         # access_tokens include a nonce for uniqueness: any value is acceptable
         v.satisfy_general(lambda c: c.startswith("nonce = "))
 
         v.verify(macaroon, self._macaroon_secret_key)
 
-    def _verify_expiry(self, caveat):
-        prefix = "time < "
-        if not caveat.startswith(prefix):
-            return False
-        expiry = int(caveat[len(prefix) :])
-        now = self.hs.get_clock().time_msec()
-        return now < expiry
-
     def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService:
         token = self.get_access_token_from_request(request)
         service = self.store.get_app_service_by_token(token)
synapse/appservice/api.py

@@ -90,7 +90,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         self.clock = hs.get_clock()
 
         self.protocol_meta_cache = ResponseCache(
-            hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
+            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
         )  # type: ResponseCache[Tuple[str, str]]
 
     async def query_user(self, service, user_id):
synapse/config/_base.py

@@ -212,9 +212,8 @@ class Config:
 
     @classmethod
     def read_file(cls, file_path, config_name):
-        cls.check_file(file_path, config_name)
-        with open(file_path) as file_stream:
-            return file_stream.read()
+        """Deprecated: call read_file directly"""
+        return read_file(file_path, (config_name,))
 
     def read_template(self, filename: str) -> jinja2.Template:
         """Load a template file from disk.

@@ -894,4 +893,35 @@ class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
         return self._get_instance(key)
 
 
-__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
+def read_file(file_path: Any, config_path: Iterable[str]) -> str:
+    """Check the given file exists, and read it into a string
+
+    If it does not, emit an error indicating the problem
+
+    Args:
+        file_path: the file to be read
+        config_path: where in the configuration file_path came from, so that a useful
+            error can be emitted if it does not exist.
+    Returns:
+        content of the file.
+    Raises:
+        ConfigError if there is a problem reading the file.
+    """
+    if not isinstance(file_path, str):
+        raise ConfigError("%r is not a string", config_path)
+
+    try:
+        os.stat(file_path)
+        with open(file_path) as file_stream:
+            return file_stream.read()
+    except OSError as e:
+        raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e
+
+
+__all__ = [
+    "Config",
+    "RootConfig",
+    "ShardedWorkerHandlingConfig",
+    "RoutableShardedWorkerHandlingConfig",
+    "read_file",
+]
synapse/config/_base.pyi

@@ -152,3 +152,5 @@ class ShardedWorkerHandlingConfig:
 
 class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
     def get_instance(self, key: str) -> str: ...
+
+def read_file(file_path: Any, config_path: Iterable[str]) -> str: ...
synapse/config/logger.py

@@ -21,8 +21,10 @@ import threading
 from string import Template
 
 import yaml
+from zope.interface import implementer
 
 from twisted.logger import (
+    ILogObserver,
     LogBeginner,
     STDLibLogObserver,
     eventAsText,

@@ -227,7 +229,8 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) ->
 
     threadlocal = threading.local()
 
-    def _log(event):
+    @implementer(ILogObserver)
+    def _log(event: dict) -> None:
         if "log_text" in event:
             if event["log_text"].startswith("DNSDatagramProtocol starting on "):
                 return
synapse/config/oidc_config.py

@@ -15,7 +15,7 @@
 # limitations under the License.
 
 from collections import Counter
-from typing import Iterable, Optional, Tuple, Type
+from typing import Iterable, Mapping, Optional, Tuple, Type
 
 import attr
 

@@ -25,7 +25,7 @@ from synapse.types import Collection, JsonDict
 from synapse.util.module_loader import load_module
 from synapse.util.stringutils import parse_and_validate_mxc_uri
 
-from ._base import Config, ConfigError
+from ._base import Config, ConfigError, read_file
 
 DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider"
 

@@ -97,7 +97,26 @@ class OIDCConfig(Config):
        #
        #       client_id: Required. oauth2 client id to use.
        #
-       #       client_secret: Required. oauth2 client secret to use.
+       #       client_secret: oauth2 client secret to use. May be omitted if
+       #           client_secret_jwt_key is given, or if client_auth_method is 'none'.
+       #
+       #       client_secret_jwt_key: Alternative to client_secret: details of a key used
+       #           to create a JSON Web Token to be used as an OAuth2 client secret. If
+       #           given, must be a dictionary with the following properties:
+       #
+       #           key: a pem-encoded signing key. Must be a suitable key for the
+       #               algorithm specified. Required unless 'key_file' is given.
+       #
+       #           key_file: the path to file containing a pem-encoded signing key file.
+       #               Required unless 'key' is given.
+       #
+       #           jwt_header: a dictionary giving properties to include in the JWT
+       #               header. Must include the key 'alg', giving the algorithm used to
+       #               sign the JWT, such as "ES256", using the JWA identifiers in
+       #               RFC7518.
+       #
+       #           jwt_payload: an optional dictionary giving properties to include in
+       #               the JWT payload. Normally this should include an 'iss' key.
+       #
        #       client_auth_method: auth method to use when exchanging the token. Valid
        #           values are 'client_secret_basic' (default), 'client_secret_post' and

@@ -240,7 +259,7 @@ class OIDCConfig(Config):
 # jsonschema definition of the configuration settings for an oidc identity provider
 OIDC_PROVIDER_CONFIG_SCHEMA = {
     "type": "object",
-    "required": ["issuer", "client_id", "client_secret"],
+    "required": ["issuer", "client_id"],
     "properties": {
         "idp_id": {
             "type": "string",

@@ -262,6 +281,30 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
         "issuer": {"type": "string"},
         "client_id": {"type": "string"},
         "client_secret": {"type": "string"},
+        "client_secret_jwt_key": {
+            "type": "object",
+            "required": ["jwt_header"],
+            "oneOf": [
+                {"required": ["key"]},
+                {"required": ["key_file"]},
+            ],
+            "properties": {
+                "key": {"type": "string"},
+                "key_file": {"type": "string"},
+                "jwt_header": {
+                    "type": "object",
+                    "required": ["alg"],
+                    "properties": {
+                        "alg": {"type": "string"},
+                    },
+                    "additionalProperties": {"type": "string"},
+                },
+                "jwt_payload": {
+                    "type": "object",
+                    "additionalProperties": {"type": "string"},
+                },
+            },
+        },
         "client_auth_method": {
             "type": "string",
             # the following list is the same as the keys of

@@ -404,6 +447,20 @@ def _parse_oidc_config_dict(
             "idp_icon must be a valid MXC URI", config_path + ("idp_icon",)
         ) from e
 
+    client_secret_jwt_key_config = oidc_config.get("client_secret_jwt_key")
+    client_secret_jwt_key = None  # type: Optional[OidcProviderClientSecretJwtKey]
+    if client_secret_jwt_key_config is not None:
+        keyfile = client_secret_jwt_key_config.get("key_file")
+        if keyfile:
+            key = read_file(keyfile, config_path + ("client_secret_jwt_key",))
+        else:
+            key = client_secret_jwt_key_config["key"]
+        client_secret_jwt_key = OidcProviderClientSecretJwtKey(
+            key=key,
+            jwt_header=client_secret_jwt_key_config["jwt_header"],
+            jwt_payload=client_secret_jwt_key_config.get("jwt_payload", {}),
+        )
+
     return OidcProviderConfig(
         idp_id=idp_id,
         idp_name=oidc_config.get("idp_name", "OIDC"),

@@ -412,7 +469,8 @@ def _parse_oidc_config_dict(
         discover=oidc_config.get("discover", True),
         issuer=oidc_config["issuer"],
         client_id=oidc_config["client_id"],
-        client_secret=oidc_config["client_secret"],
+        client_secret=oidc_config.get("client_secret"),
+        client_secret_jwt_key=client_secret_jwt_key,
         client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"),
         scopes=oidc_config.get("scopes", ["openid"]),
         authorization_endpoint=oidc_config.get("authorization_endpoint"),

@@ -427,6 +485,18 @@ def _parse_oidc_config_dict(
     )
 
 
+@attr.s(slots=True, frozen=True)
+class OidcProviderClientSecretJwtKey:
+    # a pem-encoded signing key
+    key = attr.ib(type=str)
+
+    # properties to include in the JWT header
+    jwt_header = attr.ib(type=Mapping[str, str])
+
+    # properties to include in the JWT payload.
+    jwt_payload = attr.ib(type=Mapping[str, str])
+
+
 @attr.s(slots=True, frozen=True)
 class OidcProviderConfig:
     # a unique identifier for this identity provider. Used in the 'user_external_ids'

@@ -452,8 +522,13 @@ class OidcProviderConfig:
     # oauth2 client id to use
     client_id = attr.ib(type=str)
 
-    # oauth2 client secret to use
-    client_secret = attr.ib(type=str)
+    # oauth2 client secret to use. if `None`, use client_secret_jwt_key to generate
+    # a secret.
+    client_secret = attr.ib(type=Optional[str])
+
+    # key to use to construct a JWT to use as a client secret. May be `None` if
+    # `client_secret` is set.
+    client_secret_jwt_key = attr.ib(type=Optional[OidcProviderClientSecretJwtKey])
 
     # auth method to use when exchanging the token.
     # Valid values are 'client_secret_basic', 'client_secret_post' and
synapse/config/server.py

@@ -841,8 +841,7 @@ class ServerConfig(Config):
         # Whether to require authentication to retrieve profile data (avatars,
         # display names) of other users through the client API. Defaults to
         # 'false'. Note that profile data is also available via the federation
-        # API, so this setting is of limited value if federation is enabled on
-        # the server.
+        # API, unless allow_profile_lookup_over_federation is set to false.
         #
         #require_auth_for_profile_requests: true
 
synapse/federation/federation_server.py

@@ -22,6 +22,7 @@ from typing import (
     Awaitable,
     Callable,
     Dict,
+    Iterable,
     List,
     Optional,
     Tuple,

@@ -90,16 +91,15 @@ pdu_process_time = Histogram(
     "Time taken to process an event",
 )
 
-
-last_pdu_age_metric = Gauge(
-    "synapse_federation_last_received_pdu_age",
-    "The age (in seconds) of the last PDU successfully received from the given domain",
+last_pdu_ts_metric = Gauge(
+    "synapse_federation_last_received_pdu_time",
+    "The timestamp of the last PDU which was successfully received from the given domain",
     labelnames=("server_name",),
 )
 
 
 class FederationServer(FederationBase):
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
 
         self.auth = hs.get_auth()

@@ -119,7 +119,7 @@ class FederationServer(FederationBase):
 
         # We cache results for transaction with the same ID
         self._transaction_resp_cache = ResponseCache(
-            hs, "fed_txn_handler", timeout_ms=30000
+            hs.get_clock(), "fed_txn_handler", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]
 
         self.transaction_actions = TransactionActions(self.store)

@@ -129,10 +129,10 @@ class FederationServer(FederationBase):
         # We cache responses to state queries, as they take a while and often
         # come in waves.
         self._state_resp_cache = ResponseCache(
-            hs, "state_resp", timeout_ms=30000
+            hs.get_clock(), "state_resp", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]
         self._state_ids_resp_cache = ResponseCache(
-            hs, "state_ids_resp", timeout_ms=30000
+            hs.get_clock(), "state_ids_resp", timeout_ms=30000
         )  # type: ResponseCache[Tuple[str, str]]
 
         self._federation_metrics_domains = (

@@ -361,7 +361,7 @@ class FederationServer(FederationBase):
                 logger.error(
                     "Failed to handle PDU %s",
                     event_id,
-                    exc_info=(f.type, f.value, f.getTracebackObject()),
+                    exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore
                 )
 
         await concurrently_execute(

@@ -369,8 +369,7 @@ class FederationServer(FederationBase):
         )
 
         if newest_pdu_ts and origin in self._federation_metrics_domains:
-            newest_pdu_age = self._clock.time_msec() - newest_pdu_ts
-            last_pdu_age_metric.labels(server_name=origin).set(newest_pdu_age / 1000)
+            last_pdu_ts_metric.labels(server_name=origin).set(newest_pdu_ts / 1000)
 
         return pdu_results
 

@@ -455,7 +454,9 @@ class FederationServer(FederationBase):
         self, room_id: str, event_id: str
     ) -> Dict[str, list]:
         if event_id:
-            pdus = await self.handler.get_state_for_pdu(room_id, event_id)
+            pdus = await self.handler.get_state_for_pdu(
+                room_id, event_id
+            )  # type: Iterable[EventBase]
         else:
             pdus = (await self.state.get_current_state(room_id)).values()
 
synapse/federation/sender/transaction_manager.py

@@ -36,9 +36,9 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
-last_pdu_age_metric = Gauge(
-    "synapse_federation_last_sent_pdu_age",
-    "The age (in seconds) of the last PDU successfully sent to the given domain",
+last_pdu_ts_metric = Gauge(
+    "synapse_federation_last_sent_pdu_time",
+    "The timestamp of the last PDU which was successfully sent to the given domain",
     labelnames=("server_name",),
 )
 

@@ -187,9 +187,8 @@ class TransactionManager:
 
         if success and pdus and destination in self._federation_metrics_domains:
             last_pdu = pdus[-1]
-            last_pdu_age = self.clock.time_msec() - last_pdu.origin_server_ts
-            last_pdu_age_metric.labels(server_name=destination).set(
-                last_pdu_age / 1000
+            last_pdu_ts_metric.labels(server_name=destination).set(
+                last_pdu.origin_server_ts / 1000
             )
 
         set_tag(tags.ERROR, not success)
synapse/handlers/acme.py

@@ -73,7 +73,9 @@ class AcmeHandler:
             "Listening for ACME requests on %s:%i", host, self.hs.config.acme_port
         )
         try:
-            self.reactor.listenTCP(self.hs.config.acme_port, srv, interface=host)
+            self.reactor.listenTCP(
+                self.hs.config.acme_port, srv, backlog=50, interface=host
+            )
         except twisted.internet.error.CannotListenError as e:
             check_bind_error(e, host, bind_addresses)
 
synapse/handlers/auth.py

@@ -65,6 +65,7 @@ from synapse.storage.roommember import ProfileInfo
 from synapse.types import JsonDict, Requester, UserID
 from synapse.util import stringutils as stringutils
 from synapse.util.async_helpers import maybe_awaitable
+from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
 from synapse.util.msisdn import phone_number_to_msisdn
 from synapse.util.threepids import canonicalise_email
 

@@ -170,6 +171,16 @@ class SsoLoginExtraAttributes:
     extra_attributes = attr.ib(type=JsonDict)
 
 
+@attr.s(slots=True, frozen=True)
+class LoginTokenAttributes:
+    """Data we store in a short-term login token"""
+
+    user_id = attr.ib(type=str)
+
+    # the SSO Identity Provider that the user authenticated with, to get this token
+    auth_provider_id = attr.ib(type=str)
+
+
 class AuthHandler(BaseHandler):
     SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000
 

@@ -1164,18 +1175,16 @@ class AuthHandler(BaseHandler):
             return None
         return user_id
 
-    async def validate_short_term_login_token_and_get_user_id(self, login_token: str):
-        auth_api = self.hs.get_auth()
-        user_id = None
+    async def validate_short_term_login_token(
+        self, login_token: str
+    ) -> LoginTokenAttributes:
         try:
-            macaroon = pymacaroons.Macaroon.deserialize(login_token)
-            user_id = auth_api.get_user_id_from_macaroon(macaroon)
-            auth_api.validate_macaroon(macaroon, "login", user_id)
+            res = self.macaroon_gen.verify_short_term_login_token(login_token)
         except Exception:
             raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN)
 
-        await self.auth.check_auth_blocking(user_id)
-        return user_id
+        await self.auth.check_auth_blocking(res.user_id)
+        return res
 
     async def delete_access_token(self, access_token: str):
         """Invalidate a single access token

@@ -1397,6 +1406,7 @@ class AuthHandler(BaseHandler):
     async def complete_sso_login(
         self,
         registered_user_id: str,
+        auth_provider_id: str,
         request: Request,
         client_redirect_url: str,
         extra_attributes: Optional[JsonDict] = None,

@@ -1406,6 +1416,9 @@ class AuthHandler(BaseHandler):
 
         Args:
             registered_user_id: The registered user ID to complete SSO login for.
+            auth_provider_id: The id of the SSO Identity provider that was used for
+                login. This will be stored in the login token for future tracking in
+                prometheus metrics.
             request: The request to complete.
             client_redirect_url: The URL to which to redirect the user at the end of the
                 process.

@@ -1427,6 +1440,7 @@ class AuthHandler(BaseHandler):
 
         self._complete_sso_login(
             registered_user_id,
+            auth_provider_id,
             request,
             client_redirect_url,
             extra_attributes,

@@ -1437,6 +1451,7 @@ class AuthHandler(BaseHandler):
     def _complete_sso_login(
         self,
         registered_user_id: str,
+        auth_provider_id: str,
         request: Request,
         client_redirect_url: str,
         extra_attributes: Optional[JsonDict] = None,

@@ -1463,7 +1478,7 @@ class AuthHandler(BaseHandler):
 
         # Create a login token
         login_token = self.macaroon_gen.generate_short_term_login_token(
-            registered_user_id
+            registered_user_id, auth_provider_id=auth_provider_id
         )
 
         # Append the login token to the original redirect URL (i.e. with its query

@@ -1569,15 +1584,48 @@ class MacaroonGenerator:
         return macaroon.serialize()
 
     def generate_short_term_login_token(
-        self, user_id: str, duration_in_ms: int = (2 * 60 * 1000)
+        self,
+        user_id: str,
+        auth_provider_id: str,
+        duration_in_ms: int = (2 * 60 * 1000),
     ) -> str:
         macaroon = self._generate_base_macaroon(user_id)
         macaroon.add_first_party_caveat("type = login")
         now = self.hs.get_clock().time_msec()
         expiry = now + duration_in_ms
         macaroon.add_first_party_caveat("time < %d" % (expiry,))
+        macaroon.add_first_party_caveat("auth_provider_id = %s" % (auth_provider_id,))
         return macaroon.serialize()
 
+    def verify_short_term_login_token(self, token: str) -> LoginTokenAttributes:
+        """Verify a short-term-login macaroon
+
+        Checks that the given token is a valid, unexpired short-term-login token
+        minted by this server.
+
+        Args:
+            token: the login token to verify
+
+        Returns:
+            the user_id that this token is valid for
+
+        Raises:
+            MacaroonVerificationFailedException if the verification failed
+        """
+        macaroon = pymacaroons.Macaroon.deserialize(token)
+        user_id = get_value_from_macaroon(macaroon, "user_id")
+        auth_provider_id = get_value_from_macaroon(macaroon, "auth_provider_id")
+
+        v = pymacaroons.Verifier()
+        v.satisfy_exact("gen = 1")
+        v.satisfy_exact("type = login")
+        v.satisfy_general(lambda c: c.startswith("user_id = "))
+        v.satisfy_general(lambda c: c.startswith("auth_provider_id = "))
+        satisfy_expiry(v, self.hs.get_clock().time_msec)
+        v.verify(macaroon, self.hs.config.key.macaroon_secret_key)
+
+        return LoginTokenAttributes(user_id=user_id, auth_provider_id=auth_provider_id)
+
     def generate_delete_pusher_token(self, user_id: str) -> str:
         macaroon = self._generate_base_macaroon(user_id)
         macaroon.add_first_party_caveat("type = delete_pusher")
synapse/handlers/initial_sync.py

@@ -48,7 +48,7 @@ class InitialSyncHandler(BaseHandler):
         self.clock = hs.get_clock()
         self.validator = EventValidator()
         self.snapshot_cache = ResponseCache(
-            hs, "initial_sync_cache"
+            hs.get_clock(), "initial_sync_cache"
         )  # type: ResponseCache[Tuple[str, Optional[StreamToken], Optional[StreamToken], str, Optional[int], bool, bool]]
         self._event_serializer = hs.get_event_client_serializer()
         self.storage = hs.get_storage()
synapse/handlers/oidc_handler.py

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2020 Quentin Gliech
+# Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -14,13 +15,13 @@
 # limitations under the License.
 import inspect
 import logging
-from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar
+from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar, Union
 from urllib.parse import urlencode
 
 import attr
 import pymacaroons
 from authlib.common.security import generate_token
-from authlib.jose import JsonWebToken
+from authlib.jose import JsonWebToken, jwt
 from authlib.oauth2.auth import ClientAuth
 from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
 from authlib.oidc.core import CodeIDToken, ImplicitIDToken, UserInfo

@@ -35,13 +36,17 @@ from typing_extensions import TypedDict
 from twisted.web.client import readBody
 
 from synapse.config import ConfigError
-from synapse.config.oidc_config import OidcProviderConfig
+from synapse.config.oidc_config import (
+    OidcProviderClientSecretJwtKey,
+    OidcProviderConfig,
+)
 from synapse.handlers.sso import MappingException, UserAttributes
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
 from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
-from synapse.util import json_decoder
+from synapse.util import Clock, json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
+from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer

@@ -211,7 +216,7 @@ class OidcHandler:
             session_data = self._token_generator.verify_oidc_session_token(
                 session, state
             )
-        except (MacaroonDeserializationException, ValueError) as e:
+        except (MacaroonDeserializationException, KeyError) as e:
             logger.exception("Invalid session for OIDC callback")
             self._sso_handler.render_error(request, "invalid_session", str(e))
             return

@@ -275,9 +280,21 @@ class OidcProvider:
 
         self._scopes = provider.scopes
         self._user_profile_method = provider.user_profile_method
+
+        client_secret = None  # type: Union[None, str, JwtClientSecret]
+        if provider.client_secret:
+            client_secret = provider.client_secret
+        elif provider.client_secret_jwt_key:
+            client_secret = JwtClientSecret(
+                provider.client_secret_jwt_key,
+                provider.client_id,
+                provider.issuer,
+                hs.get_clock(),
+            )
+
         self._client_auth = ClientAuth(
             provider.client_id,
-            provider.client_secret,
+            client_secret,
             provider.client_auth_method,
         )  # type: ClientAuth
         self._client_auth_method = provider.client_auth_method

@@ -745,7 +762,7 @@ class OidcProvider:
                 idp_id=self.idp_id,
                 nonce=nonce,
                 client_redirect_url=client_redirect_url.decode(),
-                ui_auth_session_id=ui_auth_session_id,
+                ui_auth_session_id=ui_auth_session_id or "",
             ),
         )
 

@@ -976,6 +993,81 @@ class OidcProvider:
         return str(remote_user_id)
 
 
+# number of seconds a newly-generated client secret should be valid for
+CLIENT_SECRET_VALIDITY_SECONDS = 3600
+
+# minimum remaining validity on a client secret before we should generate a new one
+CLIENT_SECRET_MIN_VALIDITY_SECONDS = 600
+
+
+class JwtClientSecret:
+    """A class which generates a new client secret on demand, based on a JWK
+
+    This implementation is designed to comply with the requirements for Apple Sign in:
+    https://developer.apple.com/documentation/sign_in_with_apple/generate_and_validate_tokens#3262048
+
+    It looks like those requirements are based on https://tools.ietf.org/html/rfc7523,
+    but it's worth noting that we still put the generated secret in the "client_secret"
+    field (or rather, whereever client_auth_method puts it) rather than in a
+    client_assertion field in the body as that RFC seems to require.
+    """
+
+    def __init__(
+        self,
+        key: OidcProviderClientSecretJwtKey,
+        oauth_client_id: str,
+        oauth_issuer: str,
+        clock: Clock,
+    ):
+        self._key = key
+        self._oauth_client_id = oauth_client_id
+        self._oauth_issuer = oauth_issuer
+        self._clock = clock
+        self._cached_secret = b""
+        self._cached_secret_replacement_time = 0
+
+    def __str__(self):
+        # if client_auth_method is client_secret_basic, then ClientAuth.prepare calls
+        # encode_client_secret_basic, which calls "{}".format(secret), which ends up
+        # here.
+        return self._get_secret().decode("ascii")
+
+    def __bytes__(self):
+        # if client_auth_method is client_secret_post, then ClientAuth.prepare calls
+        # encode_client_secret_post, which ends up here.
+        return self._get_secret()
+
+    def _get_secret(self) -> bytes:
+        now = self._clock.time()
+
+        # if we have enough validity on our existing secret, use it
+        if now < self._cached_secret_replacement_time:
+            return self._cached_secret
+
+        issued_at = int(now)
+        expires_at = issued_at + CLIENT_SECRET_VALIDITY_SECONDS
+
+        # we copy the configured header because jwt.encode modifies it.
+        header = dict(self._key.jwt_header)
+
+        # see https://tools.ietf.org/html/rfc7523#section-3
+        payload = {
+            "sub": self._oauth_client_id,
+            "aud": self._oauth_issuer,
+            "iat": issued_at,
+            "exp": expires_at,
+            **self._key.jwt_payload,
+        }
+        logger.info(
+            "Generating new JWT for %s: %s %s", self._oauth_issuer, header, payload
+        )
+        self._cached_secret = jwt.encode(header, payload, self._key.key)
+        self._cached_secret_replacement_time = (
+            expires_at - CLIENT_SECRET_MIN_VALIDITY_SECONDS
+        )
+        return self._cached_secret
+
+
 class OidcSessionTokenGenerator:
     """Methods for generating and checking OIDC Session cookies."""
 

@@ -1020,10 +1112,9 @@ class OidcSessionTokenGenerator:
         macaroon.add_first_party_caveat(
             "client_redirect_url = %s" % (session_data.client_redirect_url,)
         )
-        if session_data.ui_auth_session_id:
-            macaroon.add_first_party_caveat(
-                "ui_auth_session_id = %s" % (session_data.ui_auth_session_id,)
-            )
+        macaroon.add_first_party_caveat(
+            "ui_auth_session_id = %s" % (session_data.ui_auth_session_id,)
+        )
         now = self._clock.time_msec()
         expiry = now + duration_in_ms
         macaroon.add_first_party_caveat("time < %d" % (expiry,))

@@ -1046,7 +1137,7 @@ class OidcSessionTokenGenerator:
             The data extracted from the session cookie
 
         Raises:
-            ValueError if an expected caveat is missing from the macaroon.
+            KeyError if an expected caveat is missing from the macaroon.
         """
         macaroon = pymacaroons.Macaroon.deserialize(session)
 

@@ -1057,26 +1148,16 @@ class OidcSessionTokenGenerator:
         v.satisfy_general(lambda c: c.startswith("nonce = "))
         v.satisfy_general(lambda c: c.startswith("idp_id = "))
         v.satisfy_general(lambda c: c.startswith("client_redirect_url = "))
-        # Sometimes there's a UI auth session ID, it seems to be OK to attempt
-        # to always satisfy this.
         v.satisfy_general(lambda c: c.startswith("ui_auth_session_id = "))
-        v.satisfy_general(self._verify_expiry)
+        satisfy_expiry(v, self._clock.time_msec)
 
         v.verify(macaroon, self._macaroon_secret_key)
 
         # Extract the session data from the token.
-        nonce = self._get_value_from_macaroon(macaroon, "nonce")
-        idp_id = self._get_value_from_macaroon(macaroon, "idp_id")
-        client_redirect_url = self._get_value_from_macaroon(
-            macaroon, "client_redirect_url"
-        )
-        try:
-            ui_auth_session_id = self._get_value_from_macaroon(
-                macaroon, "ui_auth_session_id"
-            )  # type: Optional[str]
-        except ValueError:
-            ui_auth_session_id = None
-
+        nonce = get_value_from_macaroon(macaroon, "nonce")
+        idp_id = get_value_from_macaroon(macaroon, "idp_id")
+        client_redirect_url = get_value_from_macaroon(macaroon, "client_redirect_url")
+        ui_auth_session_id = get_value_from_macaroon(macaroon, "ui_auth_session_id")
         return OidcSessionData(
             nonce=nonce,
             idp_id=idp_id,

@@ -1084,33 +1165,6 @@ class OidcSessionTokenGenerator:
             ui_auth_session_id=ui_auth_session_id,
         )
 
-    def _get_value_from_macaroon(self, macaroon: pymacaroons.Macaroon, key: str) -> str:
-        """Extracts a caveat value from a macaroon token.
-
-        Args:
-            macaroon: the token
-            key: the key of the caveat to extract
-
-        Returns:
-            The extracted value
-
-        Raises:
-            ValueError: if the caveat was not in the macaroon
-        """
-        prefix = key + " = "
-        for caveat in macaroon.caveats:
-            if caveat.caveat_id.startswith(prefix):
-                return caveat.caveat_id[len(prefix) :]
-        raise ValueError("No %s caveat in macaroon" % (key,))
-
-    def _verify_expiry(self, caveat: str) -> bool:
-        prefix = "time < "
-        if not caveat.startswith(prefix):
-            return False
-        expiry = int(caveat[len(prefix) :])
-        now = self._clock.time_msec()
-        return now < expiry
-
 
 @attr.s(frozen=True, slots=True)
 class OidcSessionData:

@@ -1125,8 +1179,8 @@ class OidcSessionData:
     # The URL the client gave when it initiated the flow. ("" if this is a UI Auth)
     client_redirect_url = attr.ib(type=str)
 
-    # The session ID of the ongoing UI Auth (None if this is a login)
-    ui_auth_session_id = attr.ib(type=Optional[str], default=None)
+    # The session ID of the ongoing UI Auth ("" if this is a login)
+    ui_auth_session_id = attr.ib(type=str)
 
 
 UserAttributeDict = TypedDict(
synapse/handlers/pagination.py

@@ -285,7 +285,7 @@ class PaginationHandler:
         except Exception:
             f = Failure()
             logger.error(
-                "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())
+                "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())  # type: ignore
             )
             self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
         finally:
synapse/handlers/register.py

@@ -18,6 +18,8 @@
 import logging
 from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
 
+from prometheus_client import Counter
+
 from synapse import types
 from synapse.api.constants import MAX_USERID_LENGTH, EventTypes, JoinRules, LoginType
 from synapse.api.errors import AuthError, Codes, ConsentNotGivenError, SynapseError

@@ -41,6 +43,19 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+registration_counter = Counter(
+    "synapse_user_registrations_total",
+    "Number of new users registered (since restart)",
+    ["guest", "shadow_banned", "auth_provider"],
+)
+
+login_counter = Counter(
+    "synapse_user_logins_total",
+    "Number of user logins (since restart)",
+    ["guest", "auth_provider"],
+)
+
+
 class RegistrationHandler(BaseHandler):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)

@@ -156,6 +171,7 @@ class RegistrationHandler(BaseHandler):
         bind_emails: Iterable[str] = [],
         by_admin: bool = False,
         user_agent_ips: Optional[List[Tuple[str, str]]] = None,
+        auth_provider_id: Optional[str] = None,
     ) -> str:
         """Registers a new client on the server.
 

@@ -181,8 +197,10 @@ class RegistrationHandler(BaseHandler):
                 admin api, otherwise False.
             user_agent_ips: Tuples of IP addresses and user-agents used
                 during the registration process.
+            auth_provider_id: The SSO IdP the user used, if any (just used for the
+                prometheus metrics).
         Returns:
-            The registere user_id.
+            The registered user_id.
         Raises:
             SynapseError if there was a problem registering.
         """

@@ -280,6 +298,12 @@ class RegistrationHandler(BaseHandler):
                 # if user id is taken, just generate another
                 fail_count += 1
 
+        registration_counter.labels(
+            guest=make_guest,
+            shadow_banned=shadow_banned,
+            auth_provider=(auth_provider_id or ""),
+        ).inc()
+
         if not self.hs.config.user_consent_at_registration:
             if not self.hs.config.auto_join_rooms_for_guests and make_guest:
                 logger.info(

@@ -638,6 +662,7 @@ class RegistrationHandler(BaseHandler):
         initial_display_name: Optional[str],
         is_guest: bool = False,
         is_appservice_ghost: bool = False,
+        auth_provider_id: Optional[str] = None,
     ) -> Tuple[str, str]:
         """Register a device for a user and generate an access token.
 

@@ -648,7 +673,8 @@ class RegistrationHandler(BaseHandler):
             device_id: The device ID to check, or None to generate a new one.
             initial_display_name: An optional display name for the device.
             is_guest: Whether this is a guest account
-
+            auth_provider_id: The SSO IdP the user used, if any (just used for the
+                prometheus metrics).
         Returns:
             Tuple of device ID and access token
         """

@@ -687,6 +713,11 @@ class RegistrationHandler(BaseHandler):
             is_appservice_ghost=is_appservice_ghost,
         )
 
+        login_counter.labels(
+            guest=is_guest,
+            auth_provider=(auth_provider_id or ""),
+        ).inc()
+
         return (registered_device_id, access_token)
 
     async def post_registration_actions(
synapse/handlers/room.py

@@ -121,7 +121,7 @@ class RoomCreationHandler(BaseHandler):
         # succession, only process the first attempt and return its result to
         # subsequent requests
         self._upgrade_response_cache = ResponseCache(
-            hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
+            hs.get_clock(), "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
        )  # type: ResponseCache[Tuple[str, str]]
         self._server_notices_mxid = hs.config.server_notices_mxid
 
synapse/handlers/room_list.py

@@ -45,10 +45,10 @@ class RoomListHandler(BaseHandler):
         self.enable_room_list_search = hs.config.enable_room_list_search
 
         self.response_cache = ResponseCache(
-            hs, "room_list"
+            hs.get_clock(), "room_list"
         )  # type: ResponseCache[Tuple[Optional[int], Optional[str], ThirdPartyInstanceID]]
         self.remote_response_cache = ResponseCache(
-            hs, "remote_room_list", timeout_ms=30 * 1000
+            hs.get_clock(), "remote_room_list", timeout_ms=30 * 1000
         )  # type: ResponseCache[Tuple[str, Optional[int], Optional[str], bool, Optional[str]]]
 
     async def get_local_public_room_list(

synapse/handlers/sso.py

@@ -456,6 +456,7 @@ class SsoHandler:
 
         await self._auth_handler.complete_sso_login(
             user_id,
+            auth_provider_id,
             request,
             client_redirect_url,
             extra_login_attributes,

@@ -605,6 +606,7 @@ class SsoHandler:
             default_display_name=attributes.display_name,
             bind_emails=attributes.emails,
             user_agent_ips=[(user_agent, ip_address)],
+            auth_provider_id=auth_provider_id,
         )
 
         await self._store.record_user_external_id(

@@ -886,6 +888,7 @@ class SsoHandler:
 
         await self._auth_handler.complete_sso_login(
             user_id,
+            session.auth_provider_id,
             request,
             session.client_redirect_url,
             session.extra_login_attributes,

synapse/handlers/sync.py

@@ -245,7 +245,7 @@ class SyncHandler:
         self.event_sources = hs.get_event_sources()
         self.clock = hs.get_clock()
         self.response_cache = ResponseCache(
-            hs, "sync", timeout_ms=SYNC_RESPONSE_CACHE_MS
+            hs.get_clock(), "sync", timeout_ms=SYNC_RESPONSE_CACHE_MS
         )  # type: ResponseCache[Tuple[Any, ...]]
         self.state = hs.get_state_handler()
         self.auth = hs.get_auth()

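The ResponseCache change repeated in these hunks narrows the constructor's dependency from the whole HomeServer to just a Clock. A toy illustration of why that kind of injection helps testability; this MiniResponseCache is not Synapse code, just a sketch of the idea:

    import time
    from typing import Any, Callable, Dict, Tuple


    class MiniResponseCache:
        """Depend on a small clock callable rather than a whole server object,
        so tests can pass a fake clock and advance time deterministically."""

        def __init__(self, now: Callable[[], float], timeout_ms: float = 0):
            self._now = now  # injected clock, e.g. time.monotonic
            self._timeout_s = timeout_ms / 1000.0
            self._entries: Dict[Any, Tuple[float, Any]] = {}

        def get_or_compute(self, key: Any, compute: Callable[[], Any]) -> Any:
            entry = self._entries.get(key)
            if entry is not None and self._now() < entry[0]:
                return entry[1]  # still fresh: reuse the cached response
            value = compute()
            self._entries[key] = (self._now() + self._timeout_s, value)
            return value


    cache = MiniResponseCache(time.monotonic, timeout_ms=5000)
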
synapse/http/client.py

@@ -63,6 +63,7 @@ from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_u
 from synapse.http.proxyagent import ProxyAgent
 from synapse.logging.context import make_deferred_yieldable
 from synapse.logging.opentracing import set_tag, start_active_span, tags
+from synapse.types import ISynapseReactor
 from synapse.util import json_decoder
 from synapse.util.async_helpers import timeout_deferred
 

@@ -199,7 +200,7 @@ class _IPBlacklistingResolver:
         return r
 
 
-@implementer(IReactorPluggableNameResolver)
+@implementer(ISynapseReactor)
 class BlacklistingReactorWrapper:
     """
     A Reactor wrapper which will prevent DNS resolution to blacklisted IP

@@ -324,7 +325,7 @@ class SimpleHttpClient:
             # filters out blacklisted IP addresses, to prevent DNS rebinding.
             self.reactor = BlacklistingReactorWrapper(
                 hs.get_reactor(), self._ip_whitelist, self._ip_blacklist
-            )
+            )  # type: ISynapseReactor
         else:
             self.reactor = hs.get_reactor()
 

synapse/http/federation/matrix_federation_agent.py

@@ -35,6 +35,7 @@ from synapse.http.client import BlacklistingAgentWrapper
 from synapse.http.federation.srv_resolver import Server, SrvResolver
 from synapse.http.federation.well_known_resolver import WellKnownResolver
 from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.types import ISynapseReactor
 from synapse.util import Clock
 
 logger = logging.getLogger(__name__)

@@ -68,7 +69,7 @@ class MatrixFederationAgent:
 
     def __init__(
         self,
-        reactor: IReactorCore,
+        reactor: ISynapseReactor,
         tls_client_options_factory: Optional[FederationPolicyForHTTPS],
         user_agent: bytes,
         ip_blacklist: IPSet,

synapse/http/federation/well_known_resolver.py

@@ -322,7 +322,8 @@ def _cache_period_from_headers(
 
 def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]:
     cache_controls = {}
-    for hdr in headers.getRawHeaders(b"cache-control", []):
+    cache_control_headers = headers.getRawHeaders(b"cache-control") or []
+    for hdr in cache_control_headers:
         for directive in hdr.split(b","):
             splits = [x.strip() for x in directive.split(b"=", 1)]
             k = splits[0].lower()

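The change above swaps a default argument for an explicit `or []`; Twisted's getRawHeaders returns None for an absent header when no default is given, and the explicit guard is presumably friendlier to type checkers. A standalone sketch of the same parsing logic, outside Twisted:

    from typing import Dict, List, Optional


    def parse_cache_control(raw: Optional[List[bytes]]) -> Dict[bytes, Optional[bytes]]:
        """Toy version of the parser above: split each Cache-Control value into
        comma-separated directives, then into optional key=value pairs."""
        cache_controls: Dict[bytes, Optional[bytes]] = {}
        for hdr in raw or []:  # treat a missing header (None) as empty
            for directive in hdr.split(b","):
                splits = [x.strip() for x in directive.split(b"=", 1)]
                k = splits[0].lower()
                cache_controls[k] = splits[1] if len(splits) == 2 else None
        return cache_controls


    assert parse_cache_control([b"max-age=60, no-transform"]) == {
        b"max-age": b"60",
        b"no-transform": None,
    }
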
synapse/http/matrixfederationclient.py

@@ -59,7 +59,7 @@ from synapse.logging.opentracing import (
     start_active_span,
     tags,
 )
-from synapse.types import JsonDict
+from synapse.types import ISynapseReactor, JsonDict
 from synapse.util import json_decoder
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.metrics import Measure

@@ -237,14 +237,14 @@ class MatrixFederationHttpClient:
         # addresses, to prevent DNS rebinding.
         self.reactor = BlacklistingReactorWrapper(
             hs.get_reactor(), None, hs.config.federation_ip_range_blacklist
-        )
+        )  # type: ISynapseReactor
 
         user_agent = hs.version_string
         if hs.config.user_agent_suffix:
             user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix)
         user_agent = user_agent.encode("ascii")
 
-        self.agent = MatrixFederationAgent(
+        federation_agent = MatrixFederationAgent(
             self.reactor,
             tls_client_options_factory,
             user_agent,

@@ -254,7 +254,7 @@ class MatrixFederationHttpClient:
         # Use a BlacklistingAgentWrapper to prevent circumventing the IP
         # blacklist via IP literals in server names
         self.agent = BlacklistingAgentWrapper(
-            self.agent,
+            federation_agent,
             ip_blacklist=hs.config.federation_ip_range_blacklist,
         )
 

@@ -534,9 +534,10 @@ class MatrixFederationHttpClient:
                         response.code, response_phrase, body
                     )
 
-                    # Retry if the error is a 429 (Too Many Requests),
-                    # otherwise just raise a standard HttpResponseException
-                    if response.code == 429:
+                    # Retry if the error is a 5xx or a 429 (Too Many
+                    # Requests), otherwise just raise a standard
+                    # `HttpResponseException`
+                    if 500 <= response.code < 600 or response.code == 429:
                         raise RequestSendFailed(exc, can_retry=True) from exc
                     else:
                         raise exc

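The retry predicate introduced above is worth stating on its own; a minimal sketch of the same rule:

    def is_retryable(code: int) -> bool:
        """Mirrors the condition in the diff above: transient server errors
        (5xx) and 429 Too Many Requests are worth retrying; other status codes
        are surfaced to the caller immediately."""
        return 500 <= code < 600 or code == 429


    assert is_retryable(502) and is_retryable(429) and not is_retryable(404)
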
synapse/logging/context.py

@@ -669,7 +669,7 @@ def preserve_fn(f):
     return g
 
 
-def run_in_background(f, *args, **kwargs):
+def run_in_background(f, *args, **kwargs) -> defer.Deferred:
     """Calls a function, ensuring that the current context is restored after
     return from the function, and that the sentinel context is set once the
     deferred returned by the function completes.

@@ -697,8 +697,10 @@ def run_in_background(f, *args, **kwargs):
     if isinstance(res, types.CoroutineType):
         res = defer.ensureDeferred(res)
 
+    # At this point we should have a Deferred, if not then f was a synchronous
+    # function, wrap it in a Deferred for consistency.
     if not isinstance(res, defer.Deferred):
-        return res
+        return defer.succeed(res)
 
     if res.called and not res.paused:
         # The function should have maintained the logcontext, so we can

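The normalisation step added above guarantees the annotated return type holds: a plain value from a synchronous function is wrapped in an already-fired Deferred. A reduced sketch of just that step:

    from twisted.internet import defer


    def ensure_deferred_result(res):
        """Sketch of the wrap-up above: callers of run_in_background can now
        rely on always receiving a Deferred, whatever f returned."""
        if not isinstance(res, defer.Deferred):
            return defer.succeed(res)
        return res
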
synapse/module_api/__init__.py

@@ -203,11 +203,26 @@ class ModuleApi:
         )
 
     def generate_short_term_login_token(
-        self, user_id: str, duration_in_ms: int = (2 * 60 * 1000)
+        self,
+        user_id: str,
+        duration_in_ms: int = (2 * 60 * 1000),
+        auth_provider_id: str = "",
     ) -> str:
-        """Generate a login token suitable for m.login.token authentication"""
+        """Generate a login token suitable for m.login.token authentication
+
+        Args:
+            user_id: gives the ID of the user that the token is for
+
+            duration_in_ms: the time that the token will be valid for
+
+            auth_provider_id: the ID of the SSO IdP that the user used to authenticate
+                to get this token, if any. This is encoded in the token so that
+                /login can report stats on number of successful logins by IdP.
+        """
         return self._hs.get_macaroon_generator().generate_short_term_login_token(
-            user_id, duration_in_ms
+            user_id,
+            auth_provider_id,
+            duration_in_ms,
         )
 
     @defer.inlineCallbacks

@@ -276,6 +291,7 @@ class ModuleApi:
         """
         self._auth_handler._complete_sso_login(
             registered_user_id,
+            "<unknown>",
             request,
             client_redirect_url,
         )

@@ -286,6 +302,7 @@ class ModuleApi:
         request: SynapseRequest,
         client_redirect_url: str,
         new_user: bool = False,
+        auth_provider_id: str = "<unknown>",
     ):
         """Complete a SSO login by redirecting the user to a page to confirm whether they
         want their access token sent to `client_redirect_url`, or redirect them to that

@@ -299,9 +316,15 @@ class ModuleApi:
                 redirect them directly if whitelisted).
             new_user: set to true to use wording for the consent appropriate to a user
                 who has just registered.
+            auth_provider_id: the ID of the SSO IdP which was used to log in. This
+                is used to track counts of successful logins by IdP.
         """
         await self._auth_handler.complete_sso_login(
-            registered_user_id, request, client_redirect_url, new_user=new_user
+            registered_user_id,
+            auth_provider_id,
+            request,
+            client_redirect_url,
+            new_user=new_user,
         )
 
     @defer.inlineCallbacks

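A hedged usage sketch for the extended module API method above; `api` stands for the ModuleApi instance a Synapse module receives, and the user ID and IdP name are invented for illustration:

    # Hypothetical module code. Keyword names follow the signature in the
    # diff above; the auth_provider_id ends up encoded in the macaroon so
    # /login can attribute the eventual login to this IdP.
    token = api.generate_short_term_login_token(
        "@alice:example.com",
        duration_in_ms=2 * 60 * 1000,
        auth_provider_id="my_idp",
    )
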
synapse/replication/http/_base.py

@@ -18,7 +18,7 @@ import logging
 import re
 import urllib
 from inspect import signature
-from typing import Dict, List, Tuple
+from typing import TYPE_CHECKING, Dict, List, Tuple
 
 from prometheus_client import Counter, Gauge
 

@@ -28,6 +28,9 @@ from synapse.logging.opentracing import inject_active_span_byte_dict, trace
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.stringutils import random_string
 
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger(__name__)
 
 _pending_outgoing_requests = Gauge(

@@ -88,10 +91,10 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
     CACHE = True
     RETRY_ON_TIMEOUT = True
 
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         if self.CACHE:
             self.response_cache = ResponseCache(
-                hs, "repl." + self.NAME, timeout_ms=30 * 60 * 1000
+                hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000
             )  # type: ResponseCache[str]
 
         # We reserve `instance_name` as a parameter to sending requests, so we

synapse/replication/tcp/redis.py

@@ -328,6 +328,6 @@ def lazyConnection(
     factory.continueTrying = reconnect
 
     reactor = hs.get_reactor()
-    reactor.connectTCP(host, port, factory, 30)
+    reactor.connectTCP(host, port, factory, timeout=30, bindAddress=None)
 
     return factory.handler

synapse/rest/admin/purge_room_servlet.py

@@ -12,13 +12,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from typing import TYPE_CHECKING, Tuple
+
 from synapse.http.servlet import (
     RestServlet,
     assert_params_in_dict,
     parse_json_object_from_request,
 )
+from synapse.http.site import SynapseRequest
 from synapse.rest.admin import assert_requester_is_admin
 from synapse.rest.admin._base import admin_patterns
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
 
 
 class PurgeRoomServlet(RestServlet):

@@ -36,16 +43,12 @@ class PurgeRoomServlet(RestServlet):
 
     PATTERNS = admin_patterns("/purge_room$")
 
-    def __init__(self, hs):
-        """
-        Args:
-            hs (synapse.server.HomeServer): server
-        """
+    def __init__(self, hs: "HomeServer"):
         self.hs = hs
         self.auth = hs.get_auth()
         self.pagination_handler = hs.get_pagination_handler()
 
-    async def on_POST(self, request):
+    async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)
 
         body = parse_json_object_from_request(request)

synapse/rest/admin/server_notice_servlet.py

@@ -12,17 +12,24 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from typing import TYPE_CHECKING, Optional, Tuple
+
 from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
+from synapse.http.server import HttpServer
 from synapse.http.servlet import (
     RestServlet,
     assert_params_in_dict,
     parse_json_object_from_request,
 )
+from synapse.http.site import SynapseRequest
 from synapse.rest.admin import assert_requester_is_admin
 from synapse.rest.admin._base import admin_patterns
 from synapse.rest.client.transactions import HttpTransactionCache
-from synapse.types import UserID
+from synapse.types import JsonDict, UserID
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
 
 
 class SendServerNoticeServlet(RestServlet):

@@ -44,17 +51,13 @@ class SendServerNoticeServlet(RestServlet):
     }
     """
 
-    def __init__(self, hs):
-        """
-        Args:
-            hs (synapse.server.HomeServer): server
-        """
+    def __init__(self, hs: "HomeServer"):
         self.hs = hs
         self.auth = hs.get_auth()
         self.txns = HttpTransactionCache(hs)
         self.snm = hs.get_server_notices_manager()
 
-    def register(self, json_resource):
+    def register(self, json_resource: HttpServer):
         PATTERN = "/send_server_notice"
         json_resource.register_paths(
             "POST", admin_patterns(PATTERN + "$"), self.on_POST, self.__class__.__name__

@@ -66,7 +69,9 @@ class SendServerNoticeServlet(RestServlet):
             self.__class__.__name__,
         )
 
-    async def on_POST(self, request, txn_id=None):
+    async def on_POST(
+        self, request: SynapseRequest, txn_id: Optional[str] = None
+    ) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)
         body = parse_json_object_from_request(request)
         assert_params_in_dict(body, ("user_id", "content"))

@@ -90,7 +95,7 @@ class SendServerNoticeServlet(RestServlet):
 
         return 200, {"event_id": event.event_id}
 
-    def on_PUT(self, request, txn_id):
+    def on_PUT(self, request: SynapseRequest, txn_id: str) -> Tuple[int, JsonDict]:
         return self.txns.fetch_or_execute_request(
             request, self.on_POST, request, txn_id
         )

synapse/rest/client/v1/login.py

@@ -219,6 +219,7 @@ class LoginRestServlet(RestServlet):
         callback: Optional[Callable[[Dict[str, str]], Awaitable[None]]] = None,
         create_non_existent_users: bool = False,
         ratelimit: bool = True,
+        auth_provider_id: Optional[str] = None,
     ) -> Dict[str, str]:
         """Called when we've successfully authed the user and now need to
         actually login them in (e.g. create devices). This gets called on

@@ -234,6 +235,8 @@ class LoginRestServlet(RestServlet):
             create_non_existent_users: Whether to create the user if they don't
                 exist. Defaults to False.
             ratelimit: Whether to ratelimit the login request.
+            auth_provider_id: The SSO IdP the user used, if any (just used for the
+                prometheus metrics).
 
         Returns:
             result: Dictionary of account information after successful login.

@@ -256,7 +259,7 @@ class LoginRestServlet(RestServlet):
         device_id = login_submission.get("device_id")
         initial_display_name = login_submission.get("initial_device_display_name")
         device_id, access_token = await self.registration_handler.register_device(
-            user_id, device_id, initial_display_name
+            user_id, device_id, initial_display_name, auth_provider_id=auth_provider_id
         )
 
         result = {

@@ -283,12 +286,13 @@ class LoginRestServlet(RestServlet):
         """
         token = login_submission["token"]
         auth_handler = self.auth_handler
-        user_id = await auth_handler.validate_short_term_login_token_and_get_user_id(
-            token
-        )
+        res = await auth_handler.validate_short_term_login_token(token)
 
         return await self._complete_login(
-            user_id, login_submission, self.auth_handler._sso_login_callback
+            res.user_id,
+            login_submission,
+            self.auth_handler._sso_login_callback,
+            auth_provider_id=res.auth_provider_id,
         )
 
     async def _do_jwt_login(self, login_submission: JsonDict) -> Dict[str, str]:

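The token login path above now gets back a structured result rather than a bare user ID. A hedged sketch of the shape implied by `res.user_id` and `res.auth_provider_id`; the real class lives in Synapse's auth code and may be named differently:

    import attr


    @attr.s(slots=True, frozen=True)
    class LoginTokenResult:
        """Illustrative stand-in for the validation result used above: the
        user the token belongs to, plus the SSO IdP that issued it (empty
        string when the token was not issued via SSO)."""

        user_id = attr.ib(type=str)
        auth_provider_id = attr.ib(type=str)
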
synapse/rest/media/v1/thumbnailer.py

@@ -96,9 +96,14 @@ class Thumbnailer:
     def _resize(self, width: int, height: int) -> Image:
         # 1-bit or 8-bit color palette images need converting to RGB
         # otherwise they will be scaled using nearest neighbour which
-        # looks awful
-        if self.image.mode in ["1", "P"]:
-            self.image = self.image.convert("RGB")
+        # looks awful.
+        #
+        # If the image has transparency, use RGBA instead.
+        if self.image.mode in ["1", "L", "P"]:
+            mode = "RGB"
+            if self.image.info.get("transparency", None) is not None:
+                mode = "RGBA"
+            self.image = self.image.convert(mode)
         return self.image.resize((width, height), Image.ANTIALIAS)
 
     def scale(self, width: int, height: int, output_type: str) -> BytesIO:

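A self-contained sketch of the conversion rule introduced above, using plain Pillow (the function name is mine, not Synapse's):

    from PIL import Image


    def to_rgb_or_rgba(image: Image.Image) -> Image.Image:
        """Upgrade 1-bit, greyscale, or palette images before resizing, and
        keep an alpha channel when the image declares a transparent entry."""
        if image.mode in ["1", "L", "P"]:
            mode = "RGBA" if image.info.get("transparency") is not None else "RGB"
            image = image.convert(mode)
        return image
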
synapse/server.py

@@ -36,7 +36,6 @@ from typing import (
     cast,
 )
 
-import twisted.internet.base
 import twisted.internet.tcp
 from twisted.internet import defer
 from twisted.mail.smtp import sendmail

@@ -130,7 +129,7 @@ from synapse.server_notices.worker_server_notices_sender import (
 from synapse.state import StateHandler, StateResolutionHandler
 from synapse.storage import Databases, DataStore, Storage
 from synapse.streams.events import EventSources
-from synapse.types import DomainSpecificString
+from synapse.types import DomainSpecificString, ISynapseReactor
 from synapse.util import Clock
 from synapse.util.distributor import Distributor
 from synapse.util.ratelimitutils import FederationRateLimiter

@@ -291,7 +290,7 @@ class HomeServer(metaclass=abc.ABCMeta):
         for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
             getattr(self, "get_" + i + "_handler")()
 
-    def get_reactor(self) -> twisted.internet.base.ReactorBase:
+    def get_reactor(self) -> ISynapseReactor:
         """
         Fetch the Twisted reactor in use by this HomeServer.
         """

synapse/types.py

@@ -35,6 +35,14 @@ from typing import (
 import attr
 from signedjson.key import decode_verify_key_bytes
 from unpaddedbase64 import decode_base64
+from zope.interface import Interface
+
+from twisted.internet.interfaces import (
+    IReactorCore,
+    IReactorPluggableNameResolver,
+    IReactorTCP,
+    IReactorTime,
+)
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.util.stringutils import parse_and_validate_server_name

@@ -67,6 +75,14 @@ MutableStateMap = MutableMapping[StateKey, T]
 JsonDict = Dict[str, Any]
 
 
+# Note that this seems to require inheriting *directly* from Interface in order
+# for mypy-zope to realize it is an interface.
+class ISynapseReactor(
+    IReactorTCP, IReactorPluggableNameResolver, IReactorTime, IReactorCore, Interface
+):
+    """The interfaces necessary for Synapse to function."""
+
+
 class Requester(
     namedtuple(
         "Requester",

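A minimal sketch of the zope.interface composition pattern used for ISynapseReactor above, with toy interfaces in place of the Twisted reactor ones:

    from zope.interface import Interface, implementer


    class IGreeter(Interface):
        """A toy interface standing in for one of the reactor interfaces."""


    # As the comment in the diff notes, listing Interface itself as a direct
    # base is what lets mypy-zope recognise the composite as an interface.
    class IComposite(IGreeter, Interface):
        """Combines IGreeter with the marker base, the way ISynapseReactor
        combines the four reactor interfaces."""


    @implementer(IComposite)
    class Thing:
        pass
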
synapse/util/async_helpers.py

@@ -76,11 +76,16 @@ class ObservableDeferred:
         def callback(r):
             object.__setattr__(self, "_result", (True, r))
             while self._observers:
+                observer = self._observers.pop()
                 try:
-                    # TODO: Handle errors here.
-                    self._observers.pop().callback(r)
-                except Exception:
-                    pass
+                    observer.callback(r)
+                except Exception as e:
+                    logger.exception(
+                        "%r threw an exception on .callback(%r), ignoring...",
+                        observer,
+                        r,
+                        exc_info=e,
+                    )
             return r
 
         def errback(f):

@@ -90,11 +95,16 @@ class ObservableDeferred:
                 # traces when we `await` on one of the observer deferreds.
                 f.value.__failure__ = f
 
+                observer = self._observers.pop()
                 try:
-                    # TODO: Handle errors here.
-                    self._observers.pop().errback(f)
-                except Exception:
-                    pass
+                    observer.errback(f)
+                except Exception as e:
+                    logger.exception(
+                        "%r threw an exception on .errback(%r), ignoring...",
+                        observer,
+                        f,
+                        exc_info=e,
+                    )
 
             if consumeErrors:
                 return None

synapse/util/caches/response_cache.py

@@ -13,17 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
 
 from twisted.internet import defer
 
 from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.util import Clock
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
 
-if TYPE_CHECKING:
-    from synapse.app.homeserver import HomeServer
-
 logger = logging.getLogger(__name__)
 
 T = TypeVar("T")

@@ -37,11 +35,11 @@ class ResponseCache(Generic[T]):
     used rather than trying to compute a new response.
     """
 
-    def __init__(self, hs: "HomeServer", name: str, timeout_ms: float = 0):
+    def __init__(self, clock: Clock, name: str, timeout_ms: float = 0):
         # Requests that haven't finished yet.
         self.pending_result_cache = {}  # type: Dict[T, ObservableDeferred]
 
-        self.clock = hs.get_clock()
+        self.clock = clock
         self.timeout_sec = timeout_ms / 1000.0
 
         self._name = name

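The ObservableDeferred hardening above replaces silent swallowing of observer errors with logging. A reduced sketch of the same notify-and-log loop, independent of Twisted:

    import logging

    logger = logging.getLogger(__name__)


    def notify_all(observers, value):
        """Pop each observer before calling it so a failure can be attributed
        to a specific observer, and log (rather than swallow) exceptions."""
        while observers:
            observer = observers.pop()
            try:
                observer(value)
            except Exception:
                logger.exception("%r threw an exception, ignoring...", observer)
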
synapse/util/macaroons.py (new file: 89 lines)

@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Quentin Gliech
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for manipulating macaroons"""
+
+from typing import Callable, Optional
+
+import pymacaroons
+from pymacaroons.exceptions import MacaroonVerificationFailedException
+
+
+def get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str:
+    """Extracts a caveat value from a macaroon token.
+
+    Checks that there is exactly one caveat of the form "key = <val>" in the macaroon,
+    and returns the extracted value.
+
+    Args:
+        macaroon: the token
+        key: the key of the caveat to extract
+
+    Returns:
+        The extracted value
+
+    Raises:
+        MacaroonVerificationFailedException: if there are conflicting values for the
+            caveat in the macaroon, or if the caveat was not found in the macaroon.
+    """
+    prefix = key + " = "
+    result = None  # type: Optional[str]
+    for caveat in macaroon.caveats:
+        if not caveat.caveat_id.startswith(prefix):
+            continue
+
+        val = caveat.caveat_id[len(prefix) :]
+
+        if result is None:
+            # first time we found this caveat: record the value
+            result = val
+        elif val != result:
+            # on subsequent occurrences, raise if the value is different.
+            raise MacaroonVerificationFailedException(
+                "Conflicting values for caveat " + key
+            )
+
+    if result is not None:
+        return result
+
+    # If the caveat is not there, we raise a MacaroonVerificationFailedException.
+    # Note that it is insecure to generate a macaroon without all the caveats you
+    # might need (because there is nothing stopping people from adding extra caveats),
+    # so if the caveat isn't there, something odd must be going on.
+    raise MacaroonVerificationFailedException("No %s caveat in macaroon" % (key,))
+
+
+def satisfy_expiry(v: pymacaroons.Verifier, get_time_ms: Callable[[], int]) -> None:
+    """Make a macaroon verifier which accepts 'time' caveats
+
+    Builds a caveat verifier which will accept unexpired 'time' caveats, and adds it to
+    the given macaroon verifier.
+
+    Args:
+        v: the macaroon verifier
+        get_time_ms: a callable which will return the timestamp after which the caveat
+            should be considered expired. Normally the current time.
+    """
+
+    def verify_expiry_caveat(caveat: str):
+        time_msec = get_time_ms()
+        prefix = "time < "
+        if not caveat.startswith(prefix):
+            return False
+        expiry = int(caveat[len(prefix) :])
+        return time_msec < expiry
+
+    v.satisfy_general(verify_expiry_caveat)

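A short usage sketch for the new helper; the macaroon below is built inline with pymacaroons purely for illustration (real tokens come from Synapse's macaroon generator):

    import pymacaroons
    from synapse.util.macaroons import get_value_from_macaroon

    # Hypothetical token: one first-party caveat carrying a user_id.
    macaroon = pymacaroons.Macaroon(
        location="example.com", identifier="key1", key="secret"
    )
    macaroon.add_first_party_caveat("user_id = @alice:example.com")

    # The helper insists on exactly one consistent value for the caveat and
    # raises MacaroonVerificationFailedException otherwise.
    assert get_value_from_macaroon(macaroon, "user_id") == "@alice:example.com"
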
tests/handlers/oidc_test_key.p8 (new file: 5 lines)

@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgrHMvFcFjFhei6gHp
+Gfy4C8+6z7634MZbC7SSx4a17GahRANCAATp0YxEzGUXuqszggiFxczDdPgDpCJA
+P18rRuN7FLwZDuzYQPb8zVd8eGh4BqxjiVocICnVWyaSWD96N00I96SW
+-----END PRIVATE KEY-----

tests/handlers/oidc_test_key.pub.pem (new file: 4 lines)

@@ -0,0 +1,4 @@
+-----BEGIN PUBLIC KEY-----
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE6dGMRMxlF7qrM4IIhcXMw3T4A6Qi
+QD9fK0bjexS8GQ7s2ED2/M1XfHhoeAasY4laHCAp1Vsmklg/ejdNCPeklg==
+-----END PUBLIC KEY-----

@ -68,38 +68,45 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
||||
|
||||
def test_short_term_login_token_gives_user_id(self):
|
||||
token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000)
|
||||
user_id = self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(token)
|
||||
token = self.macaroon_generator.generate_short_term_login_token(
|
||||
"a_user", "", 5000
|
||||
)
|
||||
self.assertEqual("a_user", user_id)
|
||||
res = self.get_success(self.auth_handler.validate_short_term_login_token(token))
|
||||
self.assertEqual("a_user", res.user_id)
|
||||
self.assertEqual("", res.auth_provider_id)
|
||||
|
||||
# when we advance the clock, the token should be rejected
|
||||
self.reactor.advance(6)
|
||||
self.get_failure(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(token),
|
||||
self.auth_handler.validate_short_term_login_token(token),
|
||||
AuthError,
|
||||
)
|
||||
|
||||
def test_short_term_login_token_gives_auth_provider(self):
|
||||
token = self.macaroon_generator.generate_short_term_login_token(
|
||||
"a_user", auth_provider_id="my_idp"
|
||||
)
|
||||
res = self.get_success(self.auth_handler.validate_short_term_login_token(token))
|
||||
self.assertEqual("a_user", res.user_id)
|
||||
self.assertEqual("my_idp", res.auth_provider_id)
|
||||
|
||||
def test_short_term_login_token_cannot_replace_user_id(self):
|
||||
token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000)
|
||||
token = self.macaroon_generator.generate_short_term_login_token(
|
||||
"a_user", "", 5000
|
||||
)
|
||||
macaroon = pymacaroons.Macaroon.deserialize(token)
|
||||
|
||||
user_id = self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
macaroon.serialize()
|
||||
)
|
||||
res = self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token(macaroon.serialize())
|
||||
)
|
||||
self.assertEqual("a_user", user_id)
|
||||
self.assertEqual("a_user", res.user_id)
|
||||
|
||||
# add another "user_id" caveat, which might allow us to override the
|
||||
# user_id.
|
||||
macaroon.add_first_party_caveat("user_id = b_user")
|
||||
|
||||
self.get_failure(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
macaroon.serialize()
|
||||
),
|
||||
self.auth_handler.validate_short_term_login_token(macaroon.serialize()),
|
||||
AuthError,
|
||||
)
|
||||
|
||||
|
@ -113,7 +120,7 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
)
|
||||
|
||||
self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
self.auth_handler.validate_short_term_login_token(
|
||||
self._get_macaroon().serialize()
|
||||
)
|
||||
)
|
||||
|
@ -135,7 +142,7 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
return_value=make_awaitable(self.large_number_of_users)
|
||||
)
|
||||
self.get_failure(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
self.auth_handler.validate_short_term_login_token(
|
||||
self._get_macaroon().serialize()
|
||||
),
|
||||
ResourceLimitError,
|
||||
|
@ -159,7 +166,7 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
ResourceLimitError,
|
||||
)
|
||||
self.get_failure(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
self.auth_handler.validate_short_term_login_token(
|
||||
self._get_macaroon().serialize()
|
||||
),
|
||||
ResourceLimitError,
|
||||
|
@ -175,7 +182,7 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
)
|
||||
)
|
||||
self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
self.auth_handler.validate_short_term_login_token(
|
||||
self._get_macaroon().serialize()
|
||||
)
|
||||
)
|
||||
|
@ -197,11 +204,13 @@ class AuthTestCase(unittest.HomeserverTestCase):
|
|||
return_value=make_awaitable(self.small_number_of_users)
|
||||
)
|
||||
self.get_success(
|
||||
self.auth_handler.validate_short_term_login_token_and_get_user_id(
|
||||
self.auth_handler.validate_short_term_login_token(
|
||||
self._get_macaroon().serialize()
|
||||
)
|
||||
)
|
||||
|
||||
def _get_macaroon(self):
|
||||
token = self.macaroon_generator.generate_short_term_login_token("user_a", 5000)
|
||||
token = self.macaroon_generator.generate_short_term_login_token(
|
||||
"user_a", "", 5000
|
||||
)
|
||||
return pymacaroons.Macaroon.deserialize(token)
|
||||
|
|
|
@ -66,7 +66,7 @@ class CasHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=True
|
||||
"@test_user:test", "cas", request, "redirect_uri", None, new_user=True
|
||||
)
|
||||
|
||||
def test_map_cas_user_to_existing_user(self):
|
||||
|
@ -89,7 +89,7 @@ class CasHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=False
|
||||
"@test_user:test", "cas", request, "redirect_uri", None, new_user=False
|
||||
)
|
||||
|
||||
# Subsequent calls should map to the same mxid.
|
||||
|
@ -98,7 +98,7 @@ class CasHandlerTestCase(HomeserverTestCase):
|
|||
self.handler._handle_cas_response(request, cas_response, "redirect_uri", "")
|
||||
)
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=False
|
||||
"@test_user:test", "cas", request, "redirect_uri", None, new_user=False
|
||||
)
|
||||
|
||||
def test_map_cas_user_to_invalid_localpart(self):
|
||||
|
@ -116,7 +116,7 @@ class CasHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@f=c3=b6=c3=b6:test", request, "redirect_uri", None, new_user=True
|
||||
"@f=c3=b6=c3=b6:test", "cas", request, "redirect_uri", None, new_user=True
|
||||
)
|
||||
|
||||
@override_config(
|
||||
|
@ -160,7 +160,7 @@ class CasHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=True
|
||||
"@test_user:test", "cas", request, "redirect_uri", None, new_user=True
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import json
|
||||
from typing import Optional
|
||||
import os
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from mock import ANY, Mock, patch
|
||||
|
@ -23,6 +23,7 @@ import pymacaroons
|
|||
from synapse.handlers.sso import MappingException
|
||||
from synapse.server import HomeServer
|
||||
from synapse.types import UserID
|
||||
from synapse.util.macaroons import get_value_from_macaroon
|
||||
|
||||
from tests.test_utils import FakeResponse, get_awaitable_result, simple_async_mock
|
||||
from tests.unittest import HomeserverTestCase, override_config
|
||||
|
@ -50,7 +51,18 @@ WELL_KNOWN = ISSUER + ".well-known/openid-configuration"
|
|||
JWKS_URI = ISSUER + ".well-known/jwks.json"
|
||||
|
||||
# config for common cases
|
||||
COMMON_CONFIG = {
|
||||
DEFAULT_CONFIG = {
|
||||
"enabled": True,
|
||||
"client_id": CLIENT_ID,
|
||||
"client_secret": CLIENT_SECRET,
|
||||
"issuer": ISSUER,
|
||||
"scopes": SCOPES,
|
||||
"user_mapping_provider": {"module": __name__ + ".TestMappingProvider"},
|
||||
}
|
||||
|
||||
# extends the default config with explicit OAuth2 endpoints instead of using discovery
|
||||
EXPLICIT_ENDPOINT_CONFIG = {
|
||||
**DEFAULT_CONFIG,
|
||||
"discover": False,
|
||||
"authorization_endpoint": AUTHORIZATION_ENDPOINT,
|
||||
"token_endpoint": TOKEN_ENDPOINT,
|
||||
|
@ -107,6 +119,32 @@ async def get_json(url):
|
|||
return {"keys": []}
|
||||
|
||||
|
||||
def _key_file_path() -> str:
|
||||
"""path to a file containing the private half of a test key"""
|
||||
|
||||
# this key was generated with:
|
||||
# openssl ecparam -name prime256v1 -genkey -noout |
|
||||
# openssl pkcs8 -topk8 -nocrypt -out oidc_test_key.p8
|
||||
#
|
||||
# we use PKCS8 rather than SEC-1 (which is what openssl ecparam spits out), because
|
||||
# that's what Apple use, and we want to be sure that we work with Apple's keys.
|
||||
#
|
||||
# (For the record: both PKCS8 and SEC-1 specify (different) ways of representing
|
||||
# keys using ASN.1. Both are then typically formatted using PEM, which says: use the
|
||||
# base64-encoded DER encoding of ASN.1, with headers and footers. But we don't
|
||||
# really need to care about any of that.)
|
||||
return os.path.join(os.path.dirname(__file__), "oidc_test_key.p8")
|
||||
|
||||
|
||||
def _public_key_file_path() -> str:
|
||||
"""path to a file containing the public half of a test key"""
|
||||
# this was generated with:
|
||||
# openssl ec -in oidc_test_key.p8 -pubout -out oidc_test_key.pub.pem
|
||||
#
|
||||
# See above about where oidc_test_key.p8 came from
|
||||
return os.path.join(os.path.dirname(__file__), "oidc_test_key.pub.pem")
|
||||
|
||||
|
||||
class OidcHandlerTestCase(HomeserverTestCase):
|
||||
if not HAS_OIDC:
|
||||
skip = "requires OIDC"
|
||||
|
@ -114,20 +152,6 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
def default_config(self):
|
||||
config = super().default_config()
|
||||
config["public_baseurl"] = BASE_URL
|
||||
oidc_config = {
|
||||
"enabled": True,
|
||||
"client_id": CLIENT_ID,
|
||||
"client_secret": CLIENT_SECRET,
|
||||
"issuer": ISSUER,
|
||||
"scopes": SCOPES,
|
||||
"user_mapping_provider": {"module": __name__ + ".TestMappingProvider"},
|
||||
}
|
||||
|
||||
# Update this config with what's in the default config so that
|
||||
# override_config works as expected.
|
||||
oidc_config.update(config.get("oidc_config", {}))
|
||||
config["oidc_config"] = oidc_config
|
||||
|
||||
return config
|
||||
|
||||
def make_homeserver(self, reactor, clock):
|
||||
|
@ -170,13 +194,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.render_error.reset_mock()
|
||||
return args
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_config(self):
|
||||
"""Basic config correctly sets up the callback URL and client auth correctly."""
|
||||
self.assertEqual(self.provider._callback_url, CALLBACK_URL)
|
||||
self.assertEqual(self.provider._client_auth.client_id, CLIENT_ID)
|
||||
self.assertEqual(self.provider._client_auth.client_secret, CLIENT_SECRET)
|
||||
|
||||
@override_config({"oidc_config": {"discover": True}})
|
||||
@override_config({"oidc_config": {**DEFAULT_CONFIG, "discover": True}})
|
||||
def test_discovery(self):
|
||||
"""The handler should discover the endpoints from OIDC discovery document."""
|
||||
# This would throw if some metadata were invalid
|
||||
|
@ -195,13 +220,13 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.provider.load_metadata())
|
||||
self.http_client.get_json.assert_not_called()
|
||||
|
||||
@override_config({"oidc_config": COMMON_CONFIG})
|
||||
@override_config({"oidc_config": EXPLICIT_ENDPOINT_CONFIG})
|
||||
def test_no_discovery(self):
|
||||
"""When discovery is disabled, it should not try to load from discovery document."""
|
||||
self.get_success(self.provider.load_metadata())
|
||||
self.http_client.get_json.assert_not_called()
|
||||
|
||||
@override_config({"oidc_config": COMMON_CONFIG})
|
||||
@override_config({"oidc_config": EXPLICIT_ENDPOINT_CONFIG})
|
||||
def test_load_jwks(self):
|
||||
"""JWKS loading is done once (then cached) if used."""
|
||||
jwks = self.get_success(self.provider.load_jwks())
|
||||
|
@ -236,6 +261,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.http_client.get_json.assert_not_called()
|
||||
self.assertEqual(jwks, {"keys": []})
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_validate_config(self):
|
||||
"""Provider metadatas are extensively validated."""
|
||||
h = self.provider
|
||||
|
@ -318,13 +344,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
# Shouldn't raise with a valid userinfo, even without jwks
|
||||
force_load_metadata()
|
||||
|
||||
@override_config({"oidc_config": {"skip_verification": True}})
|
||||
@override_config({"oidc_config": {**DEFAULT_CONFIG, "skip_verification": True}})
|
||||
def test_skip_verification(self):
|
||||
"""Provider metadata validation can be disabled by config."""
|
||||
with self.metadata_edit({"issuer": "http://insecure"}):
|
||||
# This should not throw
|
||||
get_awaitable_result(self.provider.load_metadata())
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_redirect_request(self):
|
||||
"""The redirect request has the right arguments & generates a valid session cookie."""
|
||||
req = Mock(spec=["cookies"])
|
||||
|
@ -360,20 +387,15 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.assertEqual(name, b"oidc_session")
|
||||
|
||||
macaroon = pymacaroons.Macaroon.deserialize(cookie)
|
||||
state = self.handler._token_generator._get_value_from_macaroon(
|
||||
macaroon, "state"
|
||||
)
|
||||
nonce = self.handler._token_generator._get_value_from_macaroon(
|
||||
macaroon, "nonce"
|
||||
)
|
||||
redirect = self.handler._token_generator._get_value_from_macaroon(
|
||||
macaroon, "client_redirect_url"
|
||||
)
|
||||
state = get_value_from_macaroon(macaroon, "state")
|
||||
nonce = get_value_from_macaroon(macaroon, "nonce")
|
||||
redirect = get_value_from_macaroon(macaroon, "client_redirect_url")
|
||||
|
||||
self.assertEqual(params["state"], [state])
|
||||
self.assertEqual(params["nonce"], [nonce])
|
||||
self.assertEqual(redirect, "http://client/redirect")
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_callback_error(self):
|
||||
"""Errors from the provider returned in the callback are displayed."""
|
||||
request = Mock(args={})
|
||||
|
@ -385,6 +407,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
self.assertRenderedError("invalid_client", "some description")
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_callback(self):
|
||||
"""Code callback works and display errors if something went wrong.
|
||||
|
||||
|
@ -434,7 +457,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
expected_user_id, request, client_redirect_url, None, new_user=True
|
||||
expected_user_id, "oidc", request, client_redirect_url, None, new_user=True
|
||||
)
|
||||
self.provider._exchange_code.assert_called_once_with(code)
|
||||
self.provider._parse_id_token.assert_called_once_with(token, nonce=nonce)
|
||||
|
@ -465,7 +488,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
expected_user_id, request, client_redirect_url, None, new_user=False
|
||||
expected_user_id, "oidc", request, client_redirect_url, None, new_user=False
|
||||
)
|
||||
self.provider._exchange_code.assert_called_once_with(code)
|
||||
self.provider._parse_id_token.assert_not_called()
|
||||
|
@ -486,6 +509,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
self.assertRenderedError("invalid_request")
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_callback_session(self):
|
||||
"""The callback verifies the session presence and validity"""
|
||||
request = Mock(spec=["args", "getCookie", "cookies"])
|
||||
|
@ -528,7 +552,9 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
self.assertRenderedError("invalid_request")
|
||||
|
||||
@override_config({"oidc_config": {"client_auth_method": "client_secret_post"}})
|
||||
@override_config(
|
||||
{"oidc_config": {**DEFAULT_CONFIG, "client_auth_method": "client_secret_post"}}
|
||||
)
|
||||
def test_exchange_code(self):
|
||||
"""Code exchange behaves correctly and handles various error scenarios."""
|
||||
token = {"type": "bearer"}
|
||||
|
@ -613,9 +639,105 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
"enabled": True,
|
||||
"client_id": CLIENT_ID,
|
||||
"issuer": ISSUER,
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret_jwt_key": {
|
||||
"key_file": _key_file_path(),
|
||||
"jwt_header": {"alg": "ES256", "kid": "ABC789"},
|
||||
"jwt_payload": {"iss": "DEFGHI"},
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
def test_exchange_code_jwt_key(self):
|
||||
"""Test that code exchange works with a JWK client secret."""
|
||||
from authlib.jose import jwt
|
||||
|
||||
token = {"type": "bearer"}
|
||||
self.http_client.request = simple_async_mock(
|
||||
return_value=FakeResponse(
|
||||
code=200, phrase=b"OK", body=json.dumps(token).encode("utf-8")
|
||||
)
|
||||
)
|
||||
code = "code"
|
||||
|
||||
# advance the clock a bit before we start, so we aren't working with zero
|
||||
# timestamps.
|
||||
self.reactor.advance(1000)
|
||||
start_time = self.reactor.seconds()
|
||||
ret = self.get_success(self.provider._exchange_code(code))
|
||||
|
||||
self.assertEqual(ret, token)
|
||||
|
||||
# the request should have hit the token endpoint
|
||||
kwargs = self.http_client.request.call_args[1]
|
||||
self.assertEqual(kwargs["method"], "POST")
|
||||
self.assertEqual(kwargs["uri"], TOKEN_ENDPOINT)
|
||||
|
||||
# the client secret provided to the should be a jwt which can be checked with
|
||||
# the public key
|
||||
args = parse_qs(kwargs["data"].decode("utf-8"))
|
||||
secret = args["client_secret"][0]
|
||||
with open(_public_key_file_path()) as f:
|
||||
key = f.read()
|
||||
claims = jwt.decode(secret, key)
|
||||
self.assertEqual(claims.header["kid"], "ABC789")
|
||||
self.assertEqual(claims["aud"], ISSUER)
|
||||
self.assertEqual(claims["iss"], "DEFGHI")
|
||||
self.assertEqual(claims["sub"], CLIENT_ID)
|
||||
self.assertEqual(claims["iat"], start_time)
|
||||
self.assertGreater(claims["exp"], start_time)
|
||||
|
||||
# check the rest of the POSTed data
|
||||
self.assertEqual(args["grant_type"], ["authorization_code"])
|
||||
self.assertEqual(args["code"], [code])
|
||||
self.assertEqual(args["client_id"], [CLIENT_ID])
|
||||
self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
|
||||
|
||||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
"enabled": True,
|
||||
"client_id": CLIENT_ID,
|
||||
"issuer": ISSUER,
|
||||
"client_auth_method": "none",
|
||||
}
|
||||
}
|
||||
)
|
||||
def test_exchange_code_no_auth(self):
|
||||
"""Test that code exchange works with no client secret."""
|
||||
token = {"type": "bearer"}
|
||||
self.http_client.request = simple_async_mock(
|
||||
return_value=FakeResponse(
|
||||
code=200, phrase=b"OK", body=json.dumps(token).encode("utf-8")
|
||||
)
|
||||
)
|
||||
code = "code"
|
||||
ret = self.get_success(self.provider._exchange_code(code))
|
||||
|
||||
self.assertEqual(ret, token)
|
||||
|
||||
# the request should have hit the token endpoint
|
||||
kwargs = self.http_client.request.call_args[1]
|
||||
self.assertEqual(kwargs["method"], "POST")
|
||||
self.assertEqual(kwargs["uri"], TOKEN_ENDPOINT)
|
||||
|
||||
# check the POSTed data
|
||||
args = parse_qs(kwargs["data"].decode("utf-8"))
|
||||
self.assertEqual(args["grant_type"], ["authorization_code"])
|
||||
self.assertEqual(args["code"], [code])
|
||||
self.assertEqual(args["client_id"], [CLIENT_ID])
|
||||
self.assertEqual(args["redirect_uri"], [CALLBACK_URL])
|
||||
|
||||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
**DEFAULT_CONFIG,
|
||||
"user_mapping_provider": {
|
||||
"module": __name__ + ".TestMappingProviderExtra"
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
|
@ -651,12 +773,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@foo:test",
|
||||
"oidc",
|
||||
request,
|
||||
client_redirect_url,
|
||||
{"phone": "1234567"},
|
||||
new_user=True,
|
||||
)
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_map_userinfo_to_user(self):
|
||||
"""Ensure that mapping the userinfo returned from a provider to an MXID works properly."""
|
||||
auth_handler = self.hs.get_auth_handler()
|
||||
|
@ -668,7 +792,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
}
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", ANY, ANY, None, new_user=True
|
||||
"@test_user:test", "oidc", ANY, ANY, None, new_user=True
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -679,7 +803,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
}
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user_2:test", ANY, ANY, None, new_user=True
|
||||
"@test_user_2:test", "oidc", ANY, ANY, None, new_user=True
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -697,7 +821,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
"Mapping provider does not support de-duplicating Matrix IDs",
|
||||
)
|
||||
|
||||
@override_config({"oidc_config": {"allow_existing_users": True}})
|
||||
@override_config({"oidc_config": {**DEFAULT_CONFIG, "allow_existing_users": True}})
|
||||
def test_map_userinfo_to_existing_user(self):
|
||||
"""Existing users can log in with OpenID Connect when allow_existing_users is True."""
|
||||
store = self.hs.get_datastore()
|
||||
|
@ -716,14 +840,14 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
}
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
user.to_string(), ANY, ANY, None, new_user=False
|
||||
user.to_string(), "oidc", ANY, ANY, None, new_user=False
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
# Subsequent calls should map to the same mxid.
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
user.to_string(), ANY, ANY, None, new_user=False
|
||||
user.to_string(), "oidc", ANY, ANY, None, new_user=False
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -738,7 +862,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
}
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
user.to_string(), ANY, ANY, None, new_user=False
|
||||
user.to_string(), "oidc", ANY, ANY, None, new_user=False
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -774,9 +898,10 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@TEST_USER_2:test", ANY, ANY, None, new_user=False
|
||||
"@TEST_USER_2:test", "oidc", ANY, ANY, None, new_user=False
|
||||
)
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_map_userinfo_to_invalid_localpart(self):
|
||||
"""If the mapping provider generates an invalid localpart it should be rejected."""
|
||||
self.get_success(
|
||||
|
@ -787,9 +912,10 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
**DEFAULT_CONFIG,
|
||||
"user_mapping_provider": {
|
||||
"module": __name__ + ".TestMappingProviderFailures"
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
|
@ -810,7 +936,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# test_user is already taken, so test_user1 gets registered instead.
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user1:test", ANY, ANY, None, new_user=True
|
||||
"@test_user1:test", "oidc", ANY, ANY, None, new_user=True
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -834,6 +960,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
"mapping_error", "Unable to generate a Matrix ID from the SSO response"
|
||||
)
|
||||
|
||||
@override_config({"oidc_config": DEFAULT_CONFIG})
|
||||
def test_empty_localpart(self):
|
||||
"""Attempts to map onto an empty localpart should be rejected."""
|
||||
userinfo = {
|
||||
|
@ -846,9 +973,10 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
**DEFAULT_CONFIG,
|
||||
"user_mapping_provider": {
|
||||
"config": {"localpart_template": "{{ user.username }}"}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
|
@ -866,7 +994,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
|||
state: str,
|
||||
nonce: str,
|
||||
client_redirect_url: str,
|
||||
ui_auth_session_id: Optional[str] = None,
|
||||
ui_auth_session_id: str = "",
|
||||
) -> str:
|
||||
from synapse.handlers.oidc_handler import OidcSessionData
|
||||
|
||||
|
@ -909,6 +1037,7 @@ async def _make_callback_with_userinfo(
|
|||
idp_id="oidc",
|
||||
nonce="nonce",
|
||||
client_redirect_url=client_redirect_url,
|
||||
ui_auth_session_id="",
|
||||
),
|
||||
)
|
||||
request = _build_callback_request("code", state, session)
|
||||
|
|
|
@ -131,7 +131,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=True
|
||||
"@test_user:test", "saml", request, "redirect_uri", None, new_user=True
|
||||
)
|
||||
|
||||
@override_config({"saml2_config": {"grandfathered_mxid_source_attribute": "mxid"}})
|
||||
|
@ -157,7 +157,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "", None, new_user=False
|
||||
"@test_user:test", "saml", request, "", None, new_user=False
|
||||
)
|
||||
|
||||
# Subsequent calls should map to the same mxid.
|
||||
|
@ -166,7 +166,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
|
|||
self.handler._handle_authn_response(request, saml_response, "")
|
||||
)
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "", None, new_user=False
|
||||
"@test_user:test", "saml", request, "", None, new_user=False
|
||||
)
|
||||
|
||||
def test_map_saml_response_to_invalid_localpart(self):
|
||||
|
@ -214,7 +214,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# test_user is already taken, so test_user1 gets registered instead.
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user1:test", request, "", None, new_user=True
|
||||
"@test_user1:test", "saml", request, "", None, new_user=True
|
||||
)
|
||||
auth_handler.complete_sso_login.reset_mock()
|
||||
|
||||
|
@ -310,7 +310,7 @@ class SamlHandlerTestCase(HomeserverTestCase):
|
|||
|
||||
# check that the auth handler got called as expected
|
||||
auth_handler.complete_sso_login.assert_called_once_with(
|
||||
"@test_user:test", request, "redirect_uri", None, new_user=True
|
||||
"@test_user:test", "saml", request, "redirect_uri", None, new_user=True
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@@ -22,6 +22,7 @@ from twisted.internet.protocol import Protocol
 from twisted.internet.task import LoopingCall
 from twisted.web.http import HTTPChannel
 from twisted.web.resource import Resource
+from twisted.web.server import Request, Site

 from synapse.app.generic_worker import (
     GenericWorkerReplicationHandler,
@@ -32,7 +33,10 @@ from synapse.http.site import SynapseRequest, SynapseSite
 from synapse.replication.http import ReplicationRestResource
 from synapse.replication.tcp.handler import ReplicationCommandHandler
 from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
-from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
+from synapse.replication.tcp.resource import (
+    ReplicationStreamProtocolFactory,
+    ServerReplicationStreamProtocol,
+)
 from synapse.server import HomeServer
 from synapse.util import Clock

@@ -59,7 +63,9 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
         # build a replication server
         server_factory = ReplicationStreamProtocolFactory(hs)
         self.streamer = hs.get_replication_streamer()
-        self.server = server_factory.buildProtocol(None)
+        self.server = server_factory.buildProtocol(
+            None
+        )  # type: ServerReplicationStreamProtocol

         # Make a new HomeServer object for the worker
         self.reactor.lookups["testserv"] = "1.2.3.4"
@@ -155,9 +161,7 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
         request_factory = OneShotRequestFactory()

         # Set up the server side protocol
-        channel = _PushHTTPChannel(self.reactor)
-        channel.requestFactory = request_factory
-        channel.site = self.site
+        channel = _PushHTTPChannel(self.reactor, request_factory, self.site)

         # Connect client to server and vice versa.
         client_to_server_transport = FakeTransport(
@@ -188,8 +192,9 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
         fetching updates for given stream.
         """

+        path = request.path  # type: bytes  # type: ignore
         self.assertRegex(
-            request.path,
+            path,
             br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
             % (stream_name.encode("ascii"),),
         )
@@ -390,9 +395,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
         request_factory = OneShotRequestFactory()

         # Set up the server side protocol
-        channel = _PushHTTPChannel(self.reactor)
-        channel.requestFactory = request_factory
-        channel.site = self._hs_to_site[hs]
+        channel = _PushHTTPChannel(self.reactor, request_factory, self._hs_to_site[hs])

         # Connect client to server and vice versa.
         client_to_server_transport = FakeTransport(
@@ -475,9 +478,13 @@ class _PushHTTPChannel(HTTPChannel):
     makes it very hard to test.
     """

-    def __init__(self, reactor: IReactorTime):
+    def __init__(
+        self, reactor: IReactorTime, request_factory: Callable[..., Request], site: Site
+    ):
         super().__init__()
         self.reactor = reactor
+        self.requestFactory = request_factory
+        self.site = site

         self._pull_to_push_producer = None  # type: Optional[_PullToPushProducer]
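The `_PushHTTPChannel` hunks are one refactoring: the request factory and site used to be attached to the channel after construction and are now constructor arguments, so a channel can no longer exist in a half-configured state. A toy sketch of the pattern (illustrative names, not Synapse's real classes):

    from typing import Any, Callable

    class PushChannel:
        """Collaborators arrive via __init__ instead of post-construction assignment."""

        def __init__(self, reactor: Any, request_factory: Callable[[], Any], site: Any):
            self.reactor = reactor
            self.requestFactory = request_factory
            self.site = site

    # One expression replaces the old create-then-assign three-step:
    channel = PushChannel(object(), lambda: object(), object())
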
@@ -105,7 +105,7 @@ class MediaStorageTests(unittest.HomeserverTestCase):
         self.assertEqual(test_body, body)


-@attr.s
+@attr.s(slots=True, frozen=True)
 class _TestImage:
     """An image for testing thumbnailing with the expected results
@@ -117,13 +117,15 @@ class _TestImage:
             test should just check for success.
         expected_scaled: The expected bytes from scaled thumbnailing, or None if
             test should just check for a valid image returned.
+        expected_found: True if the file should exist on the server, or False if
+            a 404 is expected.
     """

     data = attr.ib(type=bytes)
     content_type = attr.ib(type=bytes)
     extension = attr.ib(type=bytes)
-    expected_cropped = attr.ib(type=Optional[bytes])
-    expected_scaled = attr.ib(type=Optional[bytes])
+    expected_cropped = attr.ib(type=Optional[bytes], default=None)
+    expected_scaled = attr.ib(type=Optional[bytes], default=None)
     expected_found = attr.ib(default=True, type=bool)


@@ -153,6 +155,21 @@ class _TestImage:
                 ),
             ),
         ),
+        # small png with transparency.
+        (
+            _TestImage(
+                unhexlify(
+                    b"89504e470d0a1a0a0000000d49484452000000010000000101000"
+                    b"00000376ef9240000000274524e5300010194fdae0000000a4944"
+                    b"4154789c636800000082008177cd72b60000000049454e44ae426"
+                    b"082"
+                ),
+                b"image/png",
+                b".png",
+                # Note that we don't check the output since it varies across
+                # different versions of Pillow.
+            ),
+        ),
         # small lossless webp
         (
             _TestImage(
@@ -162,8 +179,6 @@ class _TestImage:
                 ),
                 b"image/webp",
                 b".webp",
-                None,
-                None,
             ),
         ),
         # an empty file
@@ -172,9 +187,7 @@ class _TestImage:
                 b"",
                 b"image/gif",
                 b".gif",
-                None,
-                None,
-                False,
+                expected_found=False,
             ),
         ),
     ],
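With `slots=True, frozen=True` the fixture class rejects stray attribute assignment, and the new `default=None` values let test cases omit the expected-output fields, which is what the shortened tuples in the later hunks rely on. A reduced, runnable sketch (requires the `attrs` package, as the file itself does; the byte strings are placeholders, not real image data):

    from typing import Optional

    import attr

    @attr.s(slots=True, frozen=True)
    class _TestImage:
        data = attr.ib(type=bytes)
        content_type = attr.ib(type=bytes)
        extension = attr.ib(type=bytes)
        expected_cropped = attr.ib(type=Optional[bytes], default=None)
        expected_scaled = attr.ib(type=Optional[bytes], default=None)
        expected_found = attr.ib(default=True, type=bool)

    # Trailing fields can now be omitted or given by keyword:
    empty_gif = _TestImage(b"", b"image/gif", b".gif", expected_found=False)
    small_png = _TestImage(b"\x89PNG...", b"image/png", b".png")
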
@@ -188,7 +188,7 @@ class FakeSite:

 def make_request(
     reactor,
-    site: Site,
+    site: Union[Site, FakeSite],
     method,
     path,
     content=b"",
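The widened annotation records that callers may pass either a real Twisted `Site` or the test double. A sketch of the same idea with stand-in classes (not the real definitions):

    from typing import Union

    class Site:
        """Stand-in for twisted.web.server.Site."""

    class FakeSite:
        """Stand-in for the test helper that fakes just enough of Site."""

    def make_request(site: Union[Site, FakeSite], path: bytes) -> None:
        # Only behaviour common to both types may be used in here.
        ...
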
@@ -13,9 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from twisted.internet import defer
-
-from synapse.api.errors import NotFoundError
+from synapse.api.errors import NotFoundError, SynapseError
 from synapse.rest.client.v1 import room

 from tests.unittest import HomeserverTestCase
@@ -33,9 +31,12 @@ class PurgeTests(HomeserverTestCase):
     def prepare(self, reactor, clock, hs):
         self.room_id = self.helper.create_room_as(self.user_id)

-    def test_purge(self):
+        self.store = hs.get_datastore()
+        self.storage = self.hs.get_storage()
+
+    def test_purge_history(self):
         """
-        Purging a room will delete everything before the topological point.
+        Purging a room history will delete everything before the topological point.
         """
         # Send four messages to the room
         first = self.helper.send(self.room_id, body="test1")
@@ -43,30 +44,27 @@ class PurgeTests(HomeserverTestCase):
         third = self.helper.send(self.room_id, body="test3")
         last = self.helper.send(self.room_id, body="test4")

-        store = self.hs.get_datastore()
-        storage = self.hs.get_storage()
-
         # Get the topological token
         token = self.get_success(
-            store.get_topological_token_for_event(last["event_id"])
+            self.store.get_topological_token_for_event(last["event_id"])
         )
         token_str = self.get_success(token.to_string(self.hs.get_datastore()))

         # Purge everything before this topological token
         self.get_success(
-            storage.purge_events.purge_history(self.room_id, token_str, True)
+            self.storage.purge_events.purge_history(self.room_id, token_str, True)
         )

         # 1-3 should fail and last will succeed, meaning that 1-3 are deleted
         # and last is not.
-        self.get_failure(store.get_event(first["event_id"]), NotFoundError)
-        self.get_failure(store.get_event(second["event_id"]), NotFoundError)
-        self.get_failure(store.get_event(third["event_id"]), NotFoundError)
-        self.get_success(store.get_event(last["event_id"]))
+        self.get_failure(self.store.get_event(first["event_id"]), NotFoundError)
+        self.get_failure(self.store.get_event(second["event_id"]), NotFoundError)
+        self.get_failure(self.store.get_event(third["event_id"]), NotFoundError)
+        self.get_success(self.store.get_event(last["event_id"]))

-    def test_purge_wont_delete_extrems(self):
+    def test_purge_history_wont_delete_extrems(self):
         """
-        Purging a room will delete everything before the topological point.
+        Purging a room history will delete everything before the topological point.
         """
         # Send four messages to the room
         first = self.helper.send(self.room_id, body="test1")
@@ -74,22 +72,43 @@ class PurgeTests(HomeserverTestCase):
         third = self.helper.send(self.room_id, body="test3")
         last = self.helper.send(self.room_id, body="test4")

-        storage = self.hs.get_datastore()
-
         # Set the topological token higher than it should be
         token = self.get_success(
-            storage.get_topological_token_for_event(last["event_id"])
+            self.store.get_topological_token_for_event(last["event_id"])
         )
         event = "t{}-{}".format(token.topological + 1, token.stream + 1)

         # Purge everything before this topological token
-        purge = defer.ensureDeferred(storage.purge_history(self.room_id, event, True))
-        self.pump()
-        f = self.failureResultOf(purge)
+        f = self.get_failure(
+            self.storage.purge_events.purge_history(self.room_id, event, True),
+            SynapseError,
+        )
         self.assertIn("greater than forward", f.value.args[0])

         # Try and get the events
-        self.get_success(storage.get_event(first["event_id"]))
-        self.get_success(storage.get_event(second["event_id"]))
-        self.get_success(storage.get_event(third["event_id"]))
-        self.get_success(storage.get_event(last["event_id"]))
+        self.get_success(self.store.get_event(first["event_id"]))
+        self.get_success(self.store.get_event(second["event_id"]))
+        self.get_success(self.store.get_event(third["event_id"]))
+        self.get_success(self.store.get_event(last["event_id"]))
+
+    def test_purge_room(self):
+        """
+        Purging a room will delete everything about it.
+        """
+        # Send four messages to the room
+        first = self.helper.send(self.room_id, body="test1")
+
+        # Get the current room state.
+        state_handler = self.hs.get_state_handler()
+        create_event = self.get_success(
+            state_handler.get_current_state(self.room_id, "m.room.create", "")
+        )
+        self.assertIsNotNone(create_event)
+
+        # Purge everything before this topological token
+        self.get_success(self.storage.purge_events.purge_room(self.room_id))
+
+        # The events aren't found.
+        self.store._invalidate_get_event_cache(create_event.event_id)
+        self.get_failure(self.store.get_event(create_event.event_id), NotFoundError)
+        self.get_failure(self.store.get_event(first["event_id"]), NotFoundError)
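The purge tests now share `self.store` / `self.storage` from `prepare()` and exercise two storage entry points. A rough sketch of how they are driven (signatures as used in the hunks above; the real methods are async and live on Synapse's storage layer, so treat this as an outline rather than the definitive API):

    async def purge_before(store, storage, room_id: str, last_event_id: str) -> None:
        # Delete everything topologically before the given event. The trailing
        # True mirrors the tests' third argument (assumed here to opt in to
        # deleting locally-sent events as well).
        token = await store.get_topological_token_for_event(last_event_id)
        token_str = await token.to_string(store)
        await storage.purge_events.purge_history(room_id, token_str, True)

    async def purge_everything(storage, room_id: str) -> None:
        # purge_room removes all trace of a room, state events included.
        await storage.purge_events.purge_room(room_id)
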
@@ -28,7 +28,7 @@ class ToTwistedHandler(logging.Handler):
     def emit(self, record):
         log_entry = self.format(record)
         log_level = record.levelname.lower().replace("warning", "warn")
-        self.tx_log.emit(
+        self.tx_log.emit(  # type: ignore
             twisted.logger.LogLevel.levelWithName(log_level), "{entry}", entry=log_entry
         )
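The only change here is a `# type: ignore` for mypy; the surrounding code bridges stdlib logging to Twisted's logger. For reference, a runnable sketch of the level-name translation it depends on (Twisted names the WARNING level "warn"):

    import twisted.logger

    # stdlib "WARNING" must become Twisted's "warn" before the lookup succeeds.
    for name in ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"):
        tx_name = name.lower().replace("warning", "warn")
        level = twisted.logger.LogLevel.levelWithName(tx_name)
        print(name, "->", level)
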
131
tests/util/caches/test_responsecache.py
Normal file

@@ -0,0 +1,131 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.caches.response_cache import ResponseCache
+
+from tests.server import get_clock
+from tests.unittest import TestCase
+
+
+class DeferredCacheTestCase(TestCase):
+    """
+    A TestCase class for ResponseCache.
+
+    The test-case function naming has some logic to it in its parts; here are some notes about it:
+        wait: denotes tests that have an element of "waiting" before the wrapped result becomes available
+            (generally these use .delayed_return instead of .instant_return in the wrapped call).
+        expire: denotes tests that check expiry after assured existence
+            (these have a cache with a short timeout_ms=, shorter than will be tested through advancing the clock).
+    """
+
+    def setUp(self):
+        self.reactor, self.clock = get_clock()
+
+    def with_cache(self, name: str, ms: int = 0) -> ResponseCache:
+        return ResponseCache(self.clock, name, timeout_ms=ms)
+
+    @staticmethod
+    async def instant_return(o: str) -> str:
+        return o
+
+    async def delayed_return(self, o: str) -> str:
+        await self.clock.sleep(1)
+        return o
+
+    def test_cache_hit(self):
+        cache = self.with_cache("keeping_cache", ms=9001)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should have the result",
+        )
+
+    def test_cache_miss(self):
+        cache = self.with_cache("trashing_cache", ms=0)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(wrap_d),
+            "initial wrap result should be the same",
+        )
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_expire(self):
+        cache = self.with_cache("short_cache", ms=1000)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.instant_return, expected_result)
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should still have the result",
+        )
+
+        # cache eviction timer is handled
+        self.reactor.pump((2,))
+
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
+
+    def test_cache_wait_hit(self):
+        cache = self.with_cache("neutral_cache")
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.delayed_return, expected_result)
+        self.assertNoResult(wrap_d)
+
+        # function wakes up, returns result
+        self.reactor.pump((2,))
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+
+    def test_cache_wait_expire(self):
+        cache = self.with_cache("medium_cache", ms=3000)
+
+        expected_result = "howdy"
+
+        wrap_d = cache.wrap(0, self.delayed_return, expected_result)
+        self.assertNoResult(wrap_d)
+
+        # stop at 1 second to callback cache eviction callLater at that time, then another to set time at 2
+        self.reactor.pump((1, 1))
+
+        self.assertEqual(expected_result, self.successResultOf(wrap_d))
+        self.assertEqual(
+            expected_result,
+            self.successResultOf(cache.get(0)),
+            "cache should still have the result",
+        )
+
+        # (1 + 1 + 2) > 3.0, cache eviction timer is handled
+        self.reactor.pump((2,))
+
+        self.assertIsNone(cache.get(0), "cache should not have the result now")
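A brief usage sketch of the contract these tests pin down (`clock` as produced by the `get_clock()` helper above; treat it as an outline of the observed behaviour, not extra API guarantees):

    from synapse.util.caches.response_cache import ResponseCache

    async def compute(key: str) -> str:
        return "value for " + key

    def example(clock) -> None:
        cache = ResponseCache(clock, "example_cache", timeout_ms=1000)

        # The first wrap() for a key starts the computation; a concurrent
        # wrap() with the same key shares the in-flight result instead of
        # recomputing.
        first = cache.wrap("k", compute, "k")
        second = cache.wrap("k", compute, "k")

        # After the result resolves, cache.get("k") keeps returning it until
        # timeout_ms has elapsed; past that, get() returns None (a miss).
        assert first is not None and second is not None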