From 6901ac7e9d1c543b04baf9e23063f5107e90ea7f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 28 Jan 2019 12:15:22 -0700 Subject: [PATCH 01/20] Don't recommend :8448 to people on public_baseurl --- synapse/config/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/config/server.py b/synapse/config/server.py index 22dcc87d8a..268a43ff00 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -261,7 +261,7 @@ class ServerConfig(Config): # enter into the 'custom HS URL' field on their client. If you # use synapse with a reverse proxy, this should be the URL to reach # synapse via the proxy. - # public_baseurl: https://example.com:8448/ + # public_baseurl: https://example.com/ # Set the soft limit on the number of file descriptors synapse can use # Zero is used to indicate synapse should set the soft limit to the From c4045647eb17ff6713b0a62a6b4ec91adb7a67fa Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 28 Jan 2019 12:16:39 -0700 Subject: [PATCH 02/20] Create 4498.misc --- changelog.d/4498.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4498.misc diff --git a/changelog.d/4498.misc b/changelog.d/4498.misc new file mode 100644 index 0000000000..8b3bc94a34 --- /dev/null +++ b/changelog.d/4498.misc @@ -0,0 +1 @@ +Clarify documentation for the `public_baseurl` config param From 554ca58ea1729804f413630b98fd158f324041a0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Jan 2019 17:17:38 +0000 Subject: [PATCH 03/20] Make add_hashes_and_signatures operate on dicts --- synapse/crypto/event_signing.py | 16 ++++----- tests/crypto/test_event_signing.py | 52 ++++++++++++++---------------- 2 files changed, 30 insertions(+), 38 deletions(-) diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index d01ac5075c..1dfa727fcf 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -131,12 +131,12 @@ def compute_event_signature(event_dict, signature_name, signing_key): return redact_json["signatures"] -def add_hashes_and_signatures(event, signature_name, signing_key, +def add_hashes_and_signatures(event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256): """Add content hash and sign the event Args: - event_dict (EventBuilder): The event to add hashes to and sign + event_dict (dict): The event to add hashes to and sign signature_name (str): The name of the entity signing the event (typically the server's hostname). 
signing_key (syutil.crypto.SigningKey): The key to sign with @@ -144,16 +144,12 @@ def add_hashes_and_signatures(event, signature_name, signing_key, to hash the event """ - name, digest = compute_content_hash( - event.get_pdu_json(), hash_algorithm=hash_algorithm, - ) + name, digest = compute_content_hash(event_dict, hash_algorithm=hash_algorithm) - if not hasattr(event, "hashes"): - event.hashes = {} - event.hashes[name] = encode_base64(digest) + event_dict.setdefault("hashes", {})[name] = encode_base64(digest) - event.signatures = compute_event_signature( - event.get_pdu_json(), + event_dict["signatures"] = compute_event_signature( + event_dict, signature_name=signature_name, signing_key=signing_key, ) diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index b2536c1e69..71aa731439 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -18,7 +18,7 @@ import nacl.signing from unpaddedbase64 import decode_base64 from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.events.builder import EventBuilder +from synapse.events import FrozenEvent from tests import unittest @@ -40,20 +40,18 @@ class EventSigningTestCase(unittest.TestCase): self.signing_key.version = KEY_VER def test_sign_minimal(self): - builder = EventBuilder( - { - 'event_id': "$0:domain", - 'origin': "domain", - 'origin_server_ts': 1000000, - 'signatures': {}, - 'type': "X", - 'unsigned': {'age_ts': 1000000}, - } - ) + event_dict = { + 'event_id': "$0:domain", + 'origin': "domain", + 'origin_server_ts': 1000000, + 'signatures': {}, + 'type': "X", + 'unsigned': {'age_ts': 1000000}, + } - add_hashes_and_signatures(builder, HOSTNAME, self.signing_key) + add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key) - event = builder.build() + event = FrozenEvent(event_dict) self.assertTrue(hasattr(event, 'hashes')) self.assertIn('sha256', event.hashes) @@ -71,23 +69,21 @@ class EventSigningTestCase(unittest.TestCase): ) def test_sign_message(self): - builder = EventBuilder( - { - 'content': {'body': "Here is the message content"}, - 'event_id': "$0:domain", - 'origin': "domain", - 'origin_server_ts': 1000000, - 'type': "m.room.message", - 'room_id': "!r:domain", - 'sender': "@u:domain", - 'signatures': {}, - 'unsigned': {'age_ts': 1000000}, - } - ) + event_dict = { + 'content': {'body': "Here is the message content"}, + 'event_id': "$0:domain", + 'origin': "domain", + 'origin_server_ts': 1000000, + 'type': "m.room.message", + 'room_id': "!r:domain", + 'sender': "@u:domain", + 'signatures': {}, + 'unsigned': {'age_ts': 1000000}, + } - add_hashes_and_signatures(builder, HOSTNAME, self.signing_key) + add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key) - event = builder.build() + event = FrozenEvent(event_dict) self.assertTrue(hasattr(event, 'hashes')) self.assertIn('sha256', event.hashes) From be47cfa9c97b4acfd884440f1953ed000225eb37 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Jan 2019 17:19:31 +0000 Subject: [PATCH 04/20] Refactor event building into EventBuilder This is so that everything is done in one place, making it easier to change the event format based on room version --- synapse/events/builder.py | 290 ++++++++++++++++++------ synapse/federation/federation_client.py | 20 +- synapse/handlers/message.py | 34 +-- synapse/server.py | 5 +- synapse/storage/event_federation.py | 23 ++ 5 files changed, 257 insertions(+), 115 deletions(-) diff --git a/synapse/events/builder.py b/synapse/events/builder.py 
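For reference, the new dict-based calling convention can be exercised as in the updated tests above. The following is a minimal sketch (the key setup mirrors EventSigningTestCase; the domain and field values are illustrative):

    import nacl.signing

    from synapse.crypto.event_signing import add_hashes_and_signatures
    from synapse.events import FrozenEvent

    # An ad-hoc ed25519 key, decorated with the alg/version attributes the
    # signing code expects (the test case above sets these up the same way).
    signing_key = nacl.signing.SigningKey.generate()
    signing_key.alg = "ed25519"
    signing_key.version = "1"

    event_dict = {
        'event_id': "$0:example.com",
        'origin': "example.com",
        'origin_server_ts': 1000000,
        'signatures': {},
        'type': "X",
        'unsigned': {'age_ts': 1000000},
    }

    # Mutates the dict in place: adds a sha256 content hash under "hashes"
    # and a signature under signatures["example.com"]["ed25519:1"].
    add_hashes_and_signatures(event_dict, "example.com", signing_key)

    event = FrozenEvent(event_dict)
    assert 'sha256' in event.hashes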
index 7e63371095..225b5fd670 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -13,79 +13,156 @@ # See the License for the specific language governing permissions and # limitations under the License. -import copy +import attr -from synapse.api.constants import RoomVersions +from twisted.internet import defer + +from synapse.api.constants import ( + KNOWN_EVENT_FORMAT_VERSIONS, + KNOWN_ROOM_VERSIONS, + MAX_DEPTH, +) +from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.types import EventID from synapse.util.stringutils import random_string -from . import EventBase, FrozenEvent, _event_dict_property +from . import ( + _EventInternalMetadata, + event_type_from_format_version, + room_version_to_event_format, +) -def get_event_builder(room_version, key_values={}, internal_metadata_dict={}): - """Generate an event builder appropriate for the given room version +@attr.s(slots=True, cmp=False, frozen=True) +class EventBuilder(object): + """A format independent event builder used to build up the event content + before signing the event. - Args: - room_version (str): Version of the room that we're creating an - event builder for - key_values (dict): Fields used as the basis of the new event - internal_metadata_dict (dict): Used to create the `_EventInternalMetadata` - object. + (Note that while objects of this class are frozen, the + content/unsigned/internal_metadata fields are still mutable) - Returns: - EventBuilder + Attributes: + format_version (int): Event format version + room_id (str) + type (str) + sender (str) + content (dict) + unsigned (dict) + internal_metadata (_EventInternalMetadata) + + _state (StateHandler) + _auth (synapse.api.Auth) + _store (DataStore) + _clock (Clock) + _hostname (str): The hostname of the server creating the event + _signing_key: The signing key to use to sign the event as the server """ - if room_version in { - RoomVersions.V1, - RoomVersions.V2, - RoomVersions.VDH_TEST, - RoomVersions.STATE_V2_TEST, - }: - return EventBuilder(key_values, internal_metadata_dict) - else: - raise Exception( - "No event format defined for version %r" % (room_version,) + + _state = attr.ib() + _auth = attr.ib() + _store = attr.ib() + _clock = attr.ib() + _hostname = attr.ib() + _signing_key = attr.ib() + + format_version = attr.ib() + + room_id = attr.ib() + type = attr.ib() + sender = attr.ib() + + content = attr.ib(default=attr.Factory(dict)) + unsigned = attr.ib(default=attr.Factory(dict)) + + # These only exist on a subset of events, so they raise AttributeError if + # someone tries to get them when they don't exist. 
+ _state_key = attr.ib(default=None) + _redacts = attr.ib(default=None) + + internal_metadata = attr.ib(default=attr.Factory(lambda: _EventInternalMetadata({}))) + + @property + def state_key(self): + if self._state_key is not None: + return self._state_key + + raise AttributeError("state_key") + + def is_state(self): + return self._state_key is not None + + @defer.inlineCallbacks + def build(self, prev_event_ids): + """Transform into a fully signed and hashed event + + Args: + prev_event_ids (list[str]): The event IDs to use as the prev events + + Returns: + Deferred[FrozenEvent] + """ + + state_ids = yield self._state.get_current_state_ids( + self.room_id, prev_event_ids, + ) + auth_ids = yield self._auth.compute_auth_events( + self, state_ids, ) + auth_events = yield self._store.add_event_hashes(auth_ids) + prev_events = yield self._store.add_event_hashes(prev_event_ids) -class EventBuilder(EventBase): - def __init__(self, key_values={}, internal_metadata_dict={}): - signatures = copy.deepcopy(key_values.pop("signatures", {})) - unsigned = copy.deepcopy(key_values.pop("unsigned", {})) - - super(EventBuilder, self).__init__( - key_values, - signatures=signatures, - unsigned=unsigned, - internal_metadata_dict=internal_metadata_dict, + old_depth = yield self._store.get_max_depth_of( + prev_event_ids, ) + depth = old_depth + 1 - event_id = _event_dict_property("event_id") - state_key = _event_dict_property("state_key") - type = _event_dict_property("type") + # we cap depth of generated events, to ensure that they are not + # rejected by other servers (and so that they can be persisted in + # the db) + depth = min(depth, MAX_DEPTH) - def build(self): - return FrozenEvent.from_event(self) + event_dict = { + "auth_events": auth_events, + "prev_events": prev_events, + "type": self.type, + "room_id": self.room_id, + "sender": self.sender, + "content": self.content, + "unsigned": self.unsigned, + "depth": depth, + "prev_state": [], + } + + if self.is_state(): + event_dict["state_key"] = self._state_key + + if self._redacts is not None: + event_dict["redacts"] = self._redacts + + defer.returnValue( + create_local_event_from_event_dict( + clock=self._clock, + hostname=self._hostname, + signing_key=self._signing_key, + format_version=self.format_version, + event_dict=event_dict, + internal_metadata_dict=self.internal_metadata.get_dict(), + ) + ) class EventBuilderFactory(object): - def __init__(self, clock, hostname): - self.clock = clock - self.hostname = hostname + def __init__(self, hs): + self.clock = hs.get_clock() + self.hostname = hs.hostname + self.signing_key = hs.config.signing_key[0] - self.event_id_count = 0 + self.store = hs.get_datastore() + self.state = hs.get_state_handler() + self.auth = hs.get_auth() - def create_event_id(self): - i = str(self.event_id_count) - self.event_id_count += 1 - - local_part = str(int(self.clock.time())) + i + random_string(5) - - e_id = EventID(local_part, self.hostname) - - return e_id.to_string() - - def new(self, room_version, key_values={}): + def new(self, room_version, key_values): """Generate an event builder appropriate for the given room version Args: @@ -98,27 +175,104 @@ class EventBuilderFactory(object): """ # There's currently only the one event version defined - if room_version not in { - RoomVersions.V1, - RoomVersions.V2, - RoomVersions.VDH_TEST, - RoomVersions.STATE_V2_TEST, - }: + if room_version not in KNOWN_ROOM_VERSIONS: raise Exception( "No event format defined for version %r" % (room_version,) ) - key_values["event_id"] = 
self.create_event_id() + key_values["event_id"] = _create_event_id(self.clock, self.hostname) - time_now = int(self.clock.time_msec()) + return EventBuilder( + store=self.store, + state=self.state, + auth=self.auth, + clock=self.clock, + hostname=self.hostname, + signing_key=self.signing_key, + format_version=room_version_to_event_format(room_version), + type=key_values["type"], + state_key=key_values.get("state_key"), + room_id=key_values["room_id"], + sender=key_values["sender"], + content=key_values.get("content", {}), + unsigned=key_values.get("unsigned", {}), + redacts=key_values.get("redacts", None), + ) - key_values.setdefault("origin", self.hostname) - key_values.setdefault("origin_server_ts", time_now) - key_values.setdefault("unsigned", {}) - age = key_values["unsigned"].pop("age", 0) - key_values["unsigned"].setdefault("age_ts", time_now - age) +def create_local_event_from_event_dict(clock, hostname, signing_key, + format_version, event_dict, + internal_metadata_dict=None): + """Takes a fully formed event dict, ensuring that fields like `origin` + and `origin_server_ts` have correct values for a locally produced event, + then signs and hashes it. - key_values["signatures"] = {} + Args: + clock (Clock) + hostname (str) + signing_key + format_version (int) + event_dict (dict) + internal_metadata_dict (dict|None) - return EventBuilder(key_values=key_values,) + Returns: + FrozenEvent + """ + + # There's currently only the one event version defined + if format_version not in KNOWN_EVENT_FORMAT_VERSIONS: + raise Exception( + "No event format defined for version %r" % (format_version,) + ) + + if internal_metadata_dict is None: + internal_metadata_dict = {} + + time_now = int(clock.time_msec()) + + event_dict["event_id"] = _create_event_id(clock, hostname) + + event_dict["origin"] = hostname + event_dict["origin_server_ts"] = time_now + + event_dict.setdefault("unsigned", {}) + age = event_dict["unsigned"].pop("age", 0) + event_dict["unsigned"].setdefault("age_ts", time_now - age) + + event_dict.setdefault("signatures", {}) + + add_hashes_and_signatures( + event_dict, + hostname, + signing_key, + ) + return event_type_from_format_version(format_version)( + event_dict, internal_metadata_dict=internal_metadata_dict, + ) + + +# A counter used when generating new event IDs +_event_id_counter = 0 + + +def _create_event_id(clock, hostname): + """Create a new event ID + + Args: + clock (Clock) + hostname (str): The server name for the event ID + + Returns: + str + """ + + global _event_id_counter + + i = str(_event_id_counter) + _event_id_counter += 1 + + local_part = str(int(clock.time())) + i + random_string(5) + + e_id = EventID(local_part, hostname) + + return e_id.to_string() diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 71809893c5..be3bb59431 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -37,8 +37,7 @@ from synapse.api.errors import ( HttpResponseException, SynapseError, ) -from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.events import room_version_to_event_format +from synapse.events import builder, room_version_to_event_format from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.util import logcontext, unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache @@ -72,7 +71,8 @@ class FederationClient(FederationBase): self.state = hs.get_state_handler() self.transport_layer = 
hs.get_federation_transport_client() - self.event_builder_factory = hs.get_event_builder_factory() + self.hostname = hs.hostname + self.signing_key = hs.config.signing_key[0] self._get_pdu_cache = ExpiringCache( cache_name="get_pdu_cache", @@ -608,18 +608,10 @@ class FederationClient(FederationBase): if "prev_state" not in pdu_dict: pdu_dict["prev_state"] = [] - # Strip off the fields that we want to clobber. - pdu_dict.pop("origin", None) - pdu_dict.pop("origin_server_ts", None) - pdu_dict.pop("unsigned", None) - - builder = self.event_builder_factory.new(room_version, pdu_dict) - add_hashes_and_signatures( - builder, - self.hs.hostname, - self.hs.config.signing_key[0] + ev = builder.create_local_event_from_event_dict( + self._clock, self.hostname, self.signing_key, + format_version=event_format, event_dict=pdu_dict, ) - ev = builder.build() defer.returnValue( (destination, ev, event_format) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 05d1370c18..ac6f4fd985 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ from canonicaljson import encode_canonical_json, json from twisted.internet import defer from twisted.internet.defer import succeed -from synapse.api.constants import MAX_DEPTH, EventTypes, Membership, RoomVersions +from synapse.api.constants import EventTypes, Membership, RoomVersions from synapse.api.errors import ( AuthError, Codes, @@ -31,7 +31,6 @@ from synapse.api.errors import ( SynapseError, ) from synapse.api.urls import ConsentURIBuilder -from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator from synapse.replication.http.send_event import ReplicationSendEventRestServlet @@ -545,40 +544,17 @@ class EventCreationHandler(object): prev_events_and_hashes = \ yield self.store.get_prev_events_for_room(builder.room_id) - if prev_events_and_hashes: - depth = max([d for _, _, d in prev_events_and_hashes]) + 1 - # we cap depth of generated events, to ensure that they are not - # rejected by other servers (and so that they can be persisted in - # the db) - depth = min(depth, MAX_DEPTH) - else: - depth = 1 - prev_events = [ (event_id, prev_hashes) for event_id, prev_hashes, _ in prev_events_and_hashes ] - builder.prev_events = prev_events - builder.depth = depth - - context = yield self.state.compute_event_context(builder) - if requester: - context.app_service = requester.app_service - - if builder.is_state(): - builder.prev_state = yield self.store.add_event_hashes( - context.prev_state_events - ) - - yield self.auth.add_auth_events(builder, context) - - signing_key = self.hs.config.signing_key[0] - add_hashes_and_signatures( - builder, self.server_name, signing_key + event = yield builder.build( + prev_event_ids=[p for p, _ in prev_events], ) + context = yield self.state.compute_event_context(event) - event = builder.build() + self.validator.validate_new(event) logger.debug( "Created event %s", diff --git a/synapse/server.py b/synapse/server.py index c8914302cf..6c52101616 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -355,10 +355,7 @@ class HomeServer(object): return Keyring(self) def build_event_builder_factory(self): - return EventBuilderFactory( - clock=self.get_clock(), - hostname=self.hostname, - ) + return EventBuilderFactory(self) def build_filtering(self): return Filtering(self) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 
d3b9dea1d6..38809ed0fc 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -125,6 +125,29 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, return dict(txn) + @defer.inlineCallbacks + def get_max_depth_of(self, event_ids): + """Returns the max depth of a set of event IDs + + Args: + event_ids (list[str]) + + Returns + Deferred[int] + """ + rows = yield self._simple_select_many_batch( + table="events", + column="event_id", + iterable=event_ids, + retcols=("depth",), + desc="get_max_depth_of", + ) + + if not rows: + defer.returnValue(0) + else: + defer.returnValue(max(row["depth"] for row in rows)) + def _get_oldest_events_in_room_txn(self, txn, room_id): return self._simple_select_onecol_txn( txn, From a388d59d44f72db0feecfb811773c847ed4ebe3d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Jan 2019 17:54:02 +0000 Subject: [PATCH 05/20] Newsfile --- changelog.d/4481.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4481.misc diff --git a/changelog.d/4481.misc b/changelog.d/4481.misc new file mode 100644 index 0000000000..43f8963614 --- /dev/null +++ b/changelog.d/4481.misc @@ -0,0 +1 @@ +Add infrastructure to support different event formats From 5180f12bae880f76a483bc945951635c77596499 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 11:18:35 +0000 Subject: [PATCH 06/20] Replace usage of builder.user_id with builder.sender `.user_id` is proxed to `.sender` in FrozenEvent, so this has no functional change --- synapse/api/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 7b213e54c8..1a47fede71 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -577,7 +577,7 @@ class Auth(object): key = (EventTypes.JoinRules, "", ) join_rule_event_id = current_state_ids.get(key) - key = (EventTypes.Member, event.user_id, ) + key = (EventTypes.Member, event.sender, ) member_event_id = current_state_ids.get(key) key = (EventTypes.Create, "", ) From aee39f7de8c79acb780c9903dd17f7d0b6a2b760 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 11:19:50 +0000 Subject: [PATCH 07/20] Fix test to use valid event format --- tests/test_visibility.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 82d63ce00e..455db9f276 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -166,7 +166,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def inject_message(self, user_id, content=None): if content is None: - content = {"body": "testytest"} + content = {"body": "testytest", "msgtype": "m.text"} builder = self.event_builder_factory.new( RoomVersions.V1, { From 6598992b01629062791cd4d98f062aeabd4f7baf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 11:28:02 +0000 Subject: [PATCH 08/20] Fixup calls to `comput_event_signature` We currently pass FrozenEvent instead of `dict` to `compute_event_signature`, which works by accident due to `dict(event)` producing the correct result. 
This fixes PR #4493 commit 855a151 --- synapse/federation/federation_server.py | 2 +- synapse/handlers/federation.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 4aa04b9588..6681614232 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -322,7 +322,7 @@ class FederationServer(FederationBase): if self.hs.is_mine_id(event.event_id): event.signatures.update( compute_event_signature( - event, + event.get_pdu_json(), self.hs.hostname, self.hs.config.signing_key[0] ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index adf59db7a8..fcaf7530b0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1300,7 +1300,7 @@ class FederationHandler(BaseHandler): event.signatures.update( compute_event_signature( - event, + event.get_pdu_json(), self.hs.hostname, self.hs.config.signing_key[0] ) From 0c55b7701ce33ae978c4d8cff4fa95305f689c3b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 11:42:33 +0000 Subject: [PATCH 09/20] Newsfile --- changelog.d/4510.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4510.misc diff --git a/changelog.d/4510.misc b/changelog.d/4510.misc new file mode 100644 index 0000000000..43f8963614 --- /dev/null +++ b/changelog.d/4510.misc @@ -0,0 +1 @@ +Add infrastructure to support different event formats From 64c1bd1d21e94bb08082ac63992697f99192912b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 12:06:28 +0000 Subject: [PATCH 10/20] Remove dead function --- synapse/api/auth.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 1a47fede71..2d78a257d3 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -550,17 +550,6 @@ class Auth(object): """ return self.store.is_server_admin(user) - @defer.inlineCallbacks - def add_auth_events(self, builder, context): - prev_state_ids = yield context.get_prev_state_ids(self.store) - auth_ids = yield self.compute_auth_events(builder, prev_state_ids) - - auth_events_entries = yield self.store.add_event_hashes( - auth_ids - ) - - builder.auth_events = auth_events_entries - @defer.inlineCallbacks def compute_auth_events(self, event, current_state_ids, for_verification=False): if event.type == EventTypes.Create: From 5488cadaae37af4e6d4f847f6eddb38eca6e7c04 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 29 Jan 2019 12:07:00 +0000 Subject: [PATCH 11/20] Enable configuring test log level via env var (#4506) I got fed up with always adding '@unittest.DEBUG' every time I needed to debug a test. 
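In practice the change boils down to reading SYNAPSE_TEST_LOG_LEVEL and applying it to the root logger, as the new tests/test_utils/logging_setup.py below does (the trial invocation shown in the comment is illustrative):

    import logging
    import os

    # Default to ERROR so test output stays quiet unless explicitly asked for,
    # e.g.  SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests.crypto.test_event_signing
    log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR")
    logging.getLogger().setLevel(log_level)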
--- changelog.d/4506.misc | 1 + tests/test_utils/__init__.py | 18 +++++++++++ tests/test_utils/logging_setup.py | 54 +++++++++++++++++++++++++++++++ tests/unittest.py | 37 ++++----------------- 4 files changed, 80 insertions(+), 30 deletions(-) create mode 100644 changelog.d/4506.misc create mode 100644 tests/test_utils/__init__.py create mode 100644 tests/test_utils/logging_setup.py diff --git a/changelog.d/4506.misc b/changelog.d/4506.misc new file mode 100644 index 0000000000..ea0e7d9580 --- /dev/null +++ b/changelog.d/4506.misc @@ -0,0 +1 @@ +Make it possible to set the log level for tests via an environment variable \ No newline at end of file diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py new file mode 100644 index 0000000000..a7310cf12a --- /dev/null +++ b/tests/test_utils/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Utilities for running the unit tests +""" diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py new file mode 100644 index 0000000000..d0bc8e2112 --- /dev/null +++ b/tests/test_utils/logging_setup.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +import os + +import twisted.logger + +from synapse.util.logcontext import LoggingContextFilter + + +class ToTwistedHandler(logging.Handler): + """logging handler which sends the logs to the twisted log""" + tx_log = twisted.logger.Logger() + + def emit(self, record): + log_entry = self.format(record) + log_level = record.levelname.lower().replace('warning', 'warn') + self.tx_log.emit( + twisted.logger.LogLevel.levelWithName(log_level), + log_entry.replace("{", r"(").replace("}", r")"), + ) + + +def setup_logging(): + """Configure the python logging appropriately for the tests. + + (Logs will end up in _trial_temp.) 
+ """ + root_logger = logging.getLogger() + + log_format = ( + "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s" + ) + + handler = ToTwistedHandler() + formatter = logging.Formatter(log_format) + handler.setFormatter(formatter) + handler.addFilter(LoggingContextFilter(request="")) + root_logger.addHandler(handler) + + log_level = os.environ.get("SYNAPSE_TEST_LOG_LEVEL", "ERROR") + root_logger.setLevel(log_level) diff --git a/tests/unittest.py b/tests/unittest.py index cda549c783..fac254ff10 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -31,38 +31,14 @@ from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest from synapse.server import HomeServer from synapse.types import UserID, create_requester -from synapse.util.logcontext import LoggingContext, LoggingContextFilter +from synapse.util.logcontext import LoggingContext from tests.server import get_clock, make_request, render, setup_test_homeserver +from tests.test_utils.logging_setup import setup_logging from tests.utils import default_config, setupdb setupdb() - -# Set up putting Synapse's logs into Trial's. -rootLogger = logging.getLogger() - -log_format = ( - "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s" -) - - -class ToTwistedHandler(logging.Handler): - tx_log = twisted.logger.Logger() - - def emit(self, record): - log_entry = self.format(record) - log_level = record.levelname.lower().replace('warning', 'warn') - self.tx_log.emit( - twisted.logger.LogLevel.levelWithName(log_level), - log_entry.replace("{", r"(").replace("}", r")"), - ) - - -handler = ToTwistedHandler() -formatter = logging.Formatter(log_format) -handler.setFormatter(formatter) -handler.addFilter(LoggingContextFilter(request="")) -rootLogger.addHandler(handler) +setup_logging() def around(target): @@ -96,7 +72,7 @@ class TestCase(unittest.TestCase): method = getattr(self, methodName) - level = getattr(method, "loglevel", getattr(self, "loglevel", logging.WARNING)) + level = getattr(method, "loglevel", getattr(self, "loglevel", None)) @around(self) def setUp(orig): @@ -114,7 +90,7 @@ class TestCase(unittest.TestCase): ) old_level = logging.getLogger().level - if old_level != level: + if level is not None and old_level != level: @around(self) def tearDown(orig): @@ -122,7 +98,8 @@ class TestCase(unittest.TestCase): logging.getLogger().setLevel(old_level) return ret - logging.getLogger().setLevel(level) + logging.getLogger().setLevel(level) + return orig() @around(self) From fb99dae9c8ace0d53ca6c8b041e0f99794c3b100 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 12:08:23 +0000 Subject: [PATCH 12/20] Don't set event_id twice --- synapse/events/builder.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 225b5fd670..fb0683cea8 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -180,8 +180,6 @@ class EventBuilderFactory(object): "No event format defined for version %r" % (room_version,) ) - key_values["event_id"] = _create_event_id(self.clock, self.hostname) - return EventBuilder( store=self.store, state=self.state, From 5891a6edc89039cf4fa76217c3f34384a2aa14eb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 12:09:10 +0000 Subject: [PATCH 13/20] Correctly set context.app_service --- synapse/handlers/message.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 
ac6f4fd985..37a7dca794 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -553,6 +553,8 @@ class EventCreationHandler(object): prev_event_ids=[p for p, _ in prev_events], ) context = yield self.state.compute_event_context(event) + if requester: + context.app_service = requester.app_service self.validator.validate_new(event) From 99e36d5e243059a4c92a769a2920714b2d4c84d9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 29 Jan 2019 13:53:02 +0000 Subject: [PATCH 14/20] Implement MSC1708 (.well-known lookups for server routing) (#4489) --- MANIFEST.in | 1 + changelog.d/4408.feature | 1 + changelog.d/4408.misc | 1 - changelog.d/4409.feature | 1 + changelog.d/4409.misc | 1 - changelog.d/4426.feature | 1 + changelog.d/4426.misc | 1 - changelog.d/4427.feature | 1 + changelog.d/4427.misc | 1 - changelog.d/4428.feature | 1 + changelog.d/4428.misc | 1 - changelog.d/4464.feature | 1 + changelog.d/4464.misc | 1 - changelog.d/4468.feature | 1 + changelog.d/4468.misc | 1 - changelog.d/4487.feature | 1 + changelog.d/4487.misc | 1 - changelog.d/4489.feature | 1 + .../federation/matrix_federation_agent.py | 114 ++++++++- tests/http/__init__.py | 42 ++++ .../test_matrix_federation_agent.py | 223 +++++++++++++++++- tests/http/server.pem | 81 +++++++ tests/server.py | 13 +- 23 files changed, 470 insertions(+), 21 deletions(-) create mode 100644 changelog.d/4408.feature delete mode 100644 changelog.d/4408.misc create mode 100644 changelog.d/4409.feature delete mode 100644 changelog.d/4409.misc create mode 100644 changelog.d/4426.feature delete mode 100644 changelog.d/4426.misc create mode 100644 changelog.d/4427.feature delete mode 100644 changelog.d/4427.misc create mode 100644 changelog.d/4428.feature delete mode 100644 changelog.d/4428.misc create mode 100644 changelog.d/4464.feature delete mode 100644 changelog.d/4464.misc create mode 100644 changelog.d/4468.feature delete mode 100644 changelog.d/4468.misc create mode 100644 changelog.d/4487.feature delete mode 100644 changelog.d/4487.misc create mode 100644 changelog.d/4489.feature create mode 100644 tests/http/server.pem diff --git a/MANIFEST.in b/MANIFEST.in index 7e4c031b79..eb2de60f72 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -15,6 +15,7 @@ recursive-include docs * recursive-include scripts * recursive-include scripts-dev * recursive-include synapse *.pyi +recursive-include tests *.pem recursive-include tests *.py recursive-include synapse/res * diff --git a/changelog.d/4408.feature b/changelog.d/4408.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4408.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4408.misc b/changelog.d/4408.misc deleted file mode 100644 index 729bafd62e..0000000000 --- a/changelog.d/4408.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor 'sign_request' as 'build_auth_headers' \ No newline at end of file diff --git a/changelog.d/4409.feature b/changelog.d/4409.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4409.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4409.misc b/changelog.d/4409.misc deleted file mode 100644 index 9cf2adfbb1..0000000000 --- a/changelog.d/4409.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant federation connection wrapping code diff --git 
a/changelog.d/4426.feature b/changelog.d/4426.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4426.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4426.misc b/changelog.d/4426.misc deleted file mode 100644 index cda50438e0..0000000000 --- a/changelog.d/4426.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant SynapseKeyClientProtocol magic \ No newline at end of file diff --git a/changelog.d/4427.feature b/changelog.d/4427.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4427.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4427.misc b/changelog.d/4427.misc deleted file mode 100644 index 75500bdbc2..0000000000 --- a/changelog.d/4427.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor and cleanup for SRV record lookup diff --git a/changelog.d/4428.feature b/changelog.d/4428.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4428.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4428.misc b/changelog.d/4428.misc deleted file mode 100644 index 9a51434755..0000000000 --- a/changelog.d/4428.misc +++ /dev/null @@ -1 +0,0 @@ -Move SRV logic into the Agent layer diff --git a/changelog.d/4464.feature b/changelog.d/4464.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4464.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4464.misc b/changelog.d/4464.misc deleted file mode 100644 index 9a51434755..0000000000 --- a/changelog.d/4464.misc +++ /dev/null @@ -1 +0,0 @@ -Move SRV logic into the Agent layer diff --git a/changelog.d/4468.feature b/changelog.d/4468.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4468.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4468.misc b/changelog.d/4468.misc deleted file mode 100644 index 9a51434755..0000000000 --- a/changelog.d/4468.misc +++ /dev/null @@ -1 +0,0 @@ -Move SRV logic into the Agent layer diff --git a/changelog.d/4487.feature b/changelog.d/4487.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4487.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/changelog.d/4487.misc b/changelog.d/4487.misc deleted file mode 100644 index 79de8eb3ad..0000000000 --- a/changelog.d/4487.misc +++ /dev/null @@ -1 +0,0 @@ -Fix idna and ipv6 literal handling in MatrixFederationAgent diff --git a/changelog.d/4489.feature b/changelog.d/4489.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4489.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 4a6f634c8b..07c72c9351 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -12,6 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied. # See the License for the specific language governing permissions and # limitations under the License. +import cgi +import json import logging import attr @@ -20,7 +22,7 @@ from zope.interface import implementer from twisted.internet import defer from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS -from twisted.web.client import URI, Agent, HTTPConnectionPool +from twisted.web.client import URI, Agent, HTTPConnectionPool, readBody from twisted.web.http_headers import Headers from twisted.web.iweb import IAgent @@ -43,13 +45,19 @@ class MatrixFederationAgent(object): tls_client_options_factory (ClientTLSOptionsFactory|None): factory to use for fetching client tls options, or none to disable TLS. + _well_known_tls_policy (IPolicyForHTTPS|None): + TLS policy to use for fetching .well-known files. None to use a default + (browser-like) implementation. + srv_resolver (SrvResolver|None): SRVResolver impl to use for looking up SRV records. None to use a default implementation. """ def __init__( - self, reactor, tls_client_options_factory, _srv_resolver=None, + self, reactor, tls_client_options_factory, + _well_known_tls_policy=None, + _srv_resolver=None, ): self._reactor = reactor self._tls_client_options_factory = tls_client_options_factory @@ -62,6 +70,14 @@ class MatrixFederationAgent(object): self._pool.maxPersistentPerHost = 5 self._pool.cachedConnectionTimeout = 2 * 60 + agent_args = {} + if _well_known_tls_policy is not None: + # the param is called 'contextFactory', but actually passing a + # contextfactory is deprecated, and it expects an IPolicyForHTTPS. + agent_args['contextFactory'] = _well_known_tls_policy + _well_known_agent = Agent(self._reactor, pool=self._pool, **agent_args) + self._well_known_agent = _well_known_agent + @defer.inlineCallbacks def request(self, method, uri, headers=None, bodyProducer=None): """ @@ -114,7 +130,11 @@ class MatrixFederationAgent(object): class EndpointFactory(object): @staticmethod def endpointForURI(_uri): - logger.info("Connecting to %s:%s", res.target_host, res.target_port) + logger.info( + "Connecting to %s:%i", + res.target_host.decode("ascii"), + res.target_port, + ) ep = HostnameEndpoint(self._reactor, res.target_host, res.target_port) if tls_options is not None: ep = wrapClientTLS(tls_options, ep) @@ -127,7 +147,7 @@ class MatrixFederationAgent(object): defer.returnValue(res) @defer.inlineCallbacks - def _route_matrix_uri(self, parsed_uri): + def _route_matrix_uri(self, parsed_uri, lookup_well_known=True): """Helper for `request`: determine the routing for a Matrix URI Args: @@ -135,6 +155,9 @@ class MatrixFederationAgent(object): parsed with URI.fromBytes(uri, defaultPort=-1) to set the `port` to -1 if there is no explicit port given. + lookup_well_known (bool): True if we should look up the .well-known file if + there is no SRV record. + Returns: Deferred[_RoutingResult] """ @@ -169,6 +192,42 @@ class MatrixFederationAgent(object): service_name = b"_matrix._tcp.%s" % (parsed_uri.host,) server_list = yield self._srv_resolver.resolve_service(service_name) + if not server_list and lookup_well_known: + # try a .well-known lookup + well_known_server = yield self._get_well_known(parsed_uri.host) + + if well_known_server: + # if we found a .well-known, start again, but don't do another + # .well-known lookup. + + # parse the server name in the .well-known response into host/port. + # (This code is lifted from twisted.web.client.URI.fromBytes). 
+ if b':' in well_known_server: + well_known_host, well_known_port = well_known_server.rsplit(b':', 1) + try: + well_known_port = int(well_known_port) + except ValueError: + # the part after the colon could not be parsed as an int + # - we assume it is an IPv6 literal with no port (the closing + # ']' stops it being parsed as an int) + well_known_host, well_known_port = well_known_server, -1 + else: + well_known_host, well_known_port = well_known_server, -1 + + new_uri = URI( + scheme=parsed_uri.scheme, + netloc=well_known_server, + host=well_known_host, + port=well_known_port, + path=parsed_uri.path, + params=parsed_uri.params, + query=parsed_uri.query, + fragment=parsed_uri.fragment, + ) + + res = yield self._route_matrix_uri(new_uri, lookup_well_known=False) + defer.returnValue(res) + if not server_list: target_host = parsed_uri.host port = 8448 @@ -190,6 +249,53 @@ class MatrixFederationAgent(object): target_port=port, )) + @defer.inlineCallbacks + def _get_well_known(self, server_name): + """Attempt to fetch and parse a .well-known file for the given server + + Args: + server_name (bytes): name of the server, from the requested url + + Returns: + Deferred[bytes|None]: either the new server name, from the .well-known, or + None if there was no .well-known file. + """ + # FIXME: add a cache + + uri = b"https://%s/.well-known/matrix/server" % (server_name, ) + logger.info("Fetching %s", uri.decode("ascii")) + try: + response = yield make_deferred_yieldable( + self._well_known_agent.request(b"GET", uri), + ) + except Exception as e: + logger.info( + "Connection error fetching %s: %s", + uri.decode("ascii"), e, + ) + defer.returnValue(None) + + body = yield make_deferred_yieldable(readBody(response)) + + if response.code != 200: + logger.info( + "Error response %i from %s: %s", + response.code, uri.decode("ascii"), body, + ) + defer.returnValue(None) + + content_types = response.headers.getRawHeaders(u'content-type') + if content_types is None: + raise Exception("no content-type header on .well-known response") + content_type, _opts = cgi.parse_header(content_types[-1]) + if content_type != 'application/json': + raise Exception("content-type not application/json on .well-known response") + parsed_body = json.loads(body.decode('utf-8')) + logger.info("Response from .well-known: %s", parsed_body) + if not isinstance(parsed_body, dict) or "m.server" not in parsed_body: + raise Exception("invalid .well-known response") + defer.returnValue(parsed_body["m.server"].encode("ascii")) + @attr.s class _RoutingResult(object): diff --git a/tests/http/__init__.py b/tests/http/__init__.py index e69de29bb2..ee8010f598 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
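Stripped of the Twisted plumbing, the delegation lookup that _get_well_known performs amounts to the following blocking sketch (standard-library only and illustrative rather than the agent code itself; validation failures are simplified to "no delegation"):

    import json
    from urllib.request import urlopen

    def lookup_well_known(server_name):
        # Ask https://<server_name>/.well-known/matrix/server where federation
        # traffic should really be sent. Returns the delegated "host[:port]"
        # string, or None, in which case the caller falls back to SRV lookup
        # and finally to port 8448, as in _route_matrix_uri above.
        try:
            resp = urlopen(
                "https://%s/.well-known/matrix/server" % (server_name,),
                timeout=10,
            )
            if resp.getcode() != 200:
                return None
            body = json.loads(resp.read().decode("utf-8"))
        except Exception:
            return None
        if not isinstance(body, dict) or "m.server" not in body:
            return None
        return body["m.server"]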
+import os.path + +from OpenSSL import SSL + + +def get_test_cert_file(): + """get the path to the test cert""" + + # the cert file itself is made with: + # + # openssl req -x509 -newkey rsa:4096 -keyout server.pem -out server.pem -days 36500 \ + # -nodes -subj '/CN=testserv' + return os.path.join( + os.path.dirname(__file__), + 'server.pem', + ) + + +class ServerTLSContext(object): + """A TLS Context which presents our test cert.""" + def __init__(self): + self.filename = get_test_cert_file() + + def getContext(self): + ctx = SSL.Context(SSL.TLSv1_METHOD) + ctx.use_certificate_file(self.filename) + ctx.use_privatekey_file(self.filename) + return ctx diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 53b52ace59..bd80dd0cb6 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -17,18 +17,21 @@ import logging from mock import Mock import treq +from zope.interface import implementer from twisted.internet import defer +from twisted.internet._sslverify import ClientTLSOptions, OpenSSLCertificateOptions from twisted.internet.protocol import Factory from twisted.protocols.tls import TLSMemoryBIOFactory -from twisted.test.ssl_helpers import ServerTLSContext from twisted.web.http import HTTPChannel +from twisted.web.iweb import IPolicyForHTTPS from synapse.crypto.context_factory import ClientTLSOptionsFactory from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent from synapse.http.federation.srv_resolver import Server from synapse.util.logcontext import LoggingContext +from tests.http import ServerTLSContext from tests.server import FakeTransport, ThreadedMemoryReactorClock from tests.unittest import TestCase @@ -44,6 +47,7 @@ class MatrixFederationAgentTests(TestCase): self.agent = MatrixFederationAgent( reactor=self.reactor, tls_client_options_factory=ClientTLSOptionsFactory(None), + _well_known_tls_policy=TrustingTLSPolicyForHTTPS(), _srv_resolver=self.mock_resolver, ) @@ -65,10 +69,14 @@ class MatrixFederationAgentTests(TestCase): # Normally this would be done by the TCP socket code in Twisted, but we are # stubbing that out here. client_protocol = client_factory.buildProtocol(None) - client_protocol.makeConnection(FakeTransport(server_tls_protocol, self.reactor)) + client_protocol.makeConnection( + FakeTransport(server_tls_protocol, self.reactor, client_protocol), + ) # tell the server tls protocol to send its stuff back to the client, too - server_tls_protocol.makeConnection(FakeTransport(client_protocol, self.reactor)) + server_tls_protocol.makeConnection( + FakeTransport(client_protocol, self.reactor, server_tls_protocol), + ) # give the reactor a pump to get the TLS juices flowing. self.reactor.pump((0.1,)) @@ -101,9 +109,49 @@ class MatrixFederationAgentTests(TestCase): try: fetch_res = yield fetch_d defer.returnValue(fetch_res) + except Exception as e: + logger.info("Fetch of %s failed: %s", uri.decode("ascii"), e) + raise finally: _check_logcontext(context) + def _handle_well_known_connection(self, client_factory, expected_sni, target_server): + """Handle an outgoing HTTPs connection: wire it up to a server, check that the + request is for a .well-known, and send the response. 
+ + Args: + client_factory (IProtocolFactory): outgoing connection + expected_sni (bytes): SNI that we expect the outgoing connection to send + target_server (bytes): target server that we should redirect to in the + .well-known response. + """ + # make the connection for .well-known + well_known_server = self._make_connection( + client_factory, + expected_sni=expected_sni, + ) + # check the .well-known request and send a response + self.assertEqual(len(well_known_server.requests), 1) + request = well_known_server.requests[0] + self._send_well_known_response(request, target_server) + + def _send_well_known_response(self, request, target_server): + """Check that an incoming request looks like a valid .well-known request, and + send back the response. + """ + self.assertEqual(request.method, b'GET') + self.assertEqual(request.path, b'/.well-known/matrix/server') + self.assertEqual( + request.requestHeaders.getRawHeaders(b'host'), + [b'testserv'], + ) + # send back a response + request.responseHeaders.setRawHeaders(b'Content-Type', [b'application/json']) + request.write(b'{ "m.server": "%s" }' % (target_server,)) + request.finish() + + self.reactor.pump((0.1, )) + def test_get(self): """ happy-path test of a GET request with an explicit port @@ -283,9 +331,9 @@ class MatrixFederationAgentTests(TestCase): self.reactor.pump((0.1,)) self.successResultOf(test_d) - def test_get_hostname_no_srv(self): + def test_get_no_srv_no_well_known(self): """ - Test the behaviour when the server name has no port, and no SRV record + Test the behaviour when the server name has no port, no SRV, and no well-known """ self.mock_resolver.resolve_service.side_effect = lambda _: [] @@ -300,11 +348,24 @@ class MatrixFederationAgentTests(TestCase): b"_matrix._tcp.testserv", ) - # Make sure treq is trying to connect + # there should be an attempt to connect on port 443 for the .well-known clients = self.reactor.tcpClients self.assertEqual(len(clients), 1) (host, port, client_factory, _timeout, _bindAddress) = clients[0] self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + # fonx the connection + client_factory.clientConnectionFailed(None, Exception("nope")) + + # attemptdelay on the hostnameendpoint is 0.3, so takes that long before the + # .well-known request fails. 
+ self.reactor.pump((0.4,)) + + # we should fall back to a direct connection + self.assertEqual(len(clients), 2) + (host, port, client_factory, _timeout, _bindAddress) = clients[1] + self.assertEqual(host, '1.2.3.4') self.assertEqual(port, 8448) # make a test server, and wire up the client @@ -327,6 +388,67 @@ class MatrixFederationAgentTests(TestCase): self.reactor.pump((0.1,)) self.successResultOf(test_d) + def test_get_well_known(self): + """Test the behaviour when the server name has no port and no SRV record, but + the .well-known redirects elsewhere + """ + + self.mock_resolver.resolve_service.side_effect = lambda _: [] + self.reactor.lookups["testserv"] = "1.2.3.4" + self.reactor.lookups["target-server"] = "1::f" + + test_d = self._make_get_request(b"matrix://testserv/foo/bar") + + # Nothing happened yet + self.assertNoResult(test_d) + + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.testserv", + ) + self.mock_resolver.resolve_service.reset_mock() + + # there should be an attempt to connect on port 443 for the .well-known + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + self._handle_well_known_connection( + client_factory, expected_sni=b"testserv", target_server=b"target-server", + ) + + # there should be another SRV lookup + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.target-server", + ) + + # now we should get a connection to the target server + self.assertEqual(len(clients), 2) + (host, port, client_factory, _timeout, _bindAddress) = clients[1] + self.assertEqual(host, '1::f') + self.assertEqual(port, 8448) + + # make a test server, and wire up the client + http_server = self._make_connection( + client_factory, + expected_sni=b'target-server', + ) + + self.assertEqual(len(http_server.requests), 1) + request = http_server.requests[0] + self.assertEqual(request.method, b'GET') + self.assertEqual(request.path, b'/foo/bar') + self.assertEqual( + request.requestHeaders.getRawHeaders(b'host'), + [b'target-server'], + ) + + # finish the request + request.finish() + self.reactor.pump((0.1,)) + self.successResultOf(test_d) + def test_get_hostname_srv(self): """ Test the behaviour when there is a single SRV record @@ -372,6 +494,71 @@ class MatrixFederationAgentTests(TestCase): self.reactor.pump((0.1,)) self.successResultOf(test_d) + def test_get_well_known_srv(self): + """Test the behaviour when the server name has no port and no SRV record, but + the .well-known redirects to a place where there is a SRV. 
+ """ + + self.mock_resolver.resolve_service.side_effect = lambda _: [] + self.reactor.lookups["testserv"] = "1.2.3.4" + self.reactor.lookups["srvtarget"] = "5.6.7.8" + + test_d = self._make_get_request(b"matrix://testserv/foo/bar") + + # Nothing happened yet + self.assertNoResult(test_d) + + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.testserv", + ) + self.mock_resolver.resolve_service.reset_mock() + + # there should be an attempt to connect on port 443 for the .well-known + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + self.mock_resolver.resolve_service.side_effect = lambda _: [ + Server(host=b"srvtarget", port=8443), + ] + + self._handle_well_known_connection( + client_factory, expected_sni=b"testserv", target_server=b"target-server", + ) + + # there should be another SRV lookup + self.mock_resolver.resolve_service.assert_called_once_with( + b"_matrix._tcp.target-server", + ) + + # now we should get a connection to the target of the SRV record + self.assertEqual(len(clients), 2) + (host, port, client_factory, _timeout, _bindAddress) = clients[1] + self.assertEqual(host, '5.6.7.8') + self.assertEqual(port, 8443) + + # make a test server, and wire up the client + http_server = self._make_connection( + client_factory, + expected_sni=b'target-server', + ) + + self.assertEqual(len(http_server.requests), 1) + request = http_server.requests[0] + self.assertEqual(request.method, b'GET') + self.assertEqual(request.path, b'/foo/bar') + self.assertEqual( + request.requestHeaders.getRawHeaders(b'host'), + [b'target-server'], + ) + + # finish the request + request.finish() + self.reactor.pump((0.1,)) + self.successResultOf(test_d) + def test_idna_servername(self): """test the behaviour when the server name has idna chars in""" @@ -390,11 +577,25 @@ class MatrixFederationAgentTests(TestCase): b"_matrix._tcp.xn--bcher-kva.com", ) - # Make sure treq is trying to connect + # there should be an attempt to connect on port 443 for the .well-known clients = self.reactor.tcpClients self.assertEqual(len(clients), 1) (host, port, client_factory, _timeout, _bindAddress) = clients[0] self.assertEqual(host, '1.2.3.4') + self.assertEqual(port, 443) + + # fonx the connection + client_factory.clientConnectionFailed(None, Exception("nope")) + + # attemptdelay on the hostnameendpoint is 0.3, so takes that long before the + # .well-known request fails. 
+ self.reactor.pump((0.4,)) + + # We should fall back to port 8448 + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 2) + (host, port, client_factory, _timeout, _bindAddress) = clients[1] + self.assertEqual(host, '1.2.3.4') self.assertEqual(port, 8448) # make a test server, and wire up the client @@ -492,3 +693,11 @@ def _build_test_server(): def _log_request(request): """Implements Factory.log, which is expected by Request.finish""" logger.info("Completed request %s", request) + + +@implementer(IPolicyForHTTPS) +class TrustingTLSPolicyForHTTPS(object): + """An IPolicyForHTTPS which doesn't do any certificate verification""" + def creatorForNetloc(self, hostname, port): + certificateOptions = OpenSSLCertificateOptions() + return ClientTLSOptions(hostname, certificateOptions.getContext()) diff --git a/tests/http/server.pem b/tests/http/server.pem new file mode 100644 index 0000000000..0584cf1a80 --- /dev/null +++ b/tests/http/server.pem @@ -0,0 +1,81 @@ +-----BEGIN PRIVATE KEY----- +MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCgF43/3lAgJ+p0 +x7Rn8UcL8a4fctvdkikvZrCngw96LkB34Evfq8YGWlOVjU+f9naUJLAKMatmAfEN +r+rMX4VOXmpTwuu6iLtqwreUrRFMESyrmvQxa15p+y85gkY0CFmXMblv6ORbxHTG +ncBGwST4WK4Poewcgt6jcISFCESTUKu1zc3cw1ANIDRyDLB5K44KwIe36dcKckyN +Kdtv4BJ+3fcIZIkPJH62zqCypgFF1oiFt40uJzClxgHdJZlKYpgkfnDTckw4Y/Mx +9k8BbE310KAzUNMV9H7I1eEolzrNr66FQj1eN64X/dqO8lTbwCqAd4diCT4sIUk0 +0SVsAUjNd3g8j651hx+Qb1t8fuOjrny8dmeMxtUgIBHoQcpcj76R55Fs7KZ9uar0 +8OFTyGIze51W1jG2K/7/5M1zxIqrA+7lsXu5OR81s7I+Ng/UUAhiHA/z+42/aiNa +qEuk6tqj3rHfLctnCbtZ+JrRNqSSwEi8F0lMA021ivEd2eJV+284OyJjhXOmKHrX +QADHrmS7Sh4syTZvRNm9n+qWID0KdDr2Sji/KnS3Enp44HDQ4xriT6/xhwEGsyuX +oH5aAkdLznulbWkHBbyx1SUQSTLpOqzaioF9m1vRrLsFvrkrY3D253mPJ5eU9HM/ +dilduFcUgj4rz+6cdXUAh+KK/v95zwIDAQABAoICAFG5tJPaOa0ws0/KYx5s3YgL +aIhFalhCNSQtmCDrlwsYcXDA3/rfBchYdDL0YKGYgBBAal3J3WXFt/j0xThvyu2m +5UC9UPl4s7RckrsjXqEmY1d3UxGnbhtMT19cUdpeKN42VCP9EBaIw9Rg07dLAkSF +gNYaIx6q8F0fI4eGIPvTQtUcqur4CfWpaxyNvckdovV6M85/YXfDwbCOnacPDGIX +jfSK3i0MxGMuOHr6o8uzKR6aBUh6WStHWcw7VXXTvzdiFNbckmx3Gb93rf1b/LBw +QFfx+tBKcC62gKroCOzXso/0sL9YTVeSD/DJZOiJwSiz3Dj/3u1IUMbVvfTU8wSi +CYS7Z+jHxwSOCSSNTXm1wO/MtDsNKbI1+R0cohr/J9pOMQvrVh1+2zSDOFvXAQ1S +yvjn+uqdmijRoV2VEGVHd+34C+ci7eJGAhL/f92PohuuFR2shUETgGWzpACZSJwg +j1d90Hs81hj07vWRb+xCeDh00vimQngz9AD8vYvv/S4mqRGQ6TZdfjLoUwSTg0JD +6sQgRXX026gQhLhn687vLKZfHwzQPZkpQdxOR0dTZ/ho/RyGGRJXH4kN4cA2tPr+ +AKYQ29YXGlEzGG7OqikaZcprNWG6UFgEpuXyBxCgp9r4ladZo3J+1Rhgus8ZYatd +uO98q3WEBmP6CZ2n32mBAoIBAQDS/c/ybFTos0YpGHakwdmSfj5OOQJto2y8ywfG +qDHwO0ebcpNnS1+MA+7XbKUQb/3Iq7iJljkkzJG2DIJ6rpKynYts1ViYpM7M/t0T +W3V1gvUcUL62iqkgws4pnpWmubFkqV31cPSHcfIIclnzeQ1aOEGsGHNAvhty0ciC +DnkJACbqApvopFLOR5f6UFTtKExE+hDH0WqgpsCAKJ1L4g6pBzZatI32/CN9JEVU +tDbxLV75hHlFFjUrG7nT1rPyr/gI8Ceh9/2xeXPfjJUR0PrG3U1nwLqUCZkvFzO6 +XpN2+A+/v4v5xqMjKDKDFy1oq6SCMomwv/viw6wl/84TMbolAoIBAQDCPiMecnR8 +REik6tqVzQO/uSe9ZHjz6J15t5xdwaI6HpSwLlIkQPkLTjyXtFpemK5DOYRxrJvQ +remfrZrN2qtLlb/DKpuGPWRsPOvWCrSuNEp48ivUehtclljrzxAFfy0sM+fWeJ48 +nTnR+td9KNhjNtZixzWdAy/mE+jdaMsXVnk66L73Uz+2WsnvVMW2R6cpCR0F2eP/ +B4zDWRqlT2w47sePAB81mFYSQLvPC6Xcgg1OqMubfiizJI49c8DO6Jt+FFYdsxhd +kG52Eqa/Net6rN3ueiS6yXL5TU3Y6g96bPA2KyNCypucGcddcBfqaiVx/o4AH6yT +NrdsrYtyvk/jAoIBAQDHUwKVeeRJJbvdbQAArCV4MI155n+1xhMe1AuXkCQFWGtQ +nlBE4D72jmyf1UKnIbW2Uwv15xY6/ouVWYIWlj9+QDmMaozVP7Uiko+WDuwLRNl8 +k4dn+dzHV2HejbPBG2JLv3lFOx23q1zEwArcaXrExaq9Ayg2fKJ/uVHcFAIiD6Oz +pR1XDY4w1A/uaN+iYFSVQUyDCQLbnEz1hej73CaPZoHh9Pq83vxD5/UbjVjuRTeZ +L55FNzKpc/r89rNvTPBcuUwnxplDhYKDKVNWzn9rSXwrzTY2Tk8J3rh+k4RqevSd 
+6D47jH1n5Dy7/TRn0ueKHGZZtTUnyEUkbOJo3ayFAoIBAHKDyZaQqaX9Z8p6fwWj +yVsFoK0ih8BcWkLBAdmwZ6DWGJjJpjmjaG/G3ygc9s4gO1R8m12dAnuDnGE8KzDD +gwtbrKM2Alyg4wyA2hTlWOH/CAzH0RlCJ9Fs/d1/xJVJBeuyajLiB3/6vXTS6qnq +I7BSSxAPG8eGcn21LSsjNeB7ZZtaTgNnu/8ZBUYo9yrgkWc67TZe3/ChldYxOOlO +qqHh/BqNWtjxB4VZTp/g4RbgQVInZ2ozdXEv0v/dt0UEk29ANAjsZif7F3RayJ2f +/0TilzCaJ/9K9pKNhaClVRy7Dt8QjYg6BIWCGSw4ApF7pLnQ9gySn95mersCkVzD +YDsCggEAb0E/TORjQhKfNQvahyLfQFm151e+HIoqBqa4WFyfFxe/IJUaLH/JSSFw +VohbQqPdCmaAeuQ8ERL564DdkcY5BgKcax79fLLCOYP5bT11aQx6uFpfl2Dcm6Z9 +QdCRI4jzPftsd5fxLNH1XtGyC4t6vTic4Pji2O71WgWzx0j5v4aeDY4sZQeFxqCV +/q7Ee8hem1Rn5RFHu14FV45RS4LAWl6wvf5pQtneSKzx8YL0GZIRRytOzdEfnGKr +FeUlAj5uL+5/p0ZEgM7gPsEBwdm8scF79qSUn8UWSoXNeIauF9D4BDg8RZcFFxka +KILVFsq3cQC+bEnoM4eVbjEQkGs1RQ== +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIIE/jCCAuagAwIBAgIJANFtVaGvJWZlMA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCHRlc3RzZXJ2MCAXDTE5MDEyNzIyMDIzNloYDzIxMTkwMTAzMjIwMjM2WjAT +MREwDwYDVQQDDAh0ZXN0c2VydjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBAKAXjf/eUCAn6nTHtGfxRwvxrh9y292SKS9msKeDD3ouQHfgS9+rxgZaU5WN +T5/2dpQksAoxq2YB8Q2v6sxfhU5ealPC67qIu2rCt5StEUwRLKua9DFrXmn7LzmC +RjQIWZcxuW/o5FvEdMadwEbBJPhYrg+h7ByC3qNwhIUIRJNQq7XNzdzDUA0gNHIM +sHkrjgrAh7fp1wpyTI0p22/gEn7d9whkiQ8kfrbOoLKmAUXWiIW3jS4nMKXGAd0l +mUpimCR+cNNyTDhj8zH2TwFsTfXQoDNQ0xX0fsjV4SiXOs2vroVCPV43rhf92o7y +VNvAKoB3h2IJPiwhSTTRJWwBSM13eDyPrnWHH5BvW3x+46OufLx2Z4zG1SAgEehB +ylyPvpHnkWzspn25qvTw4VPIYjN7nVbWMbYr/v/kzXPEiqsD7uWxe7k5HzWzsj42 +D9RQCGIcD/P7jb9qI1qoS6Tq2qPesd8ty2cJu1n4mtE2pJLASLwXSUwDTbWK8R3Z +4lX7bzg7ImOFc6YoetdAAMeuZLtKHizJNm9E2b2f6pYgPQp0OvZKOL8qdLcSenjg +cNDjGuJPr/GHAQazK5egfloCR0vOe6VtaQcFvLHVJRBJMuk6rNqKgX2bW9GsuwW+ +uStjcPbneY8nl5T0cz92KV24VxSCPivP7px1dQCH4or+/3nPAgMBAAGjUzBRMB0G +A1UdDgQWBBQcQZpzLzTk5KdS/Iz7sGCV7gTd/zAfBgNVHSMEGDAWgBQcQZpzLzTk +5KdS/Iz7sGCV7gTd/zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IC +AQAr/Pgha57jqYsDDX1LyRrVdqoVBpLBeB7x/p9dKYm7S6tBTDFNMZ0SZyQP8VEG +7UoC9/OQ9nCdEMoR7ZKpQsmipwcIqpXHS6l4YOkf5EEq5jpMgvlEesHmBJJeJew/ +FEPDl1bl8d0tSrmWaL3qepmwzA+2lwAAouWk2n+rLiP8CZ3jZeoTXFqYYrUlEqO9 +fHMvuWqTV4KCSyNY+GWCrnHetulgKHlg+W2J1mZnrCKcBhWf9C2DesTJO+JldIeM +ornTFquSt21hZi+k3aySuMn2N3MWiNL8XsZVsAnPSs0zA+2fxjJkShls8Gc7cCvd +a6XrNC+PY6pONguo7rEU4HiwbvnawSTngFFglmH/ImdA/HkaAekW6o82aI8/UxFx +V9fFMO3iKDQdOrg77hI1bx9RlzKNZZinE2/Pu26fWd5d2zqDWCjl8ykGQRAfXgYN +H3BjgyXLl+ao5/pOUYYtzm3ruTXTgRcy5hhL6hVTYhSrf9vYh4LNIeXNKnZ78tyG +TX77/kU2qXhBGCFEUUMqUNV/+ITir2lmoxVjknt19M07aGr8C7SgYt6Rs+qDpMiy +JurgvRh8LpVq4pHx1efxzxCFmo58DMrG40I0+CF3y/niNpOb1gp2wAqByRiORkds +f0ytW6qZ0TpHbD6gOtQLYDnhx3ISuX+QYSekVwQUpffeWQ== +-----END CERTIFICATE----- diff --git a/tests/server.py b/tests/server.py index 6adcc73f91..3d7ae9875c 100644 --- a/tests/server.py +++ b/tests/server.py @@ -354,6 +354,11 @@ class FakeTransport(object): :type: twisted.internet.interfaces.IReactorTime """ + _protocol = attr.ib(default=None) + """The Protocol which is producing data for this transport. Optional, but if set + will get called back for connectionLost() notifications etc. 
+ """ + disconnecting = False buffer = attr.ib(default=b'') producer = attr.ib(default=None) @@ -364,8 +369,12 @@ class FakeTransport(object): def getHost(self): return None - def loseConnection(self): - self.disconnecting = True + def loseConnection(self, reason=None): + logger.info("FakeTransport: loseConnection(%s)", reason) + if not self.disconnecting: + self.disconnecting = True + if self._protocol: + self._protocol.connectionLost(reason) def abortConnection(self): self.disconnecting = True From 6bd43746367028849b0942bf7f8050a85175dffc Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 29 Jan 2019 14:09:10 +0000 Subject: [PATCH 15/20] Do not generate self-signed TLS certificates by default. (#4509) --- changelog.d/4509.removal | 1 + synapse/config/tls.py | 61 +++++++++------------------- tests/config/test_generate.py | 2 - tests/config/test_tls.py | 75 +++++++++++++++++++++++++++++++++++ 4 files changed, 94 insertions(+), 45 deletions(-) create mode 100644 changelog.d/4509.removal create mode 100644 tests/config/test_tls.py diff --git a/changelog.d/4509.removal b/changelog.d/4509.removal new file mode 100644 index 0000000000..9165009813 --- /dev/null +++ b/changelog.d/4509.removal @@ -0,0 +1 @@ +Synapse no longer generates self-signed TLS certificates when generating a configuration file. diff --git a/synapse/config/tls.py b/synapse/config/tls.py index a75e233aa0..734f612db7 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -15,6 +15,7 @@ import logging import os +import warnings from datetime import datetime from hashlib import sha256 @@ -39,8 +40,8 @@ class TlsConfig(Config): self.acme_bind_addresses = acme_config.get("bind_addresses", ["127.0.0.1"]) self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) - self.tls_certificate_file = os.path.abspath(config.get("tls_certificate_path")) - self.tls_private_key_file = os.path.abspath(config.get("tls_private_key_path")) + self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) + self.tls_private_key_file = self.abspath(config.get("tls_private_key_path")) self._original_tls_fingerprints = config["tls_fingerprints"] self.tls_fingerprints = list(self._original_tls_fingerprints) self.no_tls = config.get("no_tls", False) @@ -94,6 +95,16 @@ class TlsConfig(Config): """ self.tls_certificate = self.read_tls_certificate(self.tls_certificate_file) + # Check if it is self-signed, and issue a warning if so. + if self.tls_certificate.get_issuer() == self.tls_certificate.get_subject(): + warnings.warn( + ( + "Self-signed TLS certificates will not be accepted by Synapse 1.0. " + "Please either provide a valid certificate, or use Synapse's ACME " + "support to provision one." + ) + ) + if not self.no_tls: self.tls_private_key = self.read_tls_private_key(self.tls_private_key_file) @@ -118,10 +129,11 @@ class TlsConfig(Config): return ( """\ # PEM encoded X509 certificate for TLS. - # You can replace the self-signed certificate that synapse - # autogenerates on launch with your own SSL certificate + key pair - # if you like. Any required intermediary certificates can be - # appended after the primary certificate in hierarchical order. + # This certificate, as of Synapse 1.0, will need to be a valid + # and verifiable certificate, with a root that is available in + # the root store of other servers you wish to federate to. Any + # required intermediary certificates can be appended after the + # primary certificate in hierarchical order. 
tls_certificate_path: "%(tls_certificate_path)s" # PEM encoded private key for TLS @@ -183,40 +195,3 @@ class TlsConfig(Config): def read_tls_private_key(self, private_key_path): private_key_pem = self.read_file(private_key_path, "tls_private_key") return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem) - - def generate_files(self, config): - tls_certificate_path = config["tls_certificate_path"] - tls_private_key_path = config["tls_private_key_path"] - - if not self.path_exists(tls_private_key_path): - with open(tls_private_key_path, "wb") as private_key_file: - tls_private_key = crypto.PKey() - tls_private_key.generate_key(crypto.TYPE_RSA, 2048) - private_key_pem = crypto.dump_privatekey( - crypto.FILETYPE_PEM, tls_private_key - ) - private_key_file.write(private_key_pem) - else: - with open(tls_private_key_path) as private_key_file: - private_key_pem = private_key_file.read() - tls_private_key = crypto.load_privatekey( - crypto.FILETYPE_PEM, private_key_pem - ) - - if not self.path_exists(tls_certificate_path): - with open(tls_certificate_path, "wb") as certificate_file: - cert = crypto.X509() - subject = cert.get_subject() - subject.CN = config["server_name"] - - cert.set_serial_number(1000) - cert.gmtime_adj_notBefore(0) - cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60) - cert.set_issuer(cert.get_subject()) - cert.set_pubkey(tls_private_key) - - cert.sign(tls_private_key, 'sha256') - - cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) - - certificate_file.write(cert_pem) diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index b5ad99348d..795b4c298d 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -50,8 +50,6 @@ class ConfigGenerationTestCase(unittest.TestCase): "homeserver.yaml", "lemurs.win.log.config", "lemurs.win.signing.key", - "lemurs.win.tls.crt", - "lemurs.win.tls.key", ] ), set(os.listdir(self.dir)), diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py new file mode 100644 index 0000000000..4ccaf35603 --- /dev/null +++ b/tests/config/test_tls.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from synapse.config.tls import TlsConfig + +from tests.unittest import TestCase + + +class TLSConfigTests(TestCase): + + def test_warn_self_signed(self): + """ + Synapse will give a warning when it loads a self-signed certificate. 
+ """ + config_dir = self.mktemp() + os.mkdir(config_dir) + with open(os.path.join(config_dir, "cert.pem"), 'w') as f: + f.write("""-----BEGIN CERTIFICATE----- +MIID6DCCAtACAws9CjANBgkqhkiG9w0BAQUFADCBtzELMAkGA1UEBhMCVFIxDzAN +BgNVBAgMBsOHb3J1bTEUMBIGA1UEBwwLQmHFn21ha8OnxLExEjAQBgNVBAMMCWxv +Y2FsaG9zdDEcMBoGA1UECgwTVHdpc3RlZCBNYXRyaXggTGFiczEkMCIGA1UECwwb +QXV0b21hdGVkIFRlc3RpbmcgQXV0aG9yaXR5MSkwJwYJKoZIhvcNAQkBFhpzZWN1 +cml0eUB0d2lzdGVkbWF0cml4LmNvbTAgFw0xNzA3MTIxNDAxNTNaGA8yMTE3MDYx +ODE0MDE1M1owgbcxCzAJBgNVBAYTAlRSMQ8wDQYDVQQIDAbDh29ydW0xFDASBgNV +BAcMC0JhxZ9tYWvDp8SxMRIwEAYDVQQDDAlsb2NhbGhvc3QxHDAaBgNVBAoME1R3 +aXN0ZWQgTWF0cml4IExhYnMxJDAiBgNVBAsMG0F1dG9tYXRlZCBUZXN0aW5nIEF1 +dGhvcml0eTEpMCcGCSqGSIb3DQEJARYac2VjdXJpdHlAdHdpc3RlZG1hdHJpeC5j +b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDwT6kbqtMUI0sMkx4h +I+L780dA59KfksZCqJGmOsMD6hte9EguasfkZzvCF3dk3NhwCjFSOvKx6rCwiteo +WtYkVfo+rSuVNmt7bEsOUDtuTcaxTzIFB+yHOYwAaoz3zQkyVW0c4pzioiLCGCmf +FLdiDBQGGp74tb+7a0V6kC3vMLFoM3L6QWq5uYRB5+xLzlPJ734ltyvfZHL3Us6p +cUbK+3WTWvb4ER0W2RqArAj6Bc/ERQKIAPFEiZi9bIYTwvBH27OKHRz+KoY/G8zY ++l+WZoJqDhupRAQAuh7O7V/y6bSP+KNxJRie9QkZvw1PSaGSXtGJI3WWdO12/Ulg +epJpAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAJXEq5P9xwvP9aDkXIqzcD0L8sf8 +ewlhlxTQdeqt2Nace0Yk18lIo2oj1t86Y8jNbpAnZJeI813Rr5M7FbHCXoRc/SZG +I8OtG1xGwcok53lyDuuUUDexnK4O5BkjKiVlNPg4HPim5Kuj2hRNFfNt/F2BVIlj +iZupikC5MT1LQaRwidkSNxCku1TfAyueiBwhLnFwTmIGNnhuDCutEVAD9kFmcJN2 +SznugAcPk4doX2+rL+ila+ThqgPzIkwTUHtnmjI0TI6xsDUlXz5S3UyudrE2Qsfz +s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= +-----END CERTIFICATE-----""") + + config = { + "tls_certificate_path": os.path.join(config_dir, "cert.pem"), + "no_tls": True, + "tls_fingerprints": [] + } + + t = TlsConfig() + t.read_config(config) + t.read_certificate_from_disk() + + warnings = self.flushWarnings() + self.assertEqual(len(warnings), 1) + self.assertEqual( + warnings[0]["message"], + ( + "Self-signed TLS certificates will not be accepted by " + "Synapse 1.0. Please either provide a valid certificate, " + "or use Synapse's ACME support to provision one." + ) + ) From e65a17b26f92a857d91e960f998a6d0b86204b9c Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 29 Jan 2019 15:29:09 +0000 Subject: [PATCH 16/20] Check consent dir path on startup --- synapse/config/consent_config.py | 14 +++++++++++++- synapse/rest/consent/consent_resource.py | 11 +---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py index f193a090ae..9f2e85342f 100644 --- a/synapse/config/consent_config.py +++ b/synapse/config/consent_config.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from os import path + +from synapse.config import ConfigError + from ._base import Config DEFAULT_CONFIG = """\ @@ -85,7 +89,15 @@ class ConsentConfig(Config): if consent_config is None: return self.user_consent_version = str(consent_config["version"]) - self.user_consent_template_dir = consent_config["template_dir"] + self.user_consent_template_dir = self.abspath( + consent_config["template_dir"] + ) + if not path.isdir(self.user_consent_template_dir): + raise ConfigError( + "Could not find template directory '%s'" % ( + self.user_consent_template_dir, + ), + ) self.user_consent_server_notice_content = consent_config.get( "server_notice_content", ) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 80611cfe84..008d4edae5 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -101,16 +101,7 @@ class ConsentResource(Resource): "missing in config file.", ) - # daemonize changes the cwd to /, so make the path absolute now. - consent_template_directory = path.abspath( - hs.config.user_consent_template_dir, - ) - if not path.isdir(consent_template_directory): - raise ConfigError( - "Could not find template directory '%s'" % ( - consent_template_directory, - ), - ) + consent_template_directory = hs.config.user_consent_template_dir loader = jinja2.FileSystemLoader(consent_template_directory) self._jinja_env = jinja2.Environment( From 9adbc912b3d285ddf4eb6bd949bd2c5aba7e7d90 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 29 Jan 2019 15:34:06 +0000 Subject: [PATCH 17/20] Add changelog --- changelog.d/4512.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4512.bugfix diff --git a/changelog.d/4512.bugfix b/changelog.d/4512.bugfix new file mode 100644 index 0000000000..7a1345c4ac --- /dev/null +++ b/changelog.d/4512.bugfix @@ -0,0 +1 @@ +Fix a bug where setting a relative consent directory path would cause a crash. \ No newline at end of file From cc2d650ef72a7b1767888d2a778036de8a90e44d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 29 Jan 2019 16:49:17 +0000 Subject: [PATCH 18/20] Relax requirement for a content-type on .well-known (#4511) --- changelog.d/4511.feature | 1 + .../federation/matrix_federation_agent.py | 33 ++++++++----------- .../test_matrix_federation_agent.py | 1 - 3 files changed, 14 insertions(+), 21 deletions(-) create mode 100644 changelog.d/4511.feature diff --git a/changelog.d/4511.feature b/changelog.d/4511.feature new file mode 100644 index 0000000000..bda713adf9 --- /dev/null +++ b/changelog.d/4511.feature @@ -0,0 +1 @@ +Implement MSC1708 (.well-known routing for server-server federation) \ No newline at end of file diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 07c72c9351..f81fcd4301 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -12,7 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import cgi import json import logging @@ -263,37 +262,31 @@ class MatrixFederationAgent(object): # FIXME: add a cache uri = b"https://%s/.well-known/matrix/server" % (server_name, ) - logger.info("Fetching %s", uri.decode("ascii")) + uri_str = uri.decode("ascii") + logger.info("Fetching %s", uri_str) try: response = yield make_deferred_yieldable( self._well_known_agent.request(b"GET", uri), ) except Exception as e: - logger.info( - "Connection error fetching %s: %s", - uri.decode("ascii"), e, - ) + logger.info("Connection error fetching %s: %s", uri_str, e) defer.returnValue(None) body = yield make_deferred_yieldable(readBody(response)) if response.code != 200: - logger.info( - "Error response %i from %s: %s", - response.code, uri.decode("ascii"), body, - ) + logger.info("Error response %i from %s", response.code, uri_str) defer.returnValue(None) - content_types = response.headers.getRawHeaders(u'content-type') - if content_types is None: - raise Exception("no content-type header on .well-known response") - content_type, _opts = cgi.parse_header(content_types[-1]) - if content_type != 'application/json': - raise Exception("content-type not application/json on .well-known response") - parsed_body = json.loads(body.decode('utf-8')) - logger.info("Response from .well-known: %s", parsed_body) - if not isinstance(parsed_body, dict) or "m.server" not in parsed_body: - raise Exception("invalid .well-known response") + try: + parsed_body = json.loads(body.decode('utf-8')) + logger.info("Response from .well-known: %s", parsed_body) + if not isinstance(parsed_body, dict): + raise Exception("not a dict") + if "m.server" not in parsed_body: + raise Exception("Missing key 'm.server'") + except Exception as e: + raise Exception("invalid .well-known response from %s: %s" % (uri_str, e,)) defer.returnValue(parsed_body["m.server"].encode("ascii")) diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index bd80dd0cb6..11ea8ef10c 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -146,7 +146,6 @@ class MatrixFederationAgentTests(TestCase): [b'testserv'], ) # send back a response - request.responseHeaders.setRawHeaders(b'Content-Type', [b'application/json']) request.write(b'{ "m.server": "%s" }' % (target_server,)) request.finish() From 84af5773560d4c468e65fd61f79291e04ed8330f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 17:26:24 +0000 Subject: [PATCH 19/20] Implement event format v2 --- synapse/api/constants.py | 2 + synapse/events/__init__.py | 114 +++++++++++++++++++++++++++++++------ synapse/events/builder.py | 12 +++- synapse/events/utils.py | 3 + 4 files changed, 110 insertions(+), 21 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 51ee078bc3..b248e193fa 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -126,10 +126,12 @@ class EventFormatVersions(object): independently from the room version. 
""" V1 = 1 + V2 = 2 KNOWN_EVENT_FORMAT_VERSIONS = { EventFormatVersions.V1, + EventFormatVersions.V2, } diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 3fe52aaa45..627c55c404 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,11 +19,9 @@ from distutils.util import strtobool import six -from synapse.api.constants import ( - KNOWN_EVENT_FORMAT_VERSIONS, - KNOWN_ROOM_VERSIONS, - EventFormatVersions, -) +from unpaddedbase64 import encode_base64 + +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventFormatVersions, RoomVersions from synapse.util.caches import intern_dict from synapse.util.frozenutils import freeze @@ -225,16 +224,6 @@ class FrozenEvent(EventBase): rejected_reason=rejected_reason, ) - @staticmethod - def from_event(event): - e = FrozenEvent( - event.get_pdu_json() - ) - - e.internal_metadata = event.internal_metadata - - return e - def __str__(self): return self.__repr__() @@ -246,6 +235,85 @@ class FrozenEvent(EventBase): ) +class FrozenEventV2(EventBase): + format_version = EventFormatVersions.V2 # All events of this type are V2 + + def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None): + event_dict = dict(event_dict) + + # Signatures is a dict of dicts, and this is faster than doing a + # copy.deepcopy + signatures = { + name: {sig_id: sig for sig_id, sig in sigs.items()} + for name, sigs in event_dict.pop("signatures", {}).items() + } + + assert "event_id" not in event_dict + + unsigned = dict(event_dict.pop("unsigned", {})) + + # We intern these strings because they turn up a lot (especially when + # caching). + event_dict = intern_dict(event_dict) + + if USE_FROZEN_DICTS: + frozen_dict = freeze(event_dict) + else: + frozen_dict = event_dict + + self._event_id = None + self.type = event_dict["type"] + if "state_key" in event_dict: + self.state_key = event_dict["state_key"] + + super(FrozenEventV2, self).__init__( + frozen_dict, + signatures=signatures, + unsigned=unsigned, + internal_metadata_dict=internal_metadata_dict, + rejected_reason=rejected_reason, + ) + + @property + def event_id(self): + # We have to import this here as otherwise we get an import loop which + # is hard to break. + from synapse.crypto.event_signing import compute_event_reference_hash + + if self._event_id: + return self._event_id + self._event_id = "$" + encode_base64(compute_event_reference_hash(self)[1]) + return self._event_id + + def prev_event_ids(self): + """Returns the list of prev event IDs. The order matches the order + specified in the event, though there is no meaning to it. + + Returns: + list[str]: The list of event IDs of this event's prev_events + """ + return self.prev_events + + def auth_event_ids(self): + """Returns the list of auth event IDs. The order matches the order + specified in the event, though there is no meaning to it. 
+
+        Returns:
+            list[str]: The list of event IDs of this event's auth_events
+        """
+        return self.auth_events
+
+    def __str__(self):
+        return self.__repr__()
+
+    def __repr__(self):
+        return "<FrozenEventV2 event_id='%s', type='%s', state_key='%s'>" % (
+            self.event_id,
+            self.get("type", None),
+            self.get("state_key", None),
+        )
+
+
 def room_version_to_event_format(room_version):
     """Converts a room version string to the event format
 
@@ -259,7 +327,13 @@ def room_version_to_event_format(room_version):
         # We should have already checked version, so this should not happen
         raise RuntimeError("Unrecognized room version %s" % (room_version,))
 
-    return EventFormatVersions.V1
+    if room_version in (
+        RoomVersions.V1, RoomVersions.V2, RoomVersions.VDH_TEST,
+        RoomVersions.STATE_V2_TEST,
+    ):
+        return EventFormatVersions.V1
+    else:
+        raise RuntimeError("Unrecognized room version %s" % (room_version,))
 
 
 def event_type_from_format_version(format_version):
@@ -273,8 +347,12 @@ def event_type_from_format_version(format_version):
         type: A type that can be initialized as per the initializer of
             `FrozenEvent`
     """
-    if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
+
+    if format_version == EventFormatVersions.V1:
+        return FrozenEvent
+    elif format_version == EventFormatVersions.V2:
+        return FrozenEventV2
+    else:
         raise Exception(
             "No event format %r" % (format_version,)
         )
-
-    return FrozenEvent
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index fb0683cea8..06e01be918 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -21,6 +21,7 @@ from synapse.api.constants import (
     KNOWN_EVENT_FORMAT_VERSIONS,
     KNOWN_ROOM_VERSIONS,
     MAX_DEPTH,
+    EventFormatVersions,
 )
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.types import EventID
@@ -109,8 +110,12 @@ class EventBuilder(object):
             self, state_ids,
         )
 
-        auth_events = yield self._store.add_event_hashes(auth_ids)
-        prev_events = yield self._store.add_event_hashes(prev_event_ids)
+        if self.format_version == EventFormatVersions.V1:
+            auth_events = yield self._store.add_event_hashes(auth_ids)
+            prev_events = yield self._store.add_event_hashes(prev_event_ids)
+        else:
+            auth_events = auth_ids
+            prev_events = prev_event_ids
 
         old_depth = yield self._store.get_max_depth_of(
             prev_event_ids,
@@ -228,7 +233,8 @@ def create_local_event_from_event_dict(clock, hostname, signing_key,
 
     time_now = int(clock.time_msec())
 
-    event_dict["event_id"] = _create_event_id(clock, hostname)
+    if format_version == EventFormatVersions.V1:
+        event_dict["event_id"] = _create_event_id(clock, hostname)
 
     event_dict["origin"] = hostname
     event_dict["origin_server_ts"] = time_now
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 63f693f259..07fccdd8f9 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -267,6 +267,7 @@ def serialize_event(e, time_now_ms, as_client_event=True,
     Returns:
         dict
     """
+
     # FIXME(erikj): To handle the case of presence events and the like
     if not isinstance(e, EventBase):
         return e
@@ -276,6 +277,8 @@ def serialize_event(e, time_now_ms, as_client_event=True,
     # Should this strip out None's?
d = {k: v for k, v in e.get_dict().items()} + d["event_id"] = e.event_id + if "age_ts" in d["unsigned"]: d["unsigned"]["age"] = time_now_ms - d["unsigned"]["age_ts"] del d["unsigned"]["age_ts"] From 3680bc18e9a666db69c75de9834a95bfbac9c191 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Jan 2019 18:05:35 +0000 Subject: [PATCH 20/20] Newsfile --- changelog.d/4483.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4483.feature diff --git a/changelog.d/4483.feature b/changelog.d/4483.feature new file mode 100644 index 0000000000..9538c64f08 --- /dev/null +++ b/changelog.d/4483.feature @@ -0,0 +1 @@ +Add support for room version 3
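
A note on the event format v2 patch above: with FrozenEventV2 the event_id is no longer carried in the event JSON but derived from the event's reference hash. The sketch below illustrates that idea in isolation. It is illustrative only: the helper name is made up, json.dumps with sorted keys is merely an approximation of canonical JSON, and the redaction step performed by compute_event_reference_hash is skipped, so the IDs it prints will not match the ones Synapse computes for real events.

import hashlib
import json
from base64 import b64encode


def sketch_v2_event_id(event_dict):
    # Hash the event with signatures/unsigned (and any stray event_id)
    # stripped out, roughly as the reference hash does, then use the
    # unpadded base64 of the SHA-256 digest as the event ID.
    stripped = {
        k: v
        for k, v in event_dict.items()
        if k not in ("signatures", "unsigned", "event_id")
    }
    canonical = json.dumps(stripped, sort_keys=True, separators=(",", ":"))
    digest = hashlib.sha256(canonical.encode("utf-8")).digest()
    return "$" + b64encode(digest).decode("ascii").rstrip("=")


print(sketch_v2_event_id({
    "type": "m.room.message",
    "room_id": "!r:domain",
    "sender": "@u:domain",
    "content": {"body": "Here is the message content"},
    "origin_server_ts": 1000000,
}))

Because the ID is derived from the content, the event JSON itself no longer needs an event_id key, which is why the serialize_event change above re-adds it only when serializing events for clients.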