diff --git a/.gitignore b/.gitignore index d2b93ef61f..b91b52b615 100644 --- a/.gitignore +++ b/.gitignore @@ -24,4 +24,7 @@ graph/*.svg graph/*.png graph/*.dot +webclient/config.js +webclient/test/environment-protractor.js + uploads diff --git a/CHANGES.rst b/CHANGES.rst index 31eee891da..400ded0f15 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,119 @@ +Changes in synapse 0.3.3 (2014-09-22) +===================================== + +Homeserver: + * Fix bug where you continued to get events for rooms you had left. + +Webclient: + * Add support for video calls with basic UI. + * Fix bug where one to one chats were named after your display name rather + than the other person's. + * Fix bug which caused lag when typing in the textarea. + * Refuse to run on browsers we know won't work. + * Trigger pagination when joining new rooms. + * Fix bug where we sometimes didn't display invitations in recents. + * Automatically join room when accepting a VoIP call. + * Disable outgoing and reject incoming calls on browsers we don't support + VoIP in. + * Don't display desktop notifications for messages in the room you are + non-idle and speaking in. + +Changes in synapse 0.3.2 (2014-09-18) +===================================== + +Webclient: + * Fix bug where an empty "bing words" list in old accounts didn't send + notifications when it should have done. + +Changes in synapse 0.3.1 (2014-09-18) +===================================== +This is a release to hotfix v0.3.0 to fix two regressions. + +Webclient: + * Fix a regression where we sometimes displayed duplicate events. + * Fix a regression where we didn't immediately remove rooms you were + banned in from the recents list. + +Changes in synapse 0.3.0 (2014-09-18) +===================================== +See UPGRADE for information about changes to the client server API, including +breaking backwards compatibility with VoIP calls and registration API. + +Homeserver: + * When a user changes their displayname or avatar the server will now update + all their join states to reflect this. + * The server now adds "age" key to events to indicate how old they are. This + is clock independent, so at no point does any server or webclient have to + assume their clock is in sync with everyone else. + * Fix bug where we didn't correctly pull in missing PDUs. + * Fix bug where prev_content key wasn't always returned. + * Add support for password resets. + +Webclient: + * Improve page content loading. + * Join/parts now trigger desktop notifications. + * Always show room aliases in the UI if one is present. + * No longer show user-count in the recents side panel. + * Add up & down arrow support to the text box for message sending to step + through your sent history. + * Don't display notifications for our own messages. + * Emotes are now formatted correctly in desktop notifications. + * The recents list now differentiates between public & private rooms. + * Fix bug where when switching between rooms the pagination flickered before + the view jumped to the bottom of the screen. + * Add bing word support. + +Registration API: + * The registration API has been overhauled to function like the login API. In + practice, this means registration requests must now include the following: + 'type':'m.login.password'. See UPGRADE for more information on this. + * The 'user_id' key has been renamed to 'user' to better match the login API. + * There is an additional login type: 'm.login.email.identity'. 
+ * The command client and web client have been updated to reflect these changes. + +Changes in synapse 0.2.3 (2014-09-12) +===================================== + +Homeserver: + * Fix bug where we stopped sending events to remote home servers if a + user from that home server left, even if there were some still in the + room. + * Fix bugs in the state conflict resolution where it was incorrectly + rejecting events. + +Webclient: + * Display room names and topics. + * Allow setting/editing of room names and topics. + * Display information about rooms on the main page. + * Handle ban and kick events in real time. + * VoIP UI and reliability improvements. + * Add glare support for VoIP. + * Improvements to initial startup speed. + * Don't display duplicate join events. + * Local echo of messages. + * Differentiate sending and sent of local echo. + * Various minor bug fixes. + +Changes in synapse 0.2.2 (2014-09-06) +===================================== + +Homeserver: + * When the server returns state events it now also includes the previous + content. + * Add support for inviting people when creating a new room. + * Make the homeserver inform the room via `m.room.aliases` when a new alias + is added for a room. + * Validate `m.room.power_level` events. + +Webclient: + * Add support for captchas on registration. + * Handle `m.room.aliases` events. + * Asynchronously send messages and show a local echo. + * Inform the UI when a message failed to send. + * Only autoscroll on receiving a new message if the user was already at the + bottom of the screen. + * Add support for ban/kick reasons. + Changes in synapse 0.2.1 (2014-09-03) ===================================== diff --git a/README.rst b/README.rst index 6791e686b7..6f7940e742 100644 --- a/README.rst +++ b/README.rst @@ -102,7 +102,7 @@ service provider in Matrix, unlike WhatsApp, Facebook, Hangouts, etc. Synapse ships with two basic demo Matrix clients: webclient (a basic group chat web client demo implemented in AngularJS) and cmdclient (a basic Python -commandline utility which lets you easily see what the JSON APIs are up to). +command line utility which lets you easily see what the JSON APIs are up to). We'd like to invite you to take a look at the Matrix spec, try to run a homeserver, and join the existing Matrix chatrooms already out there, experiment @@ -122,7 +122,7 @@ Homeserver Installation First, the dependencies need to be installed. Start by installing 'python2.7-dev' and the various tools of the compiler toolchain. - Installing prerequisites on ubuntu:: + Installing prerequisites on Ubuntu:: $ sudo apt-get install build-essential python2.7-dev libffi-dev @@ -151,8 +151,8 @@ you can check PyNaCl out of git directly (https://github.com/pyca/pynacl) and installing it. Installing PyNaCl using pip may also work (remember to remove any other versions installed by setuputils in, for example, ~/.local/lib). -On OSX, if you encounter ``clang: error: unknown argument: '-mno-fused-madd'`` you will -need to ``export CFLAGS=-Qunused-arguments``. +On OSX, if you encounter ``clang: error: unknown argument: '-mno-fused-madd'`` +you will need to ``export CFLAGS=-Qunused-arguments``. 
This will run a process of downloading and installing into your user's .local/lib directory all of the required dependencies that are @@ -203,9 +203,10 @@ For the first form, simply pass the required hostname (of the machine) as the --generate-config $ python synapse/app/homeserver.py --config-path homeserver.config -Alternatively, you can run synapse via synctl - running ``synctl start`` to generate a -homeserver.yaml config file, where you can then edit server-name to specify -machine.my.domain.name, and then set the actual server running again with synctl start. +Alternatively, you can run synapse via synctl - running ``synctl start`` to +generate a homeserver.yaml config file, where you can then edit server-name to +specify machine.my.domain.name, and then set the actual server running again +with synctl start. For the second form, first create your SRV record and publish it in DNS. This needs to be named _matrix._tcp.YOURDOMAIN, and point at at least one hostname @@ -293,7 +294,8 @@ track 3PID logins and publish end-user public keys. It's currently early days for identity servers as Matrix is not yet using 3PIDs as the primary means of identity and E2E encryption is not complete. As such, -we are running a single identity server (http://matrix.org:8090) at the current time. +we are running a single identity server (http://matrix.org:8090) at the current +time. Where's the spec?! diff --git a/UPGRADE.rst b/UPGRADE.rst index da2a7a0a21..713fb9ae83 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -1,3 +1,34 @@ +Upgrading to v0.3.0 +=================== + +This registration API now closely matches the login API. This introduces a bit +more backwards and forwards between the HS and the client, but this improves +the overall flexibility of the API. You can now GET on /register to retrieve a list +of valid registration flows. Upon choosing one, they are submitted in the same +way as login, e.g:: + + { + type: m.login.password, + user: foo, + password: bar + } + +The default HS supports 2 flows, with and without Identity Server email +authentication. Enabling captcha on the HS will add in an extra step to all +flows: ``m.login.recaptcha`` which must be completed before you can transition +to the next stage. There is a new login type: ``m.login.email.identity`` which +contains the ``threepidCreds`` key which were previously sent in the original +register request. For more information on this, see the specification. + +Web Client +---------- + +The VoIP specification has changed between v0.2.0 and v0.3.0. Users should +refresh any browser tabs to get the latest web client code. Users on +v0.2.0 of the web client will not be able to call those on v0.3.0 and +vice versa. + + Upgrading to v0.2.0 =================== diff --git a/VERSION b/VERSION index 0c62199f16..1c09c74e22 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.2.1 +0.3.3 diff --git a/WISHLIST.rst b/WISHLIST.rst index 68324ad9d4..a0713f1966 100644 --- a/WISHLIST.rst +++ b/WISHLIST.rst @@ -5,3 +5,5 @@ Broad-sweeping stuff which would be nice to have - homeserver implementation in go - homeserver implementation in node.js - client SDKs + - libpurple library + - irssi plugin? diff --git a/cmdclient/console.py b/cmdclient/console.py index 2e6b026762..d9c6ec6a70 100755 --- a/cmdclient/console.py +++ b/cmdclient/console.py @@ -145,35 +145,50 @@ class SynapseCmd(cmd.Cmd): : Do not automatically clobber config values. 
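        Registration now mirrors login: the client first GETs /register to see
        which flows the home server offers, then POSTs a body of the form
        {"type": "m.login.password", "user": <userid>, "password": <password>}.
        A non-empty password is required, and registration is aborted if the
        server requires a captcha stage.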
""" args = self._parse(line, ["userid", "noupdate"]) - path = "/register" password = None pwd = None pwd2 = "_" while pwd != pwd2: - pwd = getpass.getpass("(Optional) Type a password for this user: ") - if len(pwd) == 0: - print "Not using a password for this user." - break + pwd = getpass.getpass("Type a password for this user: ") pwd2 = getpass.getpass("Retype the password: ") - if pwd != pwd2: + if pwd != pwd2 or len(pwd) == 0: print "Password mismatch." + pwd = None else: password = pwd - body = {} + body = { + "type": "m.login.password" + } if "userid" in args: - body["user_id"] = args["userid"] + body["user"] = args["userid"] if password: body["password"] = password - reactor.callFromThread(self._do_register, "POST", path, body, + reactor.callFromThread(self._do_register, body, "noupdate" not in args) @defer.inlineCallbacks - def _do_register(self, method, path, data, update_config): - url = self._url() + path - json_res = yield self.http_client.do_request(method, url, data=data) + def _do_register(self, data, update_config): + # check the registration flows + url = self._url() + "/register" + json_res = yield self.http_client.do_request("GET", url) + print json.dumps(json_res, indent=4) + + passwordFlow = None + for flow in json_res["flows"]: + if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]): + print "Unable to register: Home server requires captcha." + return + if flow["type"] == "m.login.password" and "stages" not in flow: + passwordFlow = flow + break + + if not passwordFlow: + return + + json_res = yield self.http_client.do_request("POST", url, data=data) print json.dumps(json_res, indent=4) if update_config and "user_id" in json_res: self.config["user"] = json_res["user_id"] diff --git a/docs/client-server/howto.rst b/docs/client-server/howto.rst index c02ea8d897..ec941edddd 100644 --- a/docs/client-server/howto.rst +++ b/docs/client-server/howto.rst @@ -24,7 +24,7 @@ If you already have an account, you must **login** into it. `Try out the fiddle`__ -.. __: http://jsfiddle.net/4q2jyxng/ +.. __: http://jsfiddle.net/gh/get/jquery/1.8.3/matrix-org/synapse/tree/master/jsfiddles/register_login Registration ------------ @@ -87,7 +87,7 @@ user and **send a message** to that room. `Try out the fiddle`__ -.. __: http://jsfiddle.net/zL3zto9g/ +.. __: http://jsfiddle.net/gh/get/jquery/1.8.3/matrix-org/synapse/tree/master/jsfiddles/create_room_send_msg Creating a room --------------- @@ -137,7 +137,7 @@ join a room **via a room alias** if one was set up. `Try out the fiddle`__ -.. __: http://jsfiddle.net/7fhotf1b/ +.. __: http://jsfiddle.net/gh/get/jquery/1.8.3/matrix-org/synapse/tree/master/jsfiddles/room_memberships Inviting a user to a room ------------------------- @@ -183,7 +183,7 @@ of getting events, depending on what the client already knows. `Try out the fiddle`__ -.. __: http://jsfiddle.net/vw11mg37/ +.. __: http://jsfiddle.net/gh/get/jquery/1.8.3/matrix-org/synapse/tree/master/jsfiddles/event_stream Getting all state ----------------- @@ -633,4 +633,4 @@ application. `Try out the fiddle`__ -.. __: http://jsfiddle.net/uztL3yme/ +.. 
__: http://jsfiddle.net/gh/get/jquery/1.8.3/matrix-org/synapse/tree/master/jsfiddles/example_app diff --git a/docs/model/presence.rst b/docs/client-server/model/presence.rst similarity index 100% rename from docs/model/presence.rst rename to docs/client-server/model/presence.rst diff --git a/docs/model/profiles.rst b/docs/client-server/model/profiles.rst similarity index 100% rename from docs/model/profiles.rst rename to docs/client-server/model/profiles.rst diff --git a/docs/model/protocol_examples.rst b/docs/client-server/model/protocol_examples.rst similarity index 100% rename from docs/model/protocol_examples.rst rename to docs/client-server/model/protocol_examples.rst diff --git a/docs/model/room-join-workflow.rst b/docs/client-server/model/room-join-workflow.rst similarity index 100% rename from docs/model/room-join-workflow.rst rename to docs/client-server/model/room-join-workflow.rst diff --git a/docs/model/rooms.rst b/docs/client-server/model/rooms.rst similarity index 100% rename from docs/model/rooms.rst rename to docs/client-server/model/rooms.rst diff --git a/docs/model/terminology.rst b/docs/client-server/model/terminology.rst similarity index 100% rename from docs/model/terminology.rst rename to docs/client-server/model/terminology.rst diff --git a/docs/model/third-party-id.rst b/docs/client-server/model/third-party-id.rst similarity index 100% rename from docs/model/third-party-id.rst rename to docs/client-server/model/third-party-id.rst diff --git a/docs/client-server/swagger_matrix/api-docs-registration b/docs/client-server/swagger_matrix/api-docs-registration index f4669ea2f0..11c170c3ec 100644 --- a/docs/client-server/swagger_matrix/api-docs-registration +++ b/docs/client-server/swagger_matrix/api-docs-registration @@ -3,35 +3,38 @@ "apis": [ { "operations": [ + { + "method": "GET", + "nickname": "get_registration_info", + "notes": "All login stages MUST be mentioned if there is >1 login type.", + "summary": "Get the login mechanism to use when registering.", + "type": "RegistrationFlows" + }, { "method": "POST", - "nickname": "register", - "notes": "Volatile: This API is likely to change.", + "nickname": "submit_registration", + "notes": "If this is part of a multi-stage registration, there MUST be a 'session' key.", "parameters": [ { - "description": "A registration request", + "description": "A registration submission", "name": "body", "paramType": "body", "required": true, - "type": "RegistrationRequest" + "type": "RegistrationSubmission" } ], "responseMessages": [ { "code": 400, - "message": "No JSON object." + "message": "Bad login type" }, { "code": 400, - "message": "User ID must only contain characters which do not require url encoding." - }, - { - "code": 400, - "message": "User ID already taken." 
+ "message": "Missing JSON keys" } ], - "summary": "Register with the home server.", - "type": "RegistrationResponse" + "summary": "Submit a registration action.", + "type": "RegistrationResult" } ], "path": "/register" @@ -42,30 +45,68 @@ "application/json" ], "models": { - "RegistrationResponse": { - "id": "RegistrationResponse", + "RegistrationFlows": { + "id": "RegistrationFlows", "properties": { - "access_token": { - "description": "The access token for this user.", - "type": "string" + "flows": { + "description": "A list of valid registration flows.", + "type": "array", + "items": { + "$ref": "RegistrationInfo" + } + } + } + }, + "RegistrationInfo": { + "id": "RegistrationInfo", + "properties": { + "stages": { + "description": "Multi-stage registration only: An array of all the login types required to registration.", + "items": { + "$ref": "string" + }, + "type": "array" }, - "user_id": { - "description": "The fully-qualified user ID.", - "type": "string" - }, - "home_server": { - "description": "The name of the home server.", + "type": { + "description": "The first login type that must be used when logging in.", "type": "string" } } }, - "RegistrationRequest": { - "id": "RegistrationRequest", + "RegistrationResult": { + "id": "RegistrationResult", "properties": { + "access_token": { + "description": "The access token for this user's registration if this is the final stage of the registration process.", + "type": "string" + }, "user_id": { - "description": "The desired user ID. If not specified, a random user ID will be allocated.", - "type": "string", - "required": false + "description": "The user's fully-qualified user ID.", + "type": "string" + }, + "next": { + "description": "Multi-stage registration only: The next registration type to submit.", + "type": "string" + }, + "session": { + "description": "Multi-stage registration only: The session token to send when submitting the next registration type.", + "type": "string" + } + } + }, + "RegistrationSubmission": { + "id": "RegistrationSubmission", + "properties": { + "type": { + "description": "The type of registration being submitted.", + "type": "string" + }, + "session": { + "description": "Multi-stage registration only: The session token from an earlier registration stage.", + "type": "string" + }, + "_registration_type_defined_keys_": { + "description": "Keys as defined by the specified registration type, e.g. \"user\", \"password\"" } } } diff --git a/docs/freenode.txt b/docs/freenode.txt new file mode 100644 index 0000000000..84fdf6d523 --- /dev/null +++ b/docs/freenode.txt @@ -0,0 +1 @@ +NCjcRSEG diff --git a/docs/human-id-rules.rst b/docs/human-id-rules.rst new file mode 100644 index 0000000000..3a1ff39892 --- /dev/null +++ b/docs/human-id-rules.rst @@ -0,0 +1,79 @@ +This document outlines the format for human-readable IDs within matrix. + +Overview +-------- +UTF-8 is quickly becoming the standard character encoding set on the web. As +such, Matrix requires that all strings MUST be encoded as UTF-8. However, +using Unicode as the character set for human-readable IDs is troublesome. There +are many different characters which appear identical to each other, but would +identify different users. In addition, there are non-printable characters which +cannot be rendered by the end-user. This opens up a security vulnerability with +phishing/spoofing of IDs, commonly known as a homograph attack. + +Web browers encountered this problem when International Domain Names were +introduced. 
A variety of checks were put in place in order to protect users. If +an address failed the check, the raw punycode would be displayed to disambiguate +the address. Similar checks are performed by home servers in Matrix. However, +Matrix does not use punycode representations, and so does not show raw punycode +on a failed check. Instead, home servers must outright reject these misleading +IDs. + +Types of human-readable IDs +--------------------------- +There are two main human-readable IDs in question: + +- Room aliases +- User IDs + +Room aliases look like ``#localpart:domain``. These aliases point to opaque +non human-readable room IDs. These pointers can change, so there is already an +issue present with the same ID pointing to a different destination at a later +date. + +User IDs look like ``@localpart:domain``. These represent actual end-users, and +unlike room aliases, there is no layer of indirection. This presents a much +greater concern with homograph attacks. + +Checks +------ +- Similar to web browsers. +- blacklisted chars (e.g. non-printable characters) +- mix of language sets from 'preferred' language not allowed. +- Language sets from CLDR dataset. +- Treated in segments (localpart, domain) +- Additional restrictions for ease of processing IDs. + - Room alias localparts MUST NOT have ``#`` or ``:``. + - User ID localparts MUST NOT have ``@`` or ``:``. + +Rejecting +--------- +- Home servers MUST reject room aliases which do not pass the check, both on + GETs and PUTs. +- Home servers MUST reject user ID localparts which do not pass the check, both + on creation and on events. +- Any home server whose domain does not pass this check, MUST use their punycode + domain name instead of the IDN, to prevent other home servers rejecting you. +- Error code is ``M_FAILED_HUMAN_ID_CHECK``. (generic enough for both failing + due to homograph attacks, and failing due to including ``:`` s, etc) +- Error message MAY go into further information about which characters were + rejected and why. +- Error message SHOULD contain a ``failed_keys`` key which contains an array + of strings which represent the keys which failed the check e.g:: + + failed_keys: [ user_id, room_alias ] + +Other considerations +-------------------- +- Basic security: Informational key on the event attached by HS to say "unsafe + ID". Problem: clients can just ignore it, and since it will appear only very + rarely, easy to forget when implementing clients. +- Moderate security: Requires client handshake. Forces clients to implement + a check, else they cannot communicate with the misleading ID. However, this is + extra overhead in both client implementations and round-trips. +- High security: Outright rejection of the ID at the point of creation / + receiving event. Point of creation rejection is preferable to avoid the ID + entering the system in the first place. However, malicious HSes can just allow + the ID. Hence, other home servers must reject them if they see them in events. + Client never sees the problem ID, provided the HS is correctly implemented. +- High security decided; client doesn't need to worry about it, no additional + protocol complexity aside from rejection of an event. 
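
Illustrative check (sketch)
---------------------------
The following is a rough sketch, in Python, of the per-segment character check
described above. The helper name is invented here, the CLDR language-set
comparison is omitted, and the exact set of rejected non-printable categories
is an assumption::

    import unicodedata

    FORBIDDEN = {"room_alias": "#:", "user_id": "@:"}

    def localpart_passes(kind, localpart):
        # Reject the reserved sigil/separator characters listed above, plus
        # control/format/separator characters that cannot be rendered. A real
        # failure would surface as M_FAILED_HUMAN_ID_CHECK with the offending
        # key listed in failed_keys.
        for c in localpart:
            if c in FORBIDDEN[kind] or unicodedata.category(c) in ("Cc", "Cf", "Zl", "Zp"):
                return False
        return True

    localpart_passes("user_id", "alice")        # True
    localpart_passes("room_alias", "foo:bar")   # False, ':' is reserved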
\ No newline at end of file diff --git a/docs/specification.rst b/docs/specification.rst index 1d3c283331..370e238e00 100644 --- a/docs/specification.rst +++ b/docs/specification.rst @@ -418,6 +418,16 @@ which can be set when creating a room: If this is included, an ``m.room.topic`` event will be sent into the room to indicate the topic for the room. See `Room Events`_ for more information on ``m.room.topic``. +``invite`` + Type: + List + Optional: + Yes + Value: + A list of user ids to invite. + Description: + This will tell the server to invite everyone in the list to the newly created room. + Example:: { @@ -745,15 +755,17 @@ There are several APIs provided to ``GET`` events for a room: Description: Get all ``m.room.member`` state events. Response format: - ``{ "start": "token", "end": "token", "chunk": [ { m.room.member event }, ... ] }`` + ``{ "start": "", "end": "", "chunk": [ { m.room.member event }, ... ] }`` Example: TODO |/rooms//messages|_ Description: - Get all ``m.room.message`` events. + Get all ``m.room.message`` and ``m.room.member`` events. This API supports pagination + using ``from`` and ``to`` query parameters, coupled with the ``start`` and ``end`` + tokens from an |initialSync|_ API. Response format: - ``{ TODO }`` + ``{ "start": "", "end": "" }`` Example: TODO @@ -909,6 +921,22 @@ prefixed with ``m.`` ``ban_level`` will be greater than or equal to ``kick_level`` since banning is more severe than kicking. +``m.room.aliases`` + Summary: + These state events are used to inform the room about what room aliases it has. + Type: + State event + JSON format: + ``{ "aliases": ["string", ...] }`` + Example: + ``{ "aliases": ["#foo:example.com"] }`` + Description: + A server `may` inform the room that it has added or removed an alias for + the room. This is purely for informational purposes and may become stale. + Clients `should` check that the room alias is still valid before using it. + The ``state_key`` of the event is the homeserver which owns the room + alias. + ``m.room.message`` Summary: A message. @@ -1141,8 +1169,14 @@ This event is sent by the caller when they wish to establish a call. Required keys: - ``call_id`` : "string" - A unique identifier for the call - ``offer`` : "offer object" - The session description - - ``version`` : "integer" - The version of the VoIP specification this message - adheres to. This specification is version 0. + - ``version`` : "integer" - The version of the VoIP specification this + message adheres to. This specification is + version 0. + - ``lifetime`` : "integer" - The time in milliseconds that the invite is + valid for. Once the invite age exceeds this + value, clients should discard it. They + should also no longer show the call as + awaiting an answer in the UI. Optional keys: None. @@ -1154,16 +1188,16 @@ This event is sent by the caller when they wish to establish a call. - ``type`` : "string" - The type of session description, in this case 'offer' - ``sdp`` : "string" - The SDP text of the session description -``m.call.candidate`` +``m.call.candidates`` This event is sent by callers after sending an invite and by the callee after answering. -Its purpose is to give the other party an additional ICE candidate to try using to +Its purpose is to give the other party additional ICE candidates to try using to communicate. Required keys: - ``call_id`` : "string" - The ID of the call this event relates to - ``version`` : "integer" - The version of the VoIP specification this messages adheres to. his specification is version 0. 
- - ``candidate`` : "candidate object" - Object describing the candidate. + - ``candidates`` : "array of candidate objects" - Array of object describing the candidates. ``Candidate Object`` @@ -1221,7 +1255,33 @@ Or a rejected call: <------- m.call.hangup Calls are negotiated according to the WebRTC specification. - + + +Glare +----- +This specification aims to address the problem of two users calling each other +at roughly the same time and their invites crossing on the wire. It is a far +better experience for the users if their calls are connected if it is clear +that their intention is to set up a call with one another. + +In Matrix, calls are to rooms rather than users (even if those rooms may only +contain one other user) so we consider calls which are to the same room. + +The rules for dealing with such a situation are as follows: + + - If an invite to a room is received whilst the client is preparing to send an + invite to the same room, the client should cancel its outgoing call and + instead automatically accept the incoming call on behalf of the user. + - If an invite to a room is received after the client has sent an invite to the + same room and is waiting for a response, the client should perform a + lexicographical comparison of the call IDs of the two calls and use the + lesser of the two calls, aborting the greater. If the incoming call is the + lesser, the client should accept this call on behalf of the user. + +The call setup should appear seamless to the user as if they had simply placed +a call and the other party had accepted. Thusly, any media stream that had been +setup for use on a call should be transferred and used for the call that +replaces it. Profiles ======== @@ -1251,12 +1311,6 @@ display name other than it being a valid unicode string. Registration and login ====================== -.. WARNING:: - The registration API is likely to change. - -.. TODO - - TODO Kegan : Make registration like login (just omit the "user" key on the - initial request?) Clients must register with a home server in order to use Matrix. After registering, the client will be given an access token which must be used in ALL @@ -1269,9 +1323,11 @@ a token sent to their email address, etc. This specification does not define how home servers should authorise their users who want to login to their existing accounts, but instead defines the standard interface which implementations should follow so that ANY client can login to ANY home server. Clients login -using the |login|_ API. +using the |login|_ API. Clients register using the |register|_ API. Registration +follows the same procedure as login, but the path requests are sent to are +different. -The login process breaks down into the following: +The registration/login process breaks down into the following: 1. Determine the requirements for logging in. 2. Submit the login stage credentials. 3. Get credentials or be told the next stage in the login process and repeat @@ -1329,7 +1385,7 @@ This specification defines the following login types: - ``m.login.oauth2`` - ``m.login.email.code`` - ``m.login.email.url`` - + - ``m.login.email.identity`` Password-based -------------- @@ -1477,6 +1533,31 @@ If the link has not been visited yet, a standard error response with an errcode ``M_LOGIN_EMAIL_URL_NOT_YET`` should be returned. +Email-based (identity server) +----------------------------- +:Type: + ``m.login.email.identity`` +:Description: + Login is supported by authorising an email address with an identity server. 
+ +Prior to submitting this, the client should authenticate with an identity server. +After authenticating, the session information should be submitted to the home server. + +To respond to this type, reply with:: + + { + "type": "m.login.email.identity", + "threepidCreds": [ + { + "sid": "", + "clientSecret": "", + "idServer": "" + } + ] + } + + + N-Factor Authentication ----------------------- Multiple login stages can be combined to create N-factor authentication during login. @@ -1946,6 +2027,10 @@ victim would then include in their view of the chatroom history. Other servers in the chatroom would reject the invalid messages and potentially reject the victims messages as well since they depended on the invalid messages. +.. TODO + Track trustworthiness of HS or users based on if they try to pretend they + haven't seen recent events, and fake a splitbrain... --M + Threat: Block Network Traffic +++++++++++++++++++++++++++++ @@ -2184,6 +2269,9 @@ Transaction: .. |login| replace:: ``/login`` .. _login: /docs/api/client-server/#!/-login +.. |register| replace:: ``/register`` +.. _register: /docs/api/client-server/#!/-registration + .. |/rooms//messages| replace:: ``/rooms//messages`` .. _/rooms//messages: /docs/api/client-server/#!/-rooms/get_messages diff --git a/jsfiddles/create_room_send_msg/demo.js b/jsfiddles/create_room_send_msg/demo.js index 3dc7263830..9c346e2f64 100644 --- a/jsfiddles/create_room_send_msg/demo.js +++ b/jsfiddles/create_room_send_msg/demo.js @@ -19,7 +19,12 @@ $('.login').live('click', function() { showLoggedIn(data); }, error: function(err) { - alert(JSON.stringify($.parseJSON(err.responseText))); + var errMsg = "To try this, you need a home server running!"; + var errJson = $.parseJSON(err.responseText); + if (errJson) { + errMsg = JSON.stringify(errJson); + } + alert(errMsg); } }); }); diff --git a/jsfiddles/event_stream/demo.js b/jsfiddles/event_stream/demo.js index 5c81e08caa..acba8391fa 100644 --- a/jsfiddles/event_stream/demo.js +++ b/jsfiddles/event_stream/demo.js @@ -58,7 +58,12 @@ $('.login').live('click', function() { showLoggedIn(data); }, error: function(err) { - alert(JSON.stringify($.parseJSON(err.responseText))); + var errMsg = "To try this, you need a home server running!"; + var errJson = $.parseJSON(err.responseText); + if (errJson) { + errMsg = JSON.stringify(errJson); + } + alert(errMsg); } }); }); diff --git a/jsfiddles/example_app/demo.details b/jsfiddles/example_app/demo.details new file mode 100644 index 0000000000..3f96d3e744 --- /dev/null +++ b/jsfiddles/example_app/demo.details @@ -0,0 +1,7 @@ + name: Example Matrix Client + description: Includes login, live event streaming, creating rooms, sending messages and viewing member lists. 
+ authors: + - matrix.org + resources: + - http://matrix.org + normalize_css: no \ No newline at end of file diff --git a/jsfiddles/register_login/demo.js b/jsfiddles/register_login/demo.js index 9595039173..fffa9e0551 100644 --- a/jsfiddles/register_login/demo.js +++ b/jsfiddles/register_login/demo.js @@ -20,7 +20,12 @@ $('.register').live('click', function() { showLoggedIn(data); }, error: function(err) { - alert(JSON.stringify($.parseJSON(err.responseText))); + var errMsg = "To try this, you need a home server running!"; + var errJson = $.parseJSON(err.responseText); + if (errJson) { + errMsg = JSON.stringify(errJson); + } + alert(errMsg); } }); }); @@ -36,7 +41,12 @@ var login = function(user, password) { showLoggedIn(data); }, error: function(err) { - alert(JSON.stringify($.parseJSON(err.responseText))); + var errMsg = "To try this, you need a home server running!"; + var errJson = $.parseJSON(err.responseText); + if (errJson) { + errMsg = JSON.stringify(errJson); + } + alert(errMsg); } }); }; diff --git a/jsfiddles/room_memberships/demo.js b/jsfiddles/room_memberships/demo.js index 64ba767138..8a7b1aa88e 100644 --- a/jsfiddles/room_memberships/demo.js +++ b/jsfiddles/room_memberships/demo.js @@ -28,7 +28,12 @@ $('.login').live('click', function() { showLoggedIn(data); }, error: function(err) { - alert(JSON.stringify($.parseJSON(err.responseText))); + var errMsg = "To try this, you need a home server running!"; + var errJson = $.parseJSON(err.responseText); + if (errJson) { + errMsg = JSON.stringify(errJson); + } + alert(errMsg); } }); }); diff --git a/synapse/__init__.py b/synapse/__init__.py index 440e633966..bba551b2c4 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a synapse home server. """ -__version__ = "0.2.1" +__version__ = "0.3.3" diff --git a/synapse/api/auth.py b/synapse/api/auth.py index b4eda3df01..8f32191b57 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -18,8 +18,8 @@ from twisted.internet import defer from synapse.api.constants import Membership, JoinRules -from synapse.api.errors import AuthError, StoreError, Codes -from synapse.api.events.room import RoomMemberEvent +from synapse.api.errors import AuthError, StoreError, Codes, SynapseError +from synapse.api.events.room import RoomMemberEvent, RoomPowerLevelsEvent from synapse.util.logutils import log_function import logging @@ -67,6 +67,9 @@ class Auth(object): else: yield self._can_send_event(event) + if event.type == RoomPowerLevelsEvent.TYPE: + yield self._check_power_levels(event) + defer.returnValue(True) else: raise AuthError(500, "Unknown event: %s" % event) @@ -172,7 +175,7 @@ class Auth(object): if kick_level: kick_level = int(kick_level) else: - kick_level = 5 + kick_level = 50 if user_level < kick_level: raise AuthError( @@ -189,7 +192,7 @@ class Auth(object): if ban_level: ban_level = int(ban_level) else: - ban_level = 5 # FIXME (erikj): What should we do here? + ban_level = 50 # FIXME (erikj): What should we do here? 
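        # A user may only ban when their own power level meets the room's
        # ban_level (falling back to 50 when the room doesn't define one).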
if user_level < ban_level: raise AuthError(403, "You don't have permission to ban") @@ -305,7 +308,9 @@ class Auth(object): else: user_level = 0 - logger.debug("Checking power level for %s, %s", event.user_id, user_level) + logger.debug( + "Checking power level for %s, %s", event.user_id, user_level + ) if current_state and hasattr(current_state, "required_power_level"): req = current_state.required_power_level @@ -315,3 +320,101 @@ class Auth(object): 403, "You don't have permission to change that state" ) + + @defer.inlineCallbacks + def _check_power_levels(self, event): + for k, v in event.content.items(): + if k == "default": + continue + + # FIXME (erikj): We don't want hsob_Ts in content. + if k == "hsob_ts": + continue + + try: + self.hs.parse_userid(k) + except: + raise SynapseError(400, "Not a valid user_id: %s" % (k,)) + + try: + int(v) + except: + raise SynapseError(400, "Not a valid power level: %s" % (v,)) + + current_state = yield self.store.get_current_state( + event.room_id, + event.type, + event.state_key, + ) + + if not current_state: + return + else: + current_state = current_state[0] + + user_level = yield self.store.get_power_level( + event.room_id, + event.user_id, + ) + + if user_level: + user_level = int(user_level) + else: + user_level = 0 + + old_list = current_state.content + + # FIXME (erikj) + old_people = {k: v for k, v in old_list.items() if k.startswith("@")} + new_people = { + k: v for k, v in event.content.items() + if k.startswith("@") + } + + removed = set(old_people.keys()) - set(new_people.keys()) + added = set(old_people.keys()) - set(new_people.keys()) + same = set(old_people.keys()) & set(new_people.keys()) + + for r in removed: + if int(old_list.content[r]) > user_level: + raise AuthError( + 403, + "You don't have permission to remove user: %s" % (r, ) + ) + + for n in added: + if int(event.content[n]) > user_level: + raise AuthError( + 403, + "You don't have permission to add ops level greater " + "than your own" + ) + + for s in same: + if int(event.content[s]) != int(old_list[s]): + if int(event.content[s]) > user_level: + raise AuthError( + 403, + "You don't have permission to add ops level greater " + "than your own" + ) + + if "default" in old_list: + old_default = int(old_list["default"]) + + if old_default > user_level: + raise AuthError( + 403, + "You don't have permission to add ops level greater than " + "your own" + ) + + if "default" in event.content: + new_default = int(event.content["default"]) + + if new_default > user_level: + raise AuthError( + 403, + "You don't have permission to add ops level greater " + "than your own" + ) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index fcef062fc9..618d3d7577 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -50,3 +50,12 @@ class JoinRules(object): KNOCK = u"knock" INVITE = u"invite" PRIVATE = u"private" + + +class LoginType(object): + PASSWORD = u"m.login.password" + OAUTH = u"m.login.oauth2" + EMAIL_CODE = u"m.login.email.code" + EMAIL_URL = u"m.login.email.url" + EMAIL_IDENTITY = u"m.login.email.identity" + RECAPTCHA = u"m.login.recaptcha" \ No newline at end of file diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 84afe4fa37..88175602c4 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -29,6 +29,8 @@ class Codes(object): NOT_FOUND = "M_NOT_FOUND" UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN" LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED" + CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED" + CAPTCHA_INVALID = "M_CAPTCHA_INVALID" class 
CodeMessageException(Exception): @@ -101,6 +103,19 @@ class StoreError(SynapseError): pass +class InvalidCaptchaError(SynapseError): + def __init__(self, code=400, msg="Invalid captcha.", error_url=None, + errcode=Codes.CAPTCHA_INVALID): + super(InvalidCaptchaError, self).__init__(code, msg, errcode) + self.error_url = error_url + + def error_dict(self): + return cs_error( + self.msg, + self.errcode, + error_url=self.error_url, + ) + class LimitExceededError(SynapseError): """A client has sent too many requests and is being throttled. """ diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index f95468fc65..0cee196851 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -17,6 +17,19 @@ from synapse.api.errors import SynapseError, Codes from synapse.util.jsonobject import JsonEncodedObject +def serialize_event(hs, e): + # FIXME(erikj): To handle the case of presence events and the like + if not isinstance(e, SynapseEvent): + return e + + d = e.get_dict() + if "age_ts" in d: + d["age"] = int(hs.get_clock().time_msec()) - d["age_ts"] + del d["age_ts"] + + return d + + class SynapseEvent(JsonEncodedObject): """Base class for Synapse events. These are JSON objects which must abide @@ -43,6 +56,8 @@ class SynapseEvent(JsonEncodedObject): "content", # HTTP body, JSON "state_key", "required_power_level", + "age_ts", + "prev_content", ] internal_keys = [ @@ -141,7 +156,8 @@ class SynapseEvent(JsonEncodedObject): return "Missing %s key" % key if type(content[key]) != type(template[key]): - return "Key %s is of the wrong type." % key + return "Key %s is of the wrong type (got %s, want %s)" % ( + key, type(content[key]), type(template[key])) if type(content[key]) == dict: # we must go deeper @@ -157,7 +173,8 @@ class SynapseEvent(JsonEncodedObject): class SynapseStateEvent(SynapseEvent): - def __init__(self, **kwargs): + + def __init__(self, **kwargs): if "state_key" not in kwargs: kwargs["state_key"] = "" super(SynapseStateEvent, self).__init__(**kwargs) diff --git a/synapse/api/events/factory.py b/synapse/api/events/factory.py index a3b293e024..d3d96d73eb 100644 --- a/synapse/api/events/factory.py +++ b/synapse/api/events/factory.py @@ -47,15 +47,26 @@ class EventFactory(object): self._event_list[event_class.TYPE] = event_class self.clock = hs.get_clock() + self.hs = hs def create_event(self, etype=None, **kwargs): kwargs["type"] = etype if "event_id" not in kwargs: - kwargs["event_id"] = random_string(10) + kwargs["event_id"] = "%s@%s" % ( + random_string(10), self.hs.hostname + ) if "ts" not in kwargs: kwargs["ts"] = int(self.clock.time_msec()) + # The "age" key is a delta timestamp that should be converted into an + # absolute timestamp the minute we see it. 
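        # Worked example (values illustrative): an event arriving with
        # age=5000 was ~5s old by the sender's reckoning; we record
        # age_ts = now - 5000 against our own clock, and whenever the event
        # is later serialised for a client or another server (see
        # serialize_event and the federation transaction callbacks) age is
        # recomputed as now - age_ts. No clock synchronisation between
        # servers is ever assumed.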
+ if "age" in kwargs: + kwargs["age_ts"] = int(self.clock.time_msec()) - int(kwargs["age"]) + del kwargs["age"] + elif "age_ts" not in kwargs: + kwargs["age_ts"] = int(self.clock.time_msec()) + if etype in self._event_list: handler = self._event_list[etype] else: diff --git a/synapse/api/events/room.py b/synapse/api/events/room.py index 33f0f0cb99..3a4dbc58ce 100644 --- a/synapse/api/events/room.py +++ b/synapse/api/events/room.py @@ -173,3 +173,10 @@ class RoomOpsPowerLevelsEvent(SynapseStateEvent): def get_content_template(self): return {} + + +class RoomAliasesEvent(SynapseStateEvent): + TYPE = "m.room.aliases" + + def get_content_template(self): + return {} diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 49cf928cc1..2f1b954902 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage import read_schema +from synapse.storage import prepare_database from synapse.server import HomeServer @@ -36,30 +36,14 @@ from daemonize import Daemonize import twisted.manhole.telnet import logging -import sqlite3 import os import re import sys +import sqlite3 logger = logging.getLogger(__name__) -SCHEMAS = [ - "transactions", - "pdu", - "users", - "profiles", - "presence", - "im", - "room_aliases", -] - - -# Remember to update this number every time an incompatible change is made to -# database schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 2 - - class SynapseHomeServer(HomeServer): def build_http_client(self): @@ -80,52 +64,12 @@ class SynapseHomeServer(HomeServer): ) def build_db_pool(self): - """ Set up all the dbs. Since all the *.sql have IF NOT EXISTS, so we - don't have to worry about overwriting existing content. - """ - logging.info("Preparing database: %s...", self.db_name) - - with sqlite3.connect(self.db_name) as db_conn: - c = db_conn.cursor() - c.execute("PRAGMA user_version") - row = c.fetchone() - - if row and row[0]: - user_version = row[0] - - if user_version > SCHEMA_VERSION: - raise ValueError("Cannot use this database as it is too " + - "new for the server to understand" - ) - elif user_version < SCHEMA_VERSION: - logging.info("Upgrading database from version %d", - user_version - ) - - # Run every version since after the current version. - for v in range(user_version + 1, SCHEMA_VERSION + 1): - sql_script = read_schema("delta/v%d" % (v)) - c.executescript(sql_script) - - db_conn.commit() - - else: - for sql_loc in SCHEMAS: - sql_script = read_schema(sql_loc) - - c.executescript(sql_script) - db_conn.commit() - c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION) - - c.close() - - logging.info("Database prepared in %s.", self.db_name) - - pool = adbapi.ConnectionPool( - 'sqlite3', self.db_name, check_same_thread=False, - cp_min=1, cp_max=1) - - return pool + return adbapi.ConnectionPool( + "sqlite3", self.get_db_name(), + check_same_thread=False, + cp_min=1, + cp_max=1 + ) def create_resource_tree(self, web_client, redirect_root_to_web_client): """Create the resource tree for this Home Server. 
@@ -230,10 +174,6 @@ class SynapseHomeServer(HomeServer): logger.info("Synapse now listening on port %d", unsecure_port) -def run(): - reactor.run() - - def setup(): config = HomeServerConfig.load_config( "Synapse Homeserver", @@ -268,7 +208,15 @@ def setup(): web_client=config.webclient, redirect_root_to_web_client=True, ) - hs.start_listening(config.bind_port, config.unsecure_port) + + db_name = hs.get_db_name() + + logging.info("Preparing database: %s...", db_name) + + with sqlite3.connect(db_name) as db_conn: + prepare_database(db_conn) + + logging.info("Database prepared in %s.", db_name) hs.get_db_pool() @@ -279,12 +227,14 @@ def setup(): f.namespace['hs'] = hs reactor.listenTCP(config.manhole, f, interface='127.0.0.1') + hs.start_listening(config.bind_port, config.unsecure_port) + if config.daemonize: print config.pid_file daemon = Daemonize( app="synapse-homeserver", pid=config.pid_file, - action=run, + action=reactor.run, auto_close_fds=False, verbose=True, logger=logger, @@ -292,7 +242,7 @@ def setup(): daemon.start() else: - run() + reactor.run() if __name__ == '__main__': diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py new file mode 100644 index 0000000000..8ebcfc3623 --- /dev/null +++ b/synapse/config/captcha.py @@ -0,0 +1,46 @@ +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ._base import Config + + +class CaptchaConfig(Config): + + def __init__(self, args): + super(CaptchaConfig, self).__init__(args) + self.recaptcha_private_key = args.recaptcha_private_key + self.enable_registration_captcha = args.enable_registration_captcha + self.captcha_ip_origin_is_x_forwarded = ( + args.captcha_ip_origin_is_x_forwarded + ) + + @classmethod + def add_arguments(cls, parser): + super(CaptchaConfig, cls).add_arguments(parser) + group = parser.add_argument_group("recaptcha") + group.add_argument( + "--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY", + help="The matching private key for the web client's public key." + ) + group.add_argument( + "--enable-registration-captcha", type=bool, default=False, + help="Enables ReCaptcha checks when registering, preventing signup" + + " unless a captcha is answered. Requires a valid ReCaptcha " + + "public/private key." + ) + group.add_argument( + "--captcha_ip_origin_is_x_forwarded", type=bool, default=False, + help="When checking captchas, use the X-Forwarded-For (XFF) header" + + " as the client IP and not the actual client IP." + ) \ No newline at end of file diff --git a/synapse/config/email.py b/synapse/config/email.py new file mode 100644 index 0000000000..9bcc5a8fea --- /dev/null +++ b/synapse/config/email.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ._base import Config + + +class EmailConfig(Config): + + def __init__(self, args): + super(EmailConfig, self).__init__(args) + self.email_from_address = args.email_from_address + self.email_smtp_server = args.email_smtp_server + + @classmethod + def add_arguments(cls, parser): + super(EmailConfig, cls).add_arguments(parser) + email_group = parser.add_argument_group("email") + email_group.add_argument( + "--email-from-address", + default="FROM@EXAMPLE.COM", + help="The address to send emails from (e.g. for password resets)." + ) + email_group.add_argument( + "--email-smtp-server", + default="", + help="The SMTP server to send emails from (e.g. for password resets)." + ) \ No newline at end of file diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 76e2cdeddd..4b810a2302 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -19,11 +19,16 @@ from .logger import LoggingConfig from .database import DatabaseConfig from .ratelimiting import RatelimitConfig from .repository import ContentRepositoryConfig +from .captcha import CaptchaConfig +from .email import EmailConfig + class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, - RatelimitConfig, ContentRepositoryConfig): + RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, + EmailConfig): pass -if __name__=='__main__': + +if __name__ == '__main__': import sys HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer") diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index e12510017f..96b82f00cb 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -291,6 +291,13 @@ class ReplicationLayer(object): def on_incoming_transaction(self, transaction_data): transaction = Transaction(**transaction_data) + for p in transaction.pdus: + if "age" in p: + p["age_ts"] = int(self._clock.time_msec()) - int(p["age"]) + del p["age"] + + pdu_list = [Pdu(**p) for p in transaction.pdus] + logger.debug("[%s] Got transaction", transaction.transaction_id) response = yield self.transaction_actions.have_responded(transaction) @@ -303,8 +310,6 @@ class ReplicationLayer(object): logger.debug("[%s] Transacition is new", transaction.transaction_id) - pdu_list = [Pdu(**p) for p in transaction.pdus] - dl = [] for pdu in pdu_list: dl.append(self._handle_new_pdu(pdu)) @@ -405,9 +410,14 @@ class ReplicationLayer(object): """Returns a new Transaction containing the given PDUs suitable for transmission. 
""" + pdus = [p.get_dict() for p in pdu_list] + for p in pdus: + if "age_ts" in pdus: + p["age"] = int(self.clock.time_msec()) - p["age_ts"] + return Transaction( - pdus=[p.get_dict() for p in pdu_list], origin=self.server_name, + pdus=pdus, ts=int(self._clock.time_msec()), destination=None, ) @@ -593,8 +603,21 @@ class _TransactionQueue(object): logger.debug("TX [%s] Sending transaction...", destination) # Actually send the transaction + + # FIXME (erikj): This is a bit of a hack to make the Pdu age + # keys work + def cb(transaction): + now = int(self._clock.time_msec()) + if "pdus" in transaction: + for p in transaction["pdus"]: + if "age_ts" in p: + p["age"] = now - int(p["age_ts"]) + + return transaction + code, response = yield self.transport_layer.send_transaction( - transaction + transaction, + on_send_callback=cb, ) logger.debug("TX [%s] Sent transaction", destination) diff --git a/synapse/federation/transport.py b/synapse/federation/transport.py index 6e62ae7c74..afc777ec9e 100644 --- a/synapse/federation/transport.py +++ b/synapse/federation/transport.py @@ -144,7 +144,7 @@ class TransportLayer(object): @defer.inlineCallbacks @log_function - def send_transaction(self, transaction): + def send_transaction(self, transaction, on_send_callback=None): """ Sends the given Transaction to it's destination Args: @@ -165,10 +165,23 @@ class TransportLayer(object): data = transaction.get_dict() + # FIXME (erikj): This is a bit of a hack to make the Pdu age + # keys work + def cb(destination, method, path_bytes, producer): + if not on_send_callback: + return + + transaction = json.loads(producer.body) + + new_transaction = on_send_callback(transaction) + + producer.reset(new_transaction) + code, response = yield self.client.put_json( transaction.destination, path=PREFIX + "/send/%s/" % transaction.transaction_id, - data=data + data=data, + on_send_callback=cb, ) logger.debug( diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 9740431279..622fe66a8f 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -69,6 +69,7 @@ class Pdu(JsonEncodedObject): "prev_state_id", "prev_state_origin", "required_power_level", + "user_id", ] internal_keys = [ diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 9989fe8670..de4d23bbb3 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -42,9 +42,6 @@ class BaseHandler(object): retry_after_ms=int(1000*(time_allowed - time_now)), ) - -class BaseRoomHandler(BaseHandler): - @defer.inlineCallbacks def _on_new_room_event(self, event, snapshot, extra_destinations=[], extra_users=[]): diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 1b9e831fc0..4ab00a761a 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -19,8 +19,10 @@ from ._base import BaseHandler from synapse.api.errors import SynapseError from synapse.http.client import HttpClient +from synapse.api.events.room import RoomAliasesEvent import logging +import sqlite3 logger = logging.getLogger(__name__) @@ -37,7 +39,8 @@ class DirectoryHandler(BaseHandler): ) @defer.inlineCallbacks - def create_association(self, room_alias, room_id, servers=None): + def create_association(self, user_id, room_alias, room_id, servers=None): + # TODO(erikj): Do auth. 
if not room_alias.is_mine: @@ -54,12 +57,37 @@ class DirectoryHandler(BaseHandler): if not servers: raise SynapseError(400, "Failed to get server list") - yield self.store.create_room_alias_association( - room_alias, - room_id, - servers + + try: + yield self.store.create_room_alias_association( + room_alias, + room_id, + servers + ) + except sqlite3.IntegrityError: + defer.returnValue("Already exists") + + # TODO: Send the room event. + + aliases = yield self.store.get_aliases_for_room(room_id) + + event = self.event_factory.create_event( + etype=RoomAliasesEvent.TYPE, + state_key=self.hs.hostname, + room_id=room_id, + user_id=user_id, + content={"aliases": aliases}, ) + snapshot = yield self.store.snapshot_room( + room_id=room_id, + user_id=user_id, + ) + + yield self.state_handler.handle_new_event(event, snapshot) + yield self._on_new_room_event(event, snapshot, extra_users=[user_id]) + + @defer.inlineCallbacks def get_association(self, room_alias): room_id = None diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index fd24a11fb8..93dcd40324 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -15,7 +15,6 @@ from twisted.internet import defer -from synapse.api.events import SynapseEvent from synapse.util.logutils import log_function from ._base import BaseHandler @@ -71,10 +70,7 @@ class EventStreamHandler(BaseHandler): auth_user, room_ids, pagin_config, timeout ) - chunks = [ - e.get_dict() if isinstance(e, SynapseEvent) else e - for e in events - ] + chunks = [self.hs.serialize_event(e) for e in events] chunk = { "chunk": chunks, @@ -92,7 +88,9 @@ class EventStreamHandler(BaseHandler): # 10 seconds of grace to allow the client to reconnect again # before we think they're gone def _later(): - logger.debug("_later stopped_user_eventstream %s", auth_user) + logger.debug( + "_later stopped_user_eventstream %s", auth_user + ) self.distributor.fire( "stopped_user_eventstream", auth_user ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 59cbf71d78..001c6c110c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -93,22 +93,18 @@ class FederationHandler(BaseHandler): """ event = self.pdu_codec.event_from_pdu(pdu) + logger.debug("Got event: %s", event.event_id) + with (yield self.lock_manager.lock(pdu.context)): if event.is_state and not backfilled: is_new_state = yield self.state_handler.handle_new_state( pdu ) - if not is_new_state: - return else: is_new_state = False # TODO: Implement something in federation that allows us to # respond to PDU. 
- if hasattr(event, "state_key") and not is_new_state: - logger.debug("Ignoring old state.") - return - target_is_mine = False if hasattr(event, "target_host"): target_is_mine = event.target_host == self.hs.hostname @@ -139,7 +135,11 @@ class FederationHandler(BaseHandler): else: with (yield self.room_lock.lock(event.room_id)): - yield self.store.persist_event(event, backfilled) + yield self.store.persist_event( + event, + backfilled, + is_new_state=is_new_state + ) room = yield self.store.get_room(event.room_id) diff --git a/synapse/handlers/login.py b/synapse/handlers/login.py index 6ee7ce5a2d..80ffdd2726 100644 --- a/synapse/handlers/login.py +++ b/synapse/handlers/login.py @@ -17,9 +17,13 @@ from twisted.internet import defer from ._base import BaseHandler from synapse.api.errors import LoginError, Codes +from synapse.http.client import PlainHttpClient +from synapse.util.emailutils import EmailException +import synapse.util.emailutils as emailutils import bcrypt import logging +import urllib logger = logging.getLogger(__name__) @@ -62,4 +66,41 @@ class LoginHandler(BaseHandler): defer.returnValue(token) else: logger.warn("Failed password login for user %s", user) - raise LoginError(403, "", errcode=Codes.FORBIDDEN) \ No newline at end of file + raise LoginError(403, "", errcode=Codes.FORBIDDEN) + + @defer.inlineCallbacks + def reset_password(self, user_id, email): + is_valid = yield self._check_valid_association(user_id, email) + logger.info("reset_password user=%s email=%s valid=%s", user_id, email, + is_valid) + if is_valid: + try: + # send an email out + emailutils.send_email( + smtp_server=self.hs.config.email_smtp_server, + from_addr=self.hs.config.email_from_address, + to_addr=email, + subject="Password Reset", + body="TODO." + ) + except EmailException as e: + logger.exception(e) + + @defer.inlineCallbacks + def _check_valid_association(self, user_id, email): + identity = yield self._query_email(email) + if identity and "mxid" in identity: + if identity["mxid"] == user_id: + defer.returnValue(True) + return + defer.returnValue(False) + + @defer.inlineCallbacks + def _query_email(self, email): + httpCli = PlainHttpClient(self.hs) + data = yield httpCli.get_json( + 'matrix.org:8090', # TODO FIXME This should be configurable. 
+ "/_matrix/identity/api/v1/lookup?medium=email&address=" + + "%s" % urllib.quote(email) + ) + defer.returnValue(data) \ No newline at end of file diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index dad2bbd1a4..14fae689f2 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -19,7 +19,7 @@ from synapse.api.constants import Membership from synapse.api.events.room import RoomTopicEvent from synapse.api.errors import RoomError from synapse.streams.config import PaginationConfig -from ._base import BaseRoomHandler +from ._base import BaseHandler import logging @@ -27,7 +27,7 @@ logger = logging.getLogger(__name__) -class MessageHandler(BaseRoomHandler): +class MessageHandler(BaseHandler): def __init__(self, hs): super(MessageHandler, self).__init__(hs) @@ -124,7 +124,7 @@ class MessageHandler(BaseRoomHandler): ) chunk = { - "chunk": [e.get_dict() for e in events], + "chunk": [self.hs.serialize_event(e) for e in events], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), } @@ -268,6 +268,9 @@ class MessageHandler(BaseRoomHandler): user, pagination_config, None ) + public_rooms = yield self.store.get_rooms(is_public=True) + public_room_ids = [r["room_id"] for r in public_rooms] + limit = pagin_config.limit if not limit: limit = 10 @@ -276,6 +279,8 @@ class MessageHandler(BaseRoomHandler): d = { "room_id": event.room_id, "membership": event.membership, + "visibility": ("public" if event.room_id in + public_room_ids else "private"), } if event.membership == Membership.INVITE: @@ -296,7 +301,7 @@ class MessageHandler(BaseRoomHandler): end_token = now_token.copy_and_replace("room_key", token[1]) d["messages"] = { - "chunk": [m.get_dict() for m in messages], + "chunk": [self.hs.serialize_event(m) for m in messages], "start": start_token.to_string(), "end": end_token.to_string(), } @@ -304,7 +309,7 @@ class MessageHandler(BaseRoomHandler): current_state = yield self.store.get_current_state( event.room_id ) - d["state"] = [c.get_dict() for c in current_state] + d["state"] = [self.hs.serialize_event(c) for c in current_state] except: logger.exception("Failed to get snapshot") diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index c79bb6ff76..b2af09f090 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -796,11 +796,12 @@ class PresenceEventSource(object): updates = [] # TODO(paul): use a DeferredList ? How to limit concurrency. 
for observed_user in cachemap.keys(): - if not (from_key < cachemap[observed_user].serial): + cached = cachemap[observed_user] + if not (from_key < cached.serial): continue if (yield self.is_visible(observer_user, observed_user)): - updates.append((observed_user, cachemap[observed_user])) + updates.append((observed_user, cached)) # TODO(paul): limit diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 023d8c0cf2..dab9b03f04 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -15,9 +15,9 @@ from twisted.internet import defer -from synapse.api.errors import SynapseError, AuthError - -from synapse.api.errors import CodeMessageException +from synapse.api.errors import SynapseError, AuthError, CodeMessageException +from synapse.api.constants import Membership +from synapse.api.events.room import RoomMemberEvent from ._base import BaseHandler @@ -97,6 +97,8 @@ class ProfileHandler(BaseHandler): } ) + yield self._update_join_states(target_user) + @defer.inlineCallbacks def get_avatar_url(self, target_user): if target_user.is_mine: @@ -144,6 +146,8 @@ class ProfileHandler(BaseHandler): } ) + yield self._update_join_states(target_user) + @defer.inlineCallbacks def collect_presencelike_data(self, user, state): if not user.is_mine: @@ -180,3 +184,39 @@ class ProfileHandler(BaseHandler): ) defer.returnValue(response) + + @defer.inlineCallbacks + def _update_join_states(self, user): + if not user.is_mine: + return + + joins = yield self.store.get_rooms_for_user_where_membership_is( + user.to_string(), + [Membership.JOIN], + ) + + for j in joins: + snapshot = yield self.store.snapshot_room( + j.room_id, j.state_key, RoomMemberEvent.TYPE, + j.state_key + ) + + content = { + "membership": j.content["membership"], + "prev": j.content["membership"], + } + + yield self.distributor.fire( + "collect_presencelike_data", user, content + ) + + new_event = self.event_factory.create_event( + etype=j.type, + room_id=j.room_id, + state_key=j.state_key, + content=content, + user_id=j.state_key, + ) + + yield self.state_handler.handle_new_event(new_event, snapshot) + yield self._on_new_room_event(new_event, snapshot) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index bee052274f..a019d770d4 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -17,7 +17,9 @@ from twisted.internet import defer from synapse.types import UserID -from synapse.api.errors import SynapseError, RegistrationError +from synapse.api.errors import ( + SynapseError, RegistrationError, InvalidCaptchaError +) from ._base import BaseHandler import synapse.util.stringutils as stringutils from synapse.http.client import PlainHttpClient @@ -38,7 +40,7 @@ class RegistrationHandler(BaseHandler): self.distributor.declare("registered_user") @defer.inlineCallbacks - def register(self, localpart=None, password=None, threepidCreds=None): + def register(self, localpart=None, password=None): """Registers a new client on the server. Args: @@ -51,20 +53,6 @@ class RegistrationHandler(BaseHandler): Raises: RegistrationError if there was a problem registering. 
""" - - if threepidCreds: - for c in threepidCreds: - logger.info("validating theeepidcred sid %s on id server %s", c['sid'], c['idServer']) - try: - threepid = yield self._threepid_from_creds(c) - except: - logger.err() - raise RegistrationError(400, "Couldn't validate 3pid") - - if not threepid: - raise RegistrationError(400, "Couldn't validate 3pid") - logger.info("got threepid medium %s address %s", threepid['medium'], threepid['address']) - password_hash = None if password: password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) @@ -106,15 +94,54 @@ class RegistrationHandler(BaseHandler): raise RegistrationError( 500, "Cannot generate user ID.") - # Now we have a matrix ID, bind it to the threepids we were given - if threepidCreds: - for c in threepidCreds: - # XXX: This should be a deferred list, shouldn't it? - yield self._bind_threepid(c, user_id) - - defer.returnValue((user_id, token)) + @defer.inlineCallbacks + def check_recaptcha(self, ip, private_key, challenge, response): + """Checks a recaptcha is correct.""" + + captcha_response = yield self._validate_captcha( + ip, + private_key, + challenge, + response + ) + if not captcha_response["valid"]: + logger.info("Invalid captcha entered from %s. Error: %s", + ip, captcha_response["error_url"]) + raise InvalidCaptchaError( + error_url=captcha_response["error_url"] + ) + else: + logger.info("Valid captcha entered from %s", ip) + + @defer.inlineCallbacks + def register_email(self, threepidCreds): + """Registers emails with an identity server.""" + + for c in threepidCreds: + logger.info("validating theeepidcred sid %s on id server %s", + c['sid'], c['idServer']) + try: + threepid = yield self._threepid_from_creds(c) + except: + logger.err() + raise RegistrationError(400, "Couldn't validate 3pid") + + if not threepid: + raise RegistrationError(400, "Couldn't validate 3pid") + logger.info("got threepid medium %s address %s", + threepid['medium'], threepid['address']) + + @defer.inlineCallbacks + def bind_emails(self, user_id, threepidCreds): + """Links emails with a user ID and informs an identity server.""" + + # Now we have a matrix ID, bind it to the threepids we were given + for c in threepidCreds: + # XXX: This should be a deferred list, shouldn't it? + yield self._bind_threepid(c, user_id) + def _generate_token(self, user_id): # urlsafe variant uses _ and - so use . as the separator and replace # all =s with .s so http clients don't quote =s when it is used as @@ -129,16 +156,17 @@ class RegistrationHandler(BaseHandler): def _threepid_from_creds(self, creds): httpCli = PlainHttpClient(self.hs) # XXX: make this configurable! 
- trustedIdServers = [ 'matrix.org:8090' ] + trustedIdServers = ['matrix.org:8090'] if not creds['idServer'] in trustedIdServers: - logger.warn('%s is not a trusted ID server: rejecting 3pid credentials', creds['idServer']) + logger.warn('%s is not a trusted ID server: rejecting 3pid ' + + 'credentials', creds['idServer']) defer.returnValue(None) data = yield httpCli.get_json( creds['idServer'], "/_matrix/identity/api/v1/3pid/getValidated3pid", - { 'sid': creds['sid'], 'clientSecret': creds['clientSecret'] } + {'sid': creds['sid'], 'clientSecret': creds['clientSecret']} ) - + if 'medium' in data: defer.returnValue(data) defer.returnValue(None) @@ -149,9 +177,45 @@ class RegistrationHandler(BaseHandler): data = yield httpCli.post_urlencoded_get_json( creds['idServer'], "/_matrix/identity/api/v1/3pid/bind", - { 'sid': creds['sid'], 'clientSecret': creds['clientSecret'], 'mxid':mxid } + {'sid': creds['sid'], 'clientSecret': creds['clientSecret'], + 'mxid': mxid} ) defer.returnValue(data) - - + + @defer.inlineCallbacks + def _validate_captcha(self, ip_addr, private_key, challenge, response): + """Validates the captcha provided. + + Returns: + dict: Containing 'valid'(bool) and 'error_url'(str) if invalid. + + """ + response = yield self._submit_captcha(ip_addr, private_key, challenge, + response) + # parse Google's response. Lovely format.. + lines = response.split('\n') + json = { + "valid": lines[0] == 'true', + "error_url": "http://www.google.com/recaptcha/api/challenge?" + + "error=%s" % lines[1] + } + defer.returnValue(json) + + @defer.inlineCallbacks + def _submit_captcha(self, ip_addr, private_key, challenge, response): + client = PlainHttpClient(self.hs) + data = yield client.post_urlencoded_get_raw( + "www.google.com:80", + "/recaptcha/api/verify", + # twisted dislikes google's response, no content length. 
+ accept_partial=True, + args={ + 'privatekey': private_key, + 'remoteip': ip_addr, + 'challenge': challenge, + 'response': response + } + ) + defer.returnValue(data) + diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 8171e9eb45..5bc1280432 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -25,14 +25,14 @@ from synapse.api.events.room import ( RoomSendEventLevelEvent, RoomOpsPowerLevelsEvent, RoomNameEvent, ) from synapse.util import stringutils -from ._base import BaseRoomHandler +from ._base import BaseHandler import logging logger = logging.getLogger(__name__) -class RoomCreationHandler(BaseRoomHandler): +class RoomCreationHandler(BaseHandler): @defer.inlineCallbacks def create_room(self, user_id, room_id, config): @@ -65,6 +65,13 @@ class RoomCreationHandler(BaseRoomHandler): else: room_alias = None + invite_list = config.get("invite", []) + for i in invite_list: + try: + self.hs.parse_userid(i) + except: + raise SynapseError(400, "Invalid user_id: %s" % (i,)) + is_public = config.get("visibility", None) == "public" if room_id: @@ -105,7 +112,9 @@ class RoomCreationHandler(BaseRoomHandler): ) if room_alias: - yield self.store.create_room_alias_association( + directory_handler = self.hs.get_handlers().directory_handler + yield directory_handler.create_association( + user_id=user_id, room_id=room_id, room_alias=room_alias, servers=[self.hs.hostname], @@ -132,7 +141,7 @@ class RoomCreationHandler(BaseRoomHandler): etype=RoomNameEvent.TYPE, room_id=room_id, user_id=user_id, - required_power_level=5, + required_power_level=50, content={"name": name}, ) @@ -143,7 +152,7 @@ class RoomCreationHandler(BaseRoomHandler): etype=RoomNameEvent.TYPE, room_id=room_id, user_id=user_id, - required_power_level=5, + required_power_level=50, content={"name": name}, ) @@ -155,7 +164,7 @@ class RoomCreationHandler(BaseRoomHandler): etype=RoomTopicEvent.TYPE, room_id=room_id, user_id=user_id, - required_power_level=5, + required_power_level=50, content={"topic": topic}, ) @@ -176,6 +185,25 @@ class RoomCreationHandler(BaseRoomHandler): do_auth=False ) + content = {"membership": Membership.INVITE} + for invitee in invite_list: + invite_event = self.event_factory.create_event( + etype=RoomMemberEvent.TYPE, + state_key=invitee, + room_id=room_id, + user_id=user_id, + content=content + ) + + yield self.hs.get_handlers().room_member_handler.change_membership( + invite_event, + do_auth=False + ) + + yield self.hs.get_handlers().room_member_handler.change_membership( + join_event, + do_auth=False + ) result = {"room_id": room_id} if room_alias: result["room_alias"] = room_alias.to_string() @@ -186,7 +214,7 @@ class RoomCreationHandler(BaseRoomHandler): event_keys = { "room_id": room_id, "user_id": creator.to_string(), - "required_power_level": 10, + "required_power_level": 100, } def create(etype, **content): @@ -203,7 +231,7 @@ class RoomCreationHandler(BaseRoomHandler): power_levels_event = self.event_factory.create_event( etype=RoomPowerLevelsEvent.TYPE, - content={creator.to_string(): 10, "default": 0}, + content={creator.to_string(): 100, "default": 0}, **event_keys ) @@ -215,7 +243,7 @@ class RoomCreationHandler(BaseRoomHandler): add_state_event = create( etype=RoomAddStateLevelEvent.TYPE, - level=10, + level=100, ) send_event = create( @@ -225,8 +253,8 @@ class RoomCreationHandler(BaseRoomHandler): ops = create( etype=RoomOpsPowerLevelsEvent.TYPE, - ban_level=5, - kick_level=5, + ban_level=50, + kick_level=50, ) return [ @@ -239,7 +267,7 @@ class 
RoomCreationHandler(BaseRoomHandler): ] -class RoomMemberHandler(BaseRoomHandler): +class RoomMemberHandler(BaseHandler): # TODO(paul): This handler currently contains a messy conflation of # low-level API that works on UserID objects and so on, and REST-level # API that takes ID strings and returns pagination chunks. These concerns @@ -307,7 +335,7 @@ class RoomMemberHandler(BaseRoomHandler): member_list = yield self.store.get_room_members(room_id=room_id) event_list = [ - entry.get_dict() + self.hs.serialize_event(entry) for entry in member_list ] chunk_data = { @@ -560,11 +588,17 @@ class RoomMemberHandler(BaseRoomHandler): extra_users=[target_user] ) -class RoomListHandler(BaseRoomHandler): +class RoomListHandler(BaseHandler): @defer.inlineCallbacks def get_public_room_list(self): chunk = yield self.store.get_rooms(is_public=True) + for room in chunk: + joined_members = yield self.store.get_room_members( + room_id=room["room_id"], + membership=Membership.JOIN + ) + room["num_joined_members"] = len(joined_members) # FIXME (erikj): START is no longer a valid value defer.returnValue({"start": "START", "end": "END", "chunk": chunk}) diff --git a/synapse/http/client.py b/synapse/http/client.py index ebf1aa47c4..eb11bfd4d5 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -16,7 +16,7 @@ from twisted.internet import defer, reactor from twisted.internet.error import DNSLookupError -from twisted.web.client import _AgentBase, _URI, readBody, FileBodyProducer +from twisted.web.client import _AgentBase, _URI, readBody, FileBodyProducer, PartialDownloadError from twisted.web.http_headers import Headers from synapse.http.endpoint import matrix_endpoint @@ -122,7 +122,7 @@ class TwistedHttpClient(HttpClient): self.hs = hs @defer.inlineCallbacks - def put_json(self, destination, path, data): + def put_json(self, destination, path, data, on_send_callback=None): if destination in _destination_mappings: destination = _destination_mappings[destination] @@ -131,7 +131,8 @@ class TwistedHttpClient(HttpClient): "PUT", path.encode("ascii"), producer=_JsonProducer(data), - headers_dict={"Content-Type": ["application/json"]} + headers_dict={"Content-Type": ["application/json"]}, + on_send_callback=on_send_callback, ) logger.debug("Getting resp body") @@ -188,11 +189,37 @@ class TwistedHttpClient(HttpClient): body = yield readBody(response) defer.returnValue(json.loads(body)) + + # XXX FIXME : I'm so sorry. 
+ @defer.inlineCallbacks + def post_urlencoded_get_raw(self, destination, path, accept_partial=False, args={}): + if destination in _destination_mappings: + destination = _destination_mappings[destination] + + query_bytes = urllib.urlencode(args, True) + + response = yield self._create_request( + destination.encode("ascii"), + "POST", + path.encode("ascii"), + producer=FileBodyProducer(StringIO(urllib.urlencode(args))), + headers_dict={"Content-Type": ["application/x-www-form-urlencoded"]} + ) + + try: + body = yield readBody(response) + defer.returnValue(body) + except PartialDownloadError as e: + if accept_partial: + defer.returnValue(e.response) + else: + raise e + @defer.inlineCallbacks def _create_request(self, destination, method, path_bytes, param_bytes=b"", query_bytes=b"", producer=None, headers_dict={}, - retry_on_dns_fail=True): + retry_on_dns_fail=True, on_send_callback=None): """ Creates and sends a request to the given url """ headers_dict[b"User-Agent"] = [b"Synapse"] @@ -216,6 +243,9 @@ class TwistedHttpClient(HttpClient): endpoint = self._getEndpoint(reactor, destination); while True: + if on_send_callback: + on_send_callback(destination, method, path_bytes, producer) + try: response = yield self.agent.request( destination, @@ -284,6 +314,9 @@ class _JsonProducer(object): """ Used by the twisted http client to create the HTTP body from json """ def __init__(self, jsn): + self.reset(jsn) + + def reset(self, jsn): self.body = encode_canonical_json(jsn) self.length = len(self.body) diff --git a/synapse/rest/directory.py b/synapse/rest/directory.py index 18df7c8d8b..31849246a1 100644 --- a/synapse/rest/directory.py +++ b/synapse/rest/directory.py @@ -45,6 +45,8 @@ class ClientDirectoryServer(RestServlet): @defer.inlineCallbacks def on_PUT(self, request, room_alias): + user = yield self.auth.get_user_by_req(request) + content = _parse_json(request) if not "room_id" in content: raise SynapseError(400, "Missing room_id key", @@ -69,12 +71,13 @@ class ClientDirectoryServer(RestServlet): try: yield dir_handler.create_association( - room_alias, room_id, servers + user.to_string(), room_alias, room_id, servers ) except SynapseError as e: raise e except: logger.exception("Failed to create association") + raise defer.returnValue((200, {})) diff --git a/synapse/rest/events.py b/synapse/rest/events.py index 7fde143200..097195d7cc 100644 --- a/synapse/rest/events.py +++ b/synapse/rest/events.py @@ -59,7 +59,7 @@ class EventRestServlet(RestServlet): event = yield handler.get_event(auth_user, event_id) if event: - defer.returnValue((200, event.get_dict())) + defer.returnValue((200, self.hs.serialize_event(event))) else: defer.returnValue((404, "Event not found.")) diff --git a/synapse/rest/login.py b/synapse/rest/login.py index c7bf901c8e..ad71f6c61d 100644 --- a/synapse/rest/login.py +++ b/synapse/rest/login.py @@ -70,7 +70,28 @@ class LoginFallbackRestServlet(RestServlet): def on_GET(self, request): # TODO(kegan): This should be returning some HTML which is capable of # hitting LoginRestServlet - return (200, "") + return (200, {}) + + +class PasswordResetRestServlet(RestServlet): + PATTERN = client_path_pattern("/login/reset") + + @defer.inlineCallbacks + def on_POST(self, request): + reset_info = _parse_json(request) + try: + email = reset_info["email"] + user_id = reset_info["user_id"] + handler = self.handlers.login_handler + yield handler.reset_password(user_id, email) + # purposefully give no feedback to avoid people hammering different + # combinations. 
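post_urlencoded_get_raw above tolerates responses without a Content-Length (as sent by the reCAPTCHA verify endpoint) by catching twisted's PartialDownloadError and using whatever body was received. A small standalone sketch of that pattern:

    from twisted.internet import defer
    from twisted.web.client import readBody, PartialDownloadError

    @defer.inlineCallbacks
    def read_body_accepting_partial(response):
        # Some servers close the connection without a Content-Length header;
        # twisted reports this as PartialDownloadError, but the partial body
        # it carries is still usable.
        try:
            body = yield readBody(response)
        except PartialDownloadError as e:
            body = e.response
        defer.returnValue(body)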
+ defer.returnValue((200, {})) + except KeyError: + raise SynapseError( + 400, + "Missing keys. Requires 'email' and 'user_id'." + ) def _parse_json(request): @@ -85,3 +106,4 @@ def _parse_json(request): def register_servlets(hs, http_server): LoginRestServlet(hs).register(http_server) + # TODO PasswordResetRestServlet(hs).register(http_server) diff --git a/synapse/rest/profile.py b/synapse/rest/profile.py index 2e17f87fa1..dad5a208c7 100644 --- a/synapse/rest/profile.py +++ b/synapse/rest/profile.py @@ -51,7 +51,7 @@ class ProfileDisplaynameRestServlet(RestServlet): yield self.handlers.profile_handler.set_displayname( user, auth_user, new_name) - defer.returnValue((200, "")) + defer.returnValue((200, {})) def on_OPTIONS(self, request, user_id): return (200, {}) @@ -86,7 +86,7 @@ class ProfileAvatarURLRestServlet(RestServlet): yield self.handlers.profile_handler.set_avatar_url( user, auth_user, new_name) - defer.returnValue((200, "")) + defer.returnValue((200, {})) def on_OPTIONS(self, request, user_id): return (200, {}) diff --git a/synapse/rest/register.py b/synapse/rest/register.py index b8de3b250d..af528a44f6 100644 --- a/synapse/rest/register.py +++ b/synapse/rest/register.py @@ -16,58 +16,219 @@ """This module contains REST servlets to do with registration: /register""" from twisted.internet import defer -from synapse.api.errors import SynapseError +from synapse.api.errors import SynapseError, Codes +from synapse.api.constants import LoginType from base import RestServlet, client_path_pattern +import synapse.util.stringutils as stringutils import json +import logging import urllib +logger = logging.getLogger(__name__) + class RegisterRestServlet(RestServlet): + """Handles registration with the home server. + + This servlet is in control of the registration flow; the registration + handler doesn't have a concept of multi-stages or sessions. 
+ """ + PATTERN = client_path_pattern("/register$") + def __init__(self, hs): + super(RegisterRestServlet, self).__init__(hs) + # sessions are stored as: + # self.sessions = { + # "session_id" : { __session_dict__ } + # } + # TODO: persistent storage + self.sessions = {} + + def on_GET(self, request): + if self.hs.config.enable_registration_captcha: + return (200, { + "flows": [ + { + "type": LoginType.RECAPTCHA, + "stages": ([LoginType.RECAPTCHA, + LoginType.EMAIL_IDENTITY, + LoginType.PASSWORD]) + }, + { + "type": LoginType.RECAPTCHA, + "stages": [LoginType.RECAPTCHA, LoginType.PASSWORD] + } + ] + }) + else: + return (200, { + "flows": [ + { + "type": LoginType.EMAIL_IDENTITY, + "stages": ([LoginType.EMAIL_IDENTITY, + LoginType.PASSWORD]) + }, + { + "type": LoginType.PASSWORD + } + ] + }) + @defer.inlineCallbacks def on_POST(self, request): - desired_user_id = None - password = None + register_json = _parse_json(request) + + session = (register_json["session"] if "session" in register_json + else None) + login_type = None + if "type" not in register_json: + raise SynapseError(400, "Missing 'type' key.") + try: - register_json = json.loads(request.content.read()) - if "password" in register_json: - password = register_json["password"].encode("utf-8") + login_type = register_json["type"] + stages = { + LoginType.RECAPTCHA: self._do_recaptcha, + LoginType.PASSWORD: self._do_password, + LoginType.EMAIL_IDENTITY: self._do_email_identity + } - if type(register_json["user_id"]) == unicode: - desired_user_id = register_json["user_id"].encode("utf-8") - if urllib.quote(desired_user_id) != desired_user_id: - raise SynapseError( - 400, - "User ID must only contain characters which do not " + - "require URL encoding.") - except ValueError: - defer.returnValue((400, "No JSON object.")) + session_info = self._get_session_info(request, session) + logger.debug("%s : session info %s request info %s", + login_type, session_info, register_json) + response = yield stages[login_type]( + request, + register_json, + session_info + ) + + if "access_token" not in response: + # isn't a final response + response["session"] = session_info["id"] + + defer.returnValue((200, response)) + except KeyError as e: + logger.exception(e) + raise SynapseError(400, "Missing JSON keys for login type %s." 
% login_type) + + def on_OPTIONS(self, request): + return (200, {}) + + def _get_session_info(self, request, session_id): + if not session_id: + # create a new session + while session_id is None or session_id in self.sessions: + session_id = stringutils.random_string(24) + self.sessions[session_id] = { + "id": session_id, + LoginType.EMAIL_IDENTITY: False, + LoginType.RECAPTCHA: False + } + + return self.sessions[session_id] + + def _save_session(self, session): + # TODO: Persistent storage + logger.debug("Saving session %s", session) + self.sessions[session["id"]] = session + + def _remove_session(self, session): + logger.debug("Removing session %s", session) + self.sessions.pop(session["id"]) + + @defer.inlineCallbacks + def _do_recaptcha(self, request, register_json, session): + if not self.hs.config.enable_registration_captcha: + raise SynapseError(400, "Captcha not required.") + + challenge = None + user_response = None + try: + challenge = register_json["challenge"] + user_response = register_json["response"] except KeyError: - pass # user_id is optional + raise SynapseError(400, "Captcha response is required", + errcode=Codes.CAPTCHA_NEEDED) - threepidCreds = None - if 'threepidCreds' in register_json: - threepidCreds = register_json['threepidCreds'] + # May be an X-Forwarding-For header depending on config + ip_addr = request.getClientIP() + if self.hs.config.captcha_ip_origin_is_x_forwarded: + # use the header + if request.requestHeaders.hasHeader("X-Forwarded-For"): + ip_addr = request.requestHeaders.getRawHeaders( + "X-Forwarded-For")[0] handler = self.handlers.registration_handler + yield handler.check_recaptcha( + ip_addr, + self.hs.config.recaptcha_private_key, + challenge, + user_response + ) + session[LoginType.RECAPTCHA] = True # mark captcha as done + self._save_session(session) + defer.returnValue({ + "next": [LoginType.PASSWORD, LoginType.EMAIL_IDENTITY] + }) + + @defer.inlineCallbacks + def _do_email_identity(self, request, register_json, session): + if (self.hs.config.enable_registration_captcha and + not session[LoginType.RECAPTCHA]): + raise SynapseError(400, "Captcha is required.") + + threepidCreds = register_json['threepidCreds'] + handler = self.handlers.registration_handler + yield handler.register_email(threepidCreds) + session["threepidCreds"] = threepidCreds # store creds for next stage + session[LoginType.EMAIL_IDENTITY] = True # mark email as done + self._save_session(session) + defer.returnValue({ + "next": LoginType.PASSWORD + }) + + @defer.inlineCallbacks + def _do_password(self, request, register_json, session): + if (self.hs.config.enable_registration_captcha and + not session[LoginType.RECAPTCHA]): + # captcha should've been done by this stage! 
+ raise SynapseError(400, "Captcha is required.") + + password = register_json["password"].encode("utf-8") + desired_user_id = (register_json["user"].encode("utf-8") if "user" + in register_json else None) + if desired_user_id and urllib.quote(desired_user_id) != desired_user_id: + raise SynapseError( + 400, + "User ID must only contain characters which do not " + + "require URL encoding.") + handler = self.handlers.registration_handler (user_id, token) = yield handler.register( localpart=desired_user_id, - password=password, - threepidCreds=threepidCreds) + password=password + ) + + if session[LoginType.EMAIL_IDENTITY]: + yield handler.bind_emails(user_id, session["threepidCreds"]) result = { "user_id": user_id, "access_token": token, "home_server": self.hs.hostname, } - defer.returnValue( - (200, result) - ) + self._remove_session(session) + defer.returnValue(result) - def on_OPTIONS(self, request): - return (200, {}) + +def _parse_json(request): + try: + content = json.loads(request.content.read()) + if type(content) != dict: + raise SynapseError(400, "Content must be a JSON object.") + return content + except ValueError: + raise SynapseError(400, "Content not JSON.") def register_servlets(hs, http_server): diff --git a/synapse/rest/room.py b/synapse/rest/room.py index 308b447090..ecb1e346d9 100644 --- a/synapse/rest/room.py +++ b/synapse/rest/room.py @@ -154,14 +154,14 @@ class RoomStateEventRestServlet(RestServlet): # membership events are special handler = self.handlers.room_member_handler yield handler.change_membership(event) - defer.returnValue((200, "")) + defer.returnValue((200, {})) else: # store random bits of state msg_handler = self.handlers.message_handler yield msg_handler.store_room_data( event=event ) - defer.returnValue((200, "")) + defer.returnValue((200, {})) # TODO: Needs unit testing for generic events + feedback @@ -249,7 +249,7 @@ class JoinRoomAliasServlet(RestServlet): ) handler = self.handlers.room_member_handler yield handler.change_membership(event) - defer.returnValue((200, "")) + defer.returnValue((200, {})) @defer.inlineCallbacks def on_PUT(self, request, room_identifier, txn_id): @@ -378,7 +378,7 @@ class RoomTriggerBackfill(RestServlet): handler = self.handlers.federation_handler events = yield handler.backfill(remote_server, room_id, limit) - res = [event.get_dict() for event in events] + res = [self.hs.serialize_event(event) for event in events] defer.returnValue((200, res)) @@ -416,7 +416,7 @@ class RoomMembershipRestServlet(RestServlet): ) handler = self.handlers.room_member_handler yield handler.change_membership(event) - defer.returnValue((200, "")) + defer.returnValue((200, {})) @defer.inlineCallbacks def on_PUT(self, request, room_id, membership_action, txn_id): diff --git a/synapse/server.py b/synapse/server.py index 83368ea5a7..cdea49e6ab 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -20,6 +20,7 @@ # Imports required for the default HomeServer() implementation from synapse.federation import initialize_http_replication +from synapse.api.events import serialize_event from synapse.api.events.factory import EventFactory from synapse.notifier import Notifier from synapse.api.auth import Auth @@ -57,6 +58,7 @@ class BaseHomeServer(object): DEPENDENCIES = [ 'clock', 'http_client', + 'db_name', 'db_pool', 'persistence_service', 'replication_layer', @@ -138,6 +140,9 @@ class BaseHomeServer(object): object.""" return RoomID.from_string(s, hs=self) + def serialize_event(self, e): + return serialize_event(self, e) + # Build magic accessors for 
every dependency for depname in BaseHomeServer.DEPENDENCIES: BaseHomeServer._make_dependency_method(depname) diff --git a/synapse/state.py b/synapse/state.py index 36d8210eb5..9db84c9b5c 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -16,7 +16,7 @@ from twisted.internet import defer -from synapse.federation.pdu_codec import encode_event_id +from synapse.federation.pdu_codec import encode_event_id, decode_event_id from synapse.util.logutils import log_function from collections import namedtuple @@ -87,9 +87,11 @@ class StateHandler(object): # than the power level of the user # power_level = self._get_power_level_for_event(event) + pdu_id, origin = decode_event_id(event.event_id, self.server_name) + yield self.store.update_current_state( - pdu_id=event.event_id, - origin=self.server_name, + pdu_id=pdu_id, + origin=origin, context=key.context, pdu_type=key.type, state_key=key.state_key @@ -113,6 +115,8 @@ class StateHandler(object): is_new = yield self._handle_new_state(new_pdu) + logger.debug("is_new: %s %s %s", is_new, new_pdu.pdu_id, new_pdu.origin) + if is_new: yield self.store.update_current_state( pdu_id=new_pdu.pdu_id, @@ -132,7 +136,9 @@ class StateHandler(object): @defer.inlineCallbacks @log_function def _handle_new_state(self, new_pdu): - tree = yield self.store.get_unresolved_state_tree(new_pdu) + tree, missing_branch = yield self.store.get_unresolved_state_tree( + new_pdu + ) new_branch, current_branch = tree logger.debug( @@ -140,64 +146,17 @@ class StateHandler(object): new_branch, current_branch ) - if not current_branch: - # There is no current state - defer.returnValue(True) - return - - n = new_branch[-1] - c = current_branch[-1] - - if n.pdu_id == c.pdu_id and n.origin == c.origin: - # We have all the PDUs we need, so we can just do the conflict - # resolution. - - if len(current_branch) == 1: - # This is a direct clobber so we can just... - defer.returnValue(True) - - conflict_res = [ - self._do_power_level_conflict_res, - self._do_chain_length_conflict_res, - self._do_hash_conflict_res, - ] - - for algo in conflict_res: - new_res, curr_res = algo(new_branch, current_branch) - - if new_res < curr_res: - defer.returnValue(False) - elif new_res > curr_res: - defer.returnValue(True) - - raise Exception("Conflict resolution failed.") - - else: - # We need to ask for PDUs. - missing_prev = max( - new_branch[-1], current_branch[-1], - key=lambda x: x.depth - ) - - if not hasattr(missing_prev, "prev_state_id"): - # FIXME Hmm - # temporary fallback - for algo in conflict_res: - new_res, curr_res = algo(new_branch, current_branch) - - if new_res < curr_res: - defer.returnValue(False) - elif new_res > curr_res: - defer.returnValue(True) - return + if missing_branch is not None: + # We're missing some PDUs. Fetch them. + # TODO (erikj): Limit this. 
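The refactored _do_conflict_res above runs a fixed priority list of comparators (power level, then chain length, then hash) and lets the first one that produces a strict ordering decide whether the new branch wins. A simplified, synchronous sketch of that control flow (illustrative only; the real code yields Deferreds):

    def resolve(new_branch, current_branch, comparators):
        # comparators: callables returning (new_score, current_score);
        # tried in priority order until one of them breaks the tie.
        for compare in comparators:
            new_score, curr_score = compare(new_branch, current_branch)
            if new_score > curr_score:
                return True    # new branch clobbers the current state
            if new_score < curr_score:
                return False   # keep the current state
        raise Exception("Conflict resolution failed.")

    # e.g. resolve(new, current, [power_level_res, chain_length_res, hash_res])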
+ missing_prev = tree[missing_branch][-1] pdu_id = missing_prev.prev_state_id origin = missing_prev.prev_state_origin is_missing = yield self.store.get_pdu(pdu_id, origin) is None - if not is_missing: - raise Exception("Conflict resolution failed.") + raise Exception("Conflict resolution failed") yield self._replication.get_pdu( destination=missing_prev.origin, @@ -209,23 +168,93 @@ class StateHandler(object): updated_current = yield self._handle_new_state(new_pdu) defer.returnValue(updated_current) - def _do_power_level_conflict_res(self, new_branch, current_branch): - max_power_new = max( - new_branch[:-1], - key=lambda t: t.power_level - ).power_level + if not current_branch: + # There is no current state + defer.returnValue(True) + return - max_power_current = max( - current_branch[:-1], - key=lambda t: t.power_level - ).power_level + n = new_branch[-1] + c = current_branch[-1] - return (max_power_new, max_power_current) + common_ancestor = n.pdu_id == c.pdu_id and n.origin == c.origin - def _do_chain_length_conflict_res(self, new_branch, current_branch): + if common_ancestor: + # We found a common ancestor! + + if len(current_branch) == 1: + # This is a direct clobber so we can just... + defer.returnValue(True) + + else: + # We didn't find a common ancestor. This is probably fine. + pass + + result = yield self._do_conflict_res( + new_branch, current_branch, common_ancestor + ) + defer.returnValue(result) + + @defer.inlineCallbacks + def _do_conflict_res(self, new_branch, current_branch, common_ancestor): + conflict_res = [ + self._do_power_level_conflict_res, + self._do_chain_length_conflict_res, + self._do_hash_conflict_res, + ] + + for algo in conflict_res: + new_res, curr_res = yield defer.maybeDeferred( + algo, + new_branch, current_branch, common_ancestor + ) + + if new_res < curr_res: + defer.returnValue(False) + elif new_res > curr_res: + defer.returnValue(True) + + raise Exception("Conflict resolution failed.") + + @defer.inlineCallbacks + def _do_power_level_conflict_res(self, new_branch, current_branch, + common_ancestor): + new_powers_deferreds = [] + for e in new_branch[:-1] if common_ancestor else new_branch: + if hasattr(e, "user_id"): + new_powers_deferreds.append( + self.store.get_power_level(e.context, e.user_id) + ) + + current_powers_deferreds = [] + for e in current_branch[:-1] if common_ancestor else current_branch: + if hasattr(e, "user_id"): + current_powers_deferreds.append( + self.store.get_power_level(e.context, e.user_id) + ) + + new_powers = yield defer.gatherResults( + new_powers_deferreds, + consumeErrors=True + ) + + current_powers = yield defer.gatherResults( + current_powers_deferreds, + consumeErrors=True + ) + + max_power_new = max(new_powers) + max_power_current = max(current_powers) + + defer.returnValue( + (max_power_new, max_power_current) + ) + + def _do_chain_length_conflict_res(self, new_branch, current_branch, + common_ancestor): return (len(new_branch), len(current_branch)) - def _do_hash_conflict_res(self, new_branch, current_branch): + def _do_hash_conflict_res(self, new_branch, current_branch, + common_ancestor): new_str = "".join([p.pdu_id + p.origin for p in new_branch]) c_str = "".join([p.pdu_id + p.origin for p in current_branch]) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index d97014f4da..66658f6721 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -36,7 +36,7 @@ from .registration import RegistrationStore from .room import RoomStore from .roommember import RoomMemberStore 
from .stream import StreamStore -from .pdu import StatePduStore, PduStore +from .pdu import StatePduStore, PduStore, PdusTable from .transactions import TransactionStore from .keys import KeyStore @@ -48,6 +48,28 @@ import os logger = logging.getLogger(__name__) +SCHEMAS = [ + "transactions", + "pdu", + "users", + "profiles", + "presence", + "im", + "room_aliases", +] + + +# Remember to update this number every time an incompatible change is made to +# database schema files, so the users will be informed on server restarts. +SCHEMA_VERSION = 3 + + +class _RollbackButIsFineException(Exception): + """ This exception is used to rollback a transaction without implying + something went wrong. + """ + pass + class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, PresenceStore, PduStore, StatePduStore, TransactionStore, @@ -63,7 +85,8 @@ class DataStore(RoomMemberStore, RoomStore, @defer.inlineCallbacks @log_function - def persist_event(self, event=None, backfilled=False, pdu=None): + def persist_event(self, event=None, backfilled=False, pdu=None, + is_new_state=True): stream_ordering = None if backfilled: if not self.min_token_deferred.called: @@ -71,17 +94,20 @@ class DataStore(RoomMemberStore, RoomStore, self.min_token -= 1 stream_ordering = self.min_token - latest = yield self._db_pool.runInteraction( - self._persist_pdu_event_txn, - pdu=pdu, - event=event, - backfilled=backfilled, - stream_ordering=stream_ordering, - ) - defer.returnValue(latest) + try: + yield self.runInteraction( + self._persist_pdu_event_txn, + pdu=pdu, + event=event, + backfilled=backfilled, + stream_ordering=stream_ordering, + is_new_state=is_new_state, + ) + except _RollbackButIsFineException as e: + pass @defer.inlineCallbacks - def get_event(self, event_id): + def get_event(self, event_id, allow_none=False): events_dict = yield self._simple_select_one( "events", {"event_id": event_id}, @@ -92,18 +118,24 @@ class DataStore(RoomMemberStore, RoomStore, "content", "unrecognized_keys" ], + allow_none=allow_none, ) + if not events_dict: + defer.returnValue(None) + event = self._parse_event_from_row(events_dict) defer.returnValue(event) def _persist_pdu_event_txn(self, txn, pdu=None, event=None, - backfilled=False, stream_ordering=None): + backfilled=False, stream_ordering=None, + is_new_state=True): if pdu is not None: self._persist_event_pdu_txn(txn, pdu) if event is not None: return self._persist_event_txn( - txn, event, backfilled, stream_ordering + txn, event, backfilled, stream_ordering, + is_new_state=is_new_state, ) def _persist_event_pdu_txn(self, txn, pdu): @@ -112,6 +144,12 @@ class DataStore(RoomMemberStore, RoomStore, del cols["content"] del cols["prev_pdus"] cols["content_json"] = json.dumps(pdu.content) + + unrec_keys.update({ + k: v for k, v in cols.items() + if k not in PdusTable.fields + }) + cols["unrecognized_keys"] = json.dumps(unrec_keys) logger.debug("Persisting: %s", repr(cols)) @@ -124,7 +162,8 @@ class DataStore(RoomMemberStore, RoomStore, self._update_min_depth_for_context_txn(txn, pdu.context, pdu.depth) @log_function - def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None): + def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None, + is_new_state=True): if event.type == RoomMemberEvent.TYPE: self._store_room_member_txn(txn, event) elif event.type == FeedbackEvent.TYPE: @@ -171,13 +210,14 @@ class DataStore(RoomMemberStore, RoomStore, try: self._simple_insert_txn(txn, "events", vals) except: - 
logger.exception( + logger.warn( "Failed to persist, probably duplicate: %s", - event.event_id + event.event_id, + exc_info=True, ) - return + raise _RollbackButIsFineException("_persist_event") - if not backfilled and hasattr(event, "state_key"): + if is_new_state and hasattr(event, "state_key"): vals = { "event_id": event.event_id, "room_id": event.room_id, @@ -201,8 +241,6 @@ class DataStore(RoomMemberStore, RoomStore, } ) - return self._get_room_events_max_id_txn(txn) - @defer.inlineCallbacks def get_current_state(self, room_id, event_type=None, state_key=""): sql = ( @@ -220,7 +258,8 @@ class DataStore(RoomMemberStore, RoomStore, results = yield self._execute_and_decode(sql, *args) - defer.returnValue([self._parse_event_from_row(r) for r in results]) + events = yield self._parse_events(results) + defer.returnValue(events) @defer.inlineCallbacks def _get_min_token(self): @@ -269,7 +308,7 @@ class DataStore(RoomMemberStore, RoomStore, prev_state_pdu=prev_state_pdu, ) - return self._db_pool.runInteraction(_snapshot) + return self.runInteraction(_snapshot) class Snapshot(object): @@ -339,3 +378,42 @@ def read_schema(schema): """ with open(schema_path(schema)) as schema_file: return schema_file.read() + + +def prepare_database(db_conn): + """ Set up all the dbs. Since all the *.sql have IF NOT EXISTS, so we + don't have to worry about overwriting existing content. + """ + c = db_conn.cursor() + c.execute("PRAGMA user_version") + row = c.fetchone() + + if row and row[0]: + user_version = row[0] + + if user_version > SCHEMA_VERSION: + raise ValueError("Cannot use this database as it is too " + + "new for the server to understand" + ) + elif user_version < SCHEMA_VERSION: + logging.info("Upgrading database from version %d", + user_version + ) + + # Run every version since after the current version. + for v in range(user_version + 1, SCHEMA_VERSION + 1): + sql_script = read_schema("delta/v%d" % (v)) + c.executescript(sql_script) + + db_conn.commit() + + else: + for sql_loc in SCHEMAS: + sql_script = read_schema(sql_loc) + + c.executescript(sql_script) + db_conn.commit() + c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION) + + c.close() + diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index bae50e7d1f..76ed7d06fb 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -17,6 +17,7 @@ import logging from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.util.logutils import log_function import collections import copy @@ -25,6 +26,44 @@ import json logger = logging.getLogger(__name__) +sql_logger = logging.getLogger("synapse.storage.SQL") + + +class LoggingTransaction(object): + """An object that almost-transparently proxies for the 'txn' object + passed to the constructor. Adds logging to the .execute() method.""" + __slots__ = ["txn"] + + def __init__(self, txn): + object.__setattr__(self, "txn", txn) + + def __getattribute__(self, name): + if name == "execute": + return object.__getattribute__(self, "execute") + + return getattr(object.__getattribute__(self, "txn"), name) + + def __setattr__(self, name, value): + setattr(object.__getattribute__(self, "txn"), name, value) + + def execute(self, sql, *args, **kwargs): + # TODO(paul): Maybe use 'info' and 'debug' for values? 
+ sql_logger.debug("[SQL] %s", sql) + try: + if args and args[0]: + values = args[0] + sql_logger.debug("[SQL values] " + + ", ".join(("<%s>",) * len(values)), *values) + except: + # Don't let logging failures stop SQL from working + pass + + # TODO(paul): Here would be an excellent place to put some timing + # measurements, and log (warning?) slow queries. + return object.__getattribute__(self, "txn").execute( + sql, *args, **kwargs + ) + class SQLBaseStore(object): @@ -34,6 +73,13 @@ class SQLBaseStore(object): self.event_factory = hs.get_event_factory() self._clock = hs.get_clock() + def runInteraction(self, func, *args, **kwargs): + """Wraps the .runInteraction() method on the underlying db_pool.""" + def inner_func(txn, *args, **kwargs): + return func(LoggingTransaction(txn), *args, **kwargs) + + return self._db_pool.runInteraction(inner_func, *args, **kwargs) + def cursor_to_dict(self, cursor): """Converts a SQL cursor into an list of dicts. @@ -59,11 +105,6 @@ class SQLBaseStore(object): Returns: The result of decoder(results) """ - logger.debug( - "[SQL] %s Args=%s Func=%s", - query, args, decoder.__name__ if decoder else None - ) - def interaction(txn): cursor = txn.execute(query, args) if decoder: @@ -71,7 +112,7 @@ class SQLBaseStore(object): else: return cursor.fetchall() - return self._db_pool.runInteraction(interaction) + return self.runInteraction(interaction) def _execute_and_decode(self, query, *args): return self._execute(self.cursor_to_dict, query, *args) @@ -87,10 +128,11 @@ class SQLBaseStore(object): values : dict of new column names and values for them or_replace : bool; if True performs an INSERT OR REPLACE """ - return self._db_pool.runInteraction( + return self.runInteraction( self._simple_insert_txn, table, values, or_replace=or_replace ) + @log_function def _simple_insert_txn(self, txn, table, values, or_replace=False): sql = "%s INTO %s (%s) VALUES(%s)" % ( ("INSERT OR REPLACE" if or_replace else "INSERT"), @@ -98,6 +140,12 @@ class SQLBaseStore(object): ", ".join(k for k in values), ", ".join("?" 
for k in values) ) + + logger.debug( + "[SQL] %s Args=%s Func=%s", + sql, values.values(), + ) + txn.execute(sql, values.values()) return txn.lastrowid @@ -164,7 +212,7 @@ class SQLBaseStore(object): txn.execute(sql, keyvalues.values()) return txn.fetchall() - res = yield self._db_pool.runInteraction(func) + res = yield self.runInteraction(func) defer.returnValue([r[0] for r in res]) @@ -187,7 +235,7 @@ class SQLBaseStore(object): txn.execute(sql, keyvalues.values()) return self.cursor_to_dict(txn) - return self._db_pool.runInteraction(func) + return self.runInteraction(func) def _simple_update_one(self, table, keyvalues, updatevalues, retcols=None): @@ -255,7 +303,7 @@ class SQLBaseStore(object): raise StoreError(500, "More than one row matched") return ret - return self._db_pool.runInteraction(func) + return self.runInteraction(func) def _simple_delete_one(self, table, keyvalues): """Executes a DELETE query on the named table, expecting to delete a @@ -276,7 +324,7 @@ class SQLBaseStore(object): raise StoreError(404, "No row found") if txn.rowcount > 1: raise StoreError(500, "more than one row matched") - return self._db_pool.runInteraction(func) + return self.runInteraction(func) def _simple_max_id(self, table): """Executes a SELECT query on the named table, expecting to return the @@ -294,7 +342,7 @@ class SQLBaseStore(object): return 0 return max_id - return self._db_pool.runInteraction(func) + return self.runInteraction(func) def _parse_event_from_row(self, row_dict): d = copy.deepcopy({k: v for k, v in row_dict.items() if v}) @@ -307,11 +355,34 @@ class SQLBaseStore(object): d["content"] = json.loads(d["content"]) del d["unrecognized_keys"] + if "age_ts" not in d: + # For compatibility + d["age_ts"] = d["ts"] if "ts" in d else 0 + return self.event_factory.create_event( etype=d["type"], **d ) + def _parse_events(self, rows): + return self.runInteraction(self._parse_events_txn, rows) + + def _parse_events_txn(self, txn, rows): + events = [self._parse_event_from_row(r) for r in rows] + + sql = "SELECT * FROM events WHERE event_id = ?" + + for ev in events: + if hasattr(ev, "prev_state"): + # Load previous state_content. + # TODO: Should we be pulling this out above? + cursor = txn.execute(sql, (ev.prev_state,)) + prevs = self.cursor_to_dict(cursor) + if prevs: + prev = self._parse_event_from_row(prevs[0]) + ev.prev_content = prev.content + + return events class Table(object): """ A base class used to store information about a particular table. 
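The prepare_database() logic added to synapse/storage/__init__.py above keys schema upgrades off sqlite's PRAGMA user_version, applying delta/v*.sql scripts until the database matches SCHEMA_VERSION. A minimal, standalone illustration of reading that version (the helper is not part of the diff):

    import sqlite3

    def get_schema_version(db_path):
        # Returns the PRAGMA user_version that prepare_database() compares
        # against SCHEMA_VERSION to decide which delta scripts still need
        # to run; 0 means a freshly created database.
        conn = sqlite3.connect(db_path)
        try:
            row = conn.execute("PRAGMA user_version").fetchone()
            return row[0] if row else 0
        finally:
            conn.close()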
diff --git a/synapse/storage/directory.py b/synapse/storage/directory.py index bf55449253..540eb4c2c4 100644 --- a/synapse/storage/directory.py +++ b/synapse/storage/directory.py @@ -92,3 +92,10 @@ class DirectoryStore(SQLBaseStore): "server": server, } ) + + def get_aliases_for_room(self, room_id): + return self._simple_select_onecol( + "room_aliases", + {"room_id": room_id}, + "room_alias", + ) diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py index 0bf97e37ee..d70467dcd6 100644 --- a/synapse/storage/pdu.py +++ b/synapse/storage/pdu.py @@ -17,6 +17,7 @@ from twisted.internet import defer from ._base import SQLBaseStore, Table, JoinHelper +from synapse.federation.units import Pdu from synapse.util.logutils import log_function from collections import namedtuple @@ -42,7 +43,7 @@ class PduStore(SQLBaseStore): PduTuple: If the pdu does not exist in the database, returns None """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_pdu_tuple, pdu_id, origin ) @@ -94,7 +95,7 @@ class PduStore(SQLBaseStore): list: A list of PduTuples """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_current_state_for_context, context ) @@ -142,7 +143,7 @@ class PduStore(SQLBaseStore): pdu_origin (str) """ - return self._db_pool.runInteraction( + return self.runInteraction( self._mark_as_processed, pdu_id, pdu_origin ) @@ -151,7 +152,7 @@ class PduStore(SQLBaseStore): def get_all_pdus_from_context(self, context): """Get a list of all PDUs for a given context.""" - return self._db_pool.runInteraction( + return self.runInteraction( self._get_all_pdus_from_context, context, ) @@ -178,7 +179,7 @@ class PduStore(SQLBaseStore): Return: list: A list of PduTuples """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_backfill, context, pdu_list, limit ) @@ -239,7 +240,7 @@ class PduStore(SQLBaseStore): txn context (str) """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_min_depth_for_context, context ) @@ -308,8 +309,8 @@ class PduStore(SQLBaseStore): @defer.inlineCallbacks def get_oldest_pdus_in_context(self, context): - """Get a list of Pdus that we haven't backfilled beyond yet (and haven't - seen). This list is used when we want to backfill backwards and is the + """Get a list of Pdus that we haven't backfilled beyond yet (and havent + seen). This list is used when we want to backfill backwards and is the list we send to the remote server. 
Args: @@ -345,7 +346,7 @@ class PduStore(SQLBaseStore): bool """ - return self._db_pool.runInteraction( + return self.runInteraction( self._is_pdu_new, pdu_id=pdu_id, origin=origin, @@ -498,7 +499,7 @@ class StatePduStore(SQLBaseStore): ) def get_unresolved_state_tree(self, new_state_pdu): - return self._db_pool.runInteraction( + return self.runInteraction( self._get_unresolved_state_tree, new_state_pdu ) @@ -516,7 +517,7 @@ class StatePduStore(SQLBaseStore): if not current: logger.debug("get_unresolved_state_tree No current state.") - return return_value + return (return_value, None) return_value.current_branch.append(current) @@ -524,17 +525,20 @@ class StatePduStore(SQLBaseStore): txn, new_pdu, current ) + missing_branch = None for branch, prev_state, state in enum_branches: if state: return_value[branch].append(state) else: + # We don't have prev_state :( + missing_branch = branch break - return return_value + return (return_value, missing_branch) def update_current_state(self, pdu_id, origin, context, pdu_type, state_key): - return self._db_pool.runInteraction( + return self.runInteraction( self._update_current_state, pdu_id, origin, context, pdu_type, state_key ) @@ -573,7 +577,7 @@ class StatePduStore(SQLBaseStore): PduEntry """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_current_state_pdu, context, pdu_type, state_key ) @@ -622,53 +626,6 @@ class StatePduStore(SQLBaseStore): return result - def get_next_missing_pdu(self, new_pdu): - """When we get a new state pdu we need to check whether we need to do - any conflict resolution, if we do then we need to check if we need - to go back and request some more state pdus that we haven't seen yet. - - Args: - txn - new_pdu - - Returns: - PduIdTuple: A pdu that we are missing, or None if we have all the - pdus required to do the conflict resolution. - """ - return self._db_pool.runInteraction( - self._get_next_missing_pdu, new_pdu - ) - - def _get_next_missing_pdu(self, txn, new_pdu): - logger.debug( - "get_next_missing_pdu %s %s", - new_pdu.pdu_id, new_pdu.origin - ) - - current = self._get_current_interaction( - txn, - new_pdu.context, new_pdu.pdu_type, new_pdu.state_key - ) - - if (not current or not current.prev_state_id - or not current.prev_state_origin): - return None - - # Oh look, it's a straight clobber, so wooooo almost no-op. - if (new_pdu.prev_state_id == current.pdu_id - and new_pdu.prev_state_origin == current.origin): - return None - - enum_branches = self._enumerate_state_branches(txn, new_pdu, current) - for branch, prev_state, state in enum_branches: - if not state: - return PduIdTuple( - prev_state.prev_state_id, - prev_state.prev_state_origin - ) - - return None - def handle_new_state(self, new_pdu): """Actually perform conflict resolution on the new_pdu on the assumption we have all the pdus required to perform it. @@ -679,7 +636,7 @@ class StatePduStore(SQLBaseStore): Returns: bool: True if the new_pdu clobbered the current state, False if not """ - return self._db_pool.runInteraction( + return self.runInteraction( self._handle_new_state, new_pdu ) @@ -752,24 +709,11 @@ class StatePduStore(SQLBaseStore): return is_current - @classmethod @log_function - def _enumerate_state_branches(cls, txn, pdu_a, pdu_b): + def _enumerate_state_branches(self, txn, pdu_a, pdu_b): branch_a = pdu_a branch_b = pdu_b - get_query = ( - "SELECT %(fields)s FROM %(pdus)s as p " - "LEFT JOIN %(state)s as s " - "ON p.pdu_id = s.pdu_id AND p.origin = s.origin " - "WHERE p.pdu_id = ? AND p.origin = ? 
" - ) % { - "fields": _pdu_state_joiner.get_fields( - PdusTable="p", StatePdusTable="s"), - "pdus": PdusTable.table_name, - "state": StatePdusTable.table_name, - } - while True: if (branch_a.pdu_id == branch_b.pdu_id and branch_a.origin == branch_b.origin): @@ -801,13 +745,12 @@ class StatePduStore(SQLBaseStore): branch_a.prev_state_origin ) - logger.debug("getting branch_a prev %s", pdu_tuple) - txn.execute(get_query, pdu_tuple) - prev_branch = branch_a - res = txn.fetchone() - branch_a = PduEntry(*res) if res else None + logger.debug("getting branch_a prev %s", pdu_tuple) + branch_a = self._get_pdu_tuple(txn, *pdu_tuple) + if branch_a: + branch_a = Pdu.from_pdu_tuple(branch_a) logger.debug("branch_a=%s", branch_a) @@ -820,14 +763,13 @@ class StatePduStore(SQLBaseStore): branch_b.prev_state_id, branch_b.prev_state_origin ) - txn.execute(get_query, pdu_tuple) - - logger.debug("getting branch_b prev %s", pdu_tuple) prev_branch = branch_b - res = txn.fetchone() - branch_b = PduEntry(*res) if res else None + logger.debug("getting branch_b prev %s", pdu_tuple) + branch_b = self._get_pdu_tuple(txn, *pdu_tuple) + if branch_b: + branch_b = Pdu.from_pdu_tuple(branch_b) logger.debug("branch_b=%s", branch_b) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index fd762bc643..db20b1daa0 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -62,7 +62,7 @@ class RegistrationStore(SQLBaseStore): Raises: StoreError if the user_id could not be registered. """ - yield self._db_pool.runInteraction(self._register, user_id, token, + yield self.runInteraction(self._register, user_id, token, password_hash) def _register(self, txn, user_id, token, password_hash): @@ -99,7 +99,7 @@ class RegistrationStore(SQLBaseStore): Raises: StoreError if no user was found. """ - user_id = yield self._db_pool.runInteraction(self._query_for_auth, + user_id = yield self.runInteraction(self._query_for_auth, token) defer.returnValue(user_id) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 017169ce00..5adf8cdf1b 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -149,7 +149,7 @@ class RoomStore(SQLBaseStore): defer.returnValue(None) def get_power_level(self, room_id, user_id): - return self._db_pool.runInteraction( + return self.runInteraction( self._get_power_level, room_id, user_id, ) @@ -182,7 +182,7 @@ class RoomStore(SQLBaseStore): return None def get_ops_levels(self, room_id): - return self._db_pool.runInteraction( + return self.runInteraction( self._get_ops_levels, room_id, ) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 75c9a60101..04b4067d03 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -18,6 +18,7 @@ from twisted.internet import defer from ._base import SQLBaseStore from synapse.api.constants import Membership +from synapse.util.logutils import log_function import logging @@ -29,8 +30,18 @@ class RoomMemberStore(SQLBaseStore): def _store_room_member_txn(self, txn, event): """Store a room member in the database. 
""" - target_user_id = event.state_key - domain = self.hs.parse_userid(target_user_id).domain + try: + target_user_id = event.state_key + domain = self.hs.parse_userid(target_user_id).domain + except: + logger.exception("Failed to parse target_user_id=%s", target_user_id) + raise + + logger.debug( + "_store_room_member_txn: target_user_id=%s, membership=%s", + target_user_id, + event.membership, + ) self._simple_insert_txn( txn, @@ -51,12 +62,30 @@ class RoomMemberStore(SQLBaseStore): "VALUES (?, ?)" ) txn.execute(sql, (event.room_id, domain)) - else: - sql = ( - "DELETE FROM room_hosts WHERE room_id = ? AND host = ?" + elif event.membership != Membership.INVITE: + # Check if this was the last person to have left. + member_events = self._get_members_query_txn( + txn, + where_clause="c.room_id = ? AND m.membership = ? AND m.user_id != ?", + where_values=(event.room_id, Membership.JOIN, target_user_id,) ) - txn.execute(sql, (event.room_id, domain)) + joined_domains = set() + for e in member_events: + try: + joined_domains.add( + self.hs.parse_userid(e.state_key).domain + ) + except: + # FIXME: How do we deal with invalid user ids in the db? + logger.exception("Invalid user_id: %s", event.state_key) + + if domain not in joined_domains: + sql = ( + "DELETE FROM room_hosts WHERE room_id = ? AND host = ?" + ) + + txn.execute(sql, (event.room_id, domain)) @defer.inlineCallbacks def get_room_member(self, user_id, room_id): @@ -88,7 +117,7 @@ class RoomMemberStore(SQLBaseStore): txn.execute(sql, (user_id, room_id)) rows = self.cursor_to_dict(txn) if rows: - return self._parse_event_from_row(rows[0]) + return self._parse_events_txn(txn, rows)[0] else: return None @@ -120,7 +149,7 @@ class RoomMemberStore(SQLBaseStore): membership_list (list): A list of synapse.api.constants.Membership values which the user must be in. Returns: - A list of dicts with "room_id" and "membership" keys. + A list of RoomMemberEvent objects """ if not membership_list: return defer.succeed(None) @@ -146,8 +175,13 @@ class RoomMemberStore(SQLBaseStore): vals = where_dict.values() return self._get_members_query(clause, vals) - @defer.inlineCallbacks def _get_members_query(self, where_clause, where_values): + return self._db_pool.runInteraction( + self._get_members_query_txn, + where_clause, where_values + ) + + def _get_members_query_txn(self, txn, where_clause, where_values): sql = ( "SELECT e.* FROM events as e " "INNER JOIN room_memberships as m " @@ -157,18 +191,18 @@ class RoomMemberStore(SQLBaseStore): "WHERE %s " ) % (where_clause,) - rows = yield self._execute_and_decode(sql, *where_values) + txn.execute(sql, where_values) + rows = self.cursor_to_dict(txn) - # logger.debug("_get_members_query Got rows %s", rows) - - results = [self._parse_event_from_row(r) for r in rows] - defer.returnValue(results) + results = self._parse_events_txn(txn, rows) + return results @defer.inlineCallbacks - def user_rooms_intersect(self, user_list): - """ Checks whether a list of users share a room. + def user_rooms_intersect(self, user_id_list): + """ Checks whether all the users whose IDs are given in a list share a + room. """ - user_list_clause = " OR ".join(["m.user_id = ?"] * len(user_list)) + user_list_clause = " OR ".join(["m.user_id = ?"] * len(user_id_list)) sql = ( "SELECT m.room_id FROM room_memberships as m " "INNER JOIN current_state_events as c " @@ -178,8 +212,8 @@ class RoomMemberStore(SQLBaseStore): "GROUP BY m.room_id HAVING COUNT(m.room_id) = ?" 
) % {"clause": user_list_clause} - args = user_list - args.append(len(user_list)) + args = list(user_id_list) + args.append(len(user_id_list)) rows = yield self._execute(None, sql, *args) diff --git a/synapse/storage/schema/delta/v3.sql b/synapse/storage/schema/delta/v3.sql new file mode 100644 index 0000000000..cade295989 --- /dev/null +++ b/synapse/storage/schema/delta/v3.sql @@ -0,0 +1,27 @@ +/* Copyright 2014 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +CREATE INDEX IF NOT EXISTS room_aliases_alias ON room_aliases(room_alias); +CREATE INDEX IF NOT EXISTS room_aliases_id ON room_aliases(room_id); + + +CREATE INDEX IF NOT EXISTS room_alias_servers_alias ON room_alias_servers(room_alias); + +DELETE FROM room_aliases WHERE rowid NOT IN (SELECT max(rowid) FROM room_aliases GROUP BY room_alias, room_id); + +CREATE UNIQUE INDEX IF NOT EXISTS room_aliases_uniq ON room_aliases(room_alias, room_id); + +PRAGMA user_version = 3; diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 2cb0067a67..a76fecf24f 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -146,7 +146,7 @@ class StreamStore(SQLBaseStore): current_room_membership_sql = ( "SELECT m.room_id FROM room_memberships as m " "INNER JOIN current_state_events as c ON m.event_id = c.event_id " - "WHERE m.user_id = ?" + "WHERE m.user_id = ? AND m.membership = 'join'" ) # We also want to get any membership events about that user, e.g. @@ -188,7 +188,7 @@ class StreamStore(SQLBaseStore): user_id, user_id, from_id, to_id ) - ret = [self._parse_event_from_row(r) for r in rows] + ret = yield self._parse_events(rows) if rows: key = "s%d" % max([r["stream_ordering"] for r in rows]) @@ -243,9 +243,11 @@ class StreamStore(SQLBaseStore): # TODO (erikj): We should work out what to do here instead. 
next_token = to_key if to_key else from_key + events = yield self._parse_events(rows) + defer.returnValue( ( - [self._parse_event_from_row(r) for r in rows], + events, next_token ) ) @@ -277,15 +279,14 @@ class StreamStore(SQLBaseStore): else: token = (end_token, end_token) - defer.returnValue( - ( - [self._parse_event_from_row(r) for r in rows], - token - ) - ) + events = yield self._parse_events(rows) + + ret = (events, token) + + defer.returnValue(ret) def get_room_events_max_id(self): - return self._db_pool.runInteraction(self._get_room_events_max_id_txn) + return self.runInteraction(self._get_room_events_max_id_txn) def _get_room_events_max_id_txn(self, txn): txn.execute( diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index 7467e1035b..ab4599b468 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -41,7 +41,7 @@ class TransactionStore(SQLBaseStore): this transaction or a 2-tuple of (int, dict) """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_received_txn_response, transaction_id, origin ) @@ -72,7 +72,7 @@ class TransactionStore(SQLBaseStore): response_json (str) """ - return self._db_pool.runInteraction( + return self.runInteraction( self._set_received_txn_response, transaction_id, origin, code, response_dict ) @@ -104,7 +104,7 @@ class TransactionStore(SQLBaseStore): list: A list of previous transaction ids. """ - return self._db_pool.runInteraction( + return self.runInteraction( self._prep_send_transaction, transaction_id, destination, ts, pdu_list ) @@ -159,7 +159,7 @@ class TransactionStore(SQLBaseStore): code (int) response_json (str) """ - return self._db_pool.runInteraction( + return self.runInteraction( self._delivered_txn, transaction_id, destination, code, response_dict ) @@ -184,7 +184,7 @@ class TransactionStore(SQLBaseStore): Returns: list: A list of `ReceivedTransactionsTable.EntryType` """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_transactions_after, transaction_id, destination ) @@ -214,7 +214,7 @@ class TransactionStore(SQLBaseStore): Returns list: A list of PduTuple """ - return self._db_pool.runInteraction( + return self.runInteraction( self._get_pdus_after_transaction, transaction_id, destination ) diff --git a/synapse/util/emailutils.py b/synapse/util/emailutils.py new file mode 100644 index 0000000000..cdb0abd7ea --- /dev/null +++ b/synapse/util/emailutils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" This module allows you to send out emails. +""" +import email.utils +import smtplib +import twisted.python.log +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart + +import logging + +logger = logging.getLogger(__name__) + + +class EmailException(Exception): + pass + + +def send_email(smtp_server, from_addr, to_addr, subject, body): + """Sends an email. + + Args: + smtp_server(str): The SMTP server to use. 
+ from_addr(str): The address to send from. + to_addr(str): The address to send to. + subject(str): The subject of the email. + body(str): The plain text body of the email. + Raises: + EmailException if there was a problem sending the mail. + """ + if not smtp_server or not from_addr or not to_addr: + raise EmailException("Need SMTP server, from and to addresses. Check " + + "the config to set these.") + + msg = MIMEMultipart('alternative') + msg['Subject'] = subject + msg['From'] = from_addr + msg['To'] = to_addr + plain_part = MIMEText(body) + msg.attach(plain_part) + + raw_from = email.utils.parseaddr(from_addr)[1] + raw_to = email.utils.parseaddr(to_addr)[1] + if not raw_from or not raw_to: + raise EmailException("Couldn't parse from/to address.") + + logger.info("Sending email to %s on server %s with subject %s", + to_addr, smtp_server, subject) + + try: + smtp = smtplib.SMTP(smtp_server) + smtp.sendmail(raw_from, raw_to, msg.as_string()) + smtp.quit() + except Exception as origException: + twisted.python.log.err() + ese = EmailException() + ese.cause = origException + raise ese \ No newline at end of file diff --git a/synctl b/synctl index 99dc545ba5..0f83e9cb1f 100755 --- a/synctl +++ b/synctl @@ -4,7 +4,6 @@ SYNAPSE="synapse/app/homeserver.py" CONFIGFILE="homeserver.yaml" PIDFILE="homeserver.pid" -LOGFILE="homeserver.log" GREEN=$'\e[1;32m' NORMAL=$'\e[m' @@ -14,15 +13,13 @@ set -e case "$1" in start) if [ ! -f "$CONFIGFILE" ]; then - echo "No config file found - generating a default one..." - $SYNAPSE -c "$CONFIGFILE" --generate-config - echo "Wrote $CONFIGFILE" - echo "You must now edit this file before continuing" + echo "No config file found" + echo "To generate a config file, run 'python --generate-config'" exit 1 fi echo -n "Starting ..." 
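
A hedged usage sketch for the new ``synapse/util/emailutils.py`` helper added above; the SMTP host and addresses are placeholders::

    from synapse.util.emailutils import EmailException, send_email

    try:
        send_email(
            smtp_server="localhost",            # placeholder SMTP relay
            from_addr="Synapse <noreply@example.com>",
            to_addr="someone@example.com",
            subject="Password reset",
            body="Follow the link below to reset your password...",
        )
    except EmailException as e:
        # `cause` is only set when the SMTP send itself failed.
        print("Could not send email: %s" % (getattr(e, "cause", e),))
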
- $SYNAPSE --daemonize -c "$CONFIGFILE" --pid-file "$PIDFILE" --log-file "$LOGFILE" + $SYNAPSE --daemonize -c "$CONFIGFILE" --pid-file "$PIDFILE" echo "${GREEN}started${NORMAL}" ;; stop) diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index dc2f83c7eb..dd0bc19ecf 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -1,6 +1,6 @@ from synapse.api.ratelimiting import Ratelimiter -import unittest +from tests import unittest class TestRatelimiter(unittest.TestCase): diff --git a/tests/events/test_events.py b/tests/events/test_events.py index 93d5c15c6f..a4b6cb3afd 100644 --- a/tests/events/test_events.py +++ b/tests/events/test_events.py @@ -15,7 +15,7 @@ from synapse.api.events import SynapseEvent -import unittest +from tests import unittest class SynapseTemplateCheckTestCase(unittest.TestCase): diff --git a/tests/federation/test_federation.py b/tests/federation/test_federation.py index 0b105fe723..bb17e9aafe 100644 --- a/tests/federation/test_federation.py +++ b/tests/federation/test_federation.py @@ -14,11 +14,10 @@ # trial imports from twisted.internet import defer -from twisted.trial import unittest +from tests import unittest # python imports -from mock import Mock -import logging +from mock import Mock, ANY from ..utils import MockHttpResource, MockClock @@ -28,9 +27,6 @@ from synapse.federation.units import Pdu from synapse.storage.pdu import PduTuple, PduEntry -logging.getLogger().addHandler(logging.NullHandler()) - - def make_pdu(prev_pdus=[], **kwargs): """Provide some default fields for making a PduTuple.""" pdu_fields = { @@ -185,7 +181,8 @@ class FederationTestCase(unittest.TestCase): "depth": 1, }, ] - } + }, + on_send_callback=ANY, ) @defer.inlineCallbacks @@ -216,7 +213,9 @@ class FederationTestCase(unittest.TestCase): "content": {"testing": "content here"}, } ], - }) + }, + on_send_callback=ANY, + ) @defer.inlineCallbacks def test_recv_edu(self): diff --git a/tests/federation/test_pdu_codec.py b/tests/federation/test_pdu_codec.py index 9f74ba119f..344e1baf60 100644 --- a/tests/federation/test_pdu_codec.py +++ b/tests/federation/test_pdu_codec.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.trial import unittest +from tests import unittest from synapse.federation.pdu_codec import ( PduCodec, encode_event_id, decode_event_id diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 72a2b1443a..dd5d85dde6 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -14,19 +14,17 @@ # limitations under the License. -from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock -import logging from synapse.server import HomeServer from synapse.http.client import HttpClient from synapse.handlers.directory import DirectoryHandler from synapse.storage.directory import RoomAliasMapping - -logging.getLogger().addHandler(logging.NullHandler()) +from tests.utils import SQLiteMemoryDbPool class DirectoryHandlers(object): @@ -37,6 +35,7 @@ class DirectoryHandlers(object): class DirectoryTestCase(unittest.TestCase): """ Tests the directory service. 
""" + @defer.inlineCallbacks def setUp(self): self.mock_federation = Mock(spec=[ "make_query", @@ -47,11 +46,11 @@ class DirectoryTestCase(unittest.TestCase): self.query_handlers[query_type] = handler self.mock_federation.register_query_handler = register_query_handler + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + hs = HomeServer("test", - datastore=Mock(spec=[ - "get_association_from_room_alias", - "get_joined_hosts_for_room", - ]), + db_pool=db_pool, http_client=None, resource_for_federation=Mock(), replication_layer=self.mock_federation, @@ -60,20 +59,16 @@ class DirectoryTestCase(unittest.TestCase): self.handler = hs.get_handlers().directory_handler - self.datastore = hs.get_datastore() - - def hosts(room_id): - return defer.succeed([]) - self.datastore.get_joined_hosts_for_room.side_effect = hosts + self.store = hs.get_datastore() self.my_room = hs.parse_roomalias("#my-room:test") + self.your_room = hs.parse_roomalias("#your-room:test") self.remote_room = hs.parse_roomalias("#another:remote") @defer.inlineCallbacks def test_get_local_association(self): - mocked_get = self.datastore.get_association_from_room_alias - mocked_get.return_value = defer.succeed( - RoomAliasMapping("!8765qwer:test", "#my-room:test", ["test"]) + yield self.store.create_room_alias_association( + self.my_room, "!8765qwer:test", ["test"] ) result = yield self.handler.get_association(self.my_room) @@ -106,9 +101,8 @@ class DirectoryTestCase(unittest.TestCase): @defer.inlineCallbacks def test_incoming_fed_query(self): - mocked_get = self.datastore.get_association_from_room_alias - mocked_get.return_value = defer.succeed( - RoomAliasMapping("!8765asdf:test", "#your-room:test", ["test"]) + yield self.store.create_room_alias_association( + self.your_room, "!8765asdf:test", ["test"] ) response = yield self.query_handlers["directory"]( diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 6fc3d8f7fd..eb6b7c22ef 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -14,7 +14,7 @@ from twisted.internet import defer -from twisted.trial import unittest +from tests import unittest from synapse.api.events.room import ( InviteJoinEvent, MessageEvent, RoomMemberEvent @@ -26,12 +26,8 @@ from synapse.federation.units import Pdu from mock import NonCallableMock, ANY -import logging - from ..utils import get_mock_call_args -logging.getLogger().addHandler(logging.NullHandler()) - class FederationTestCase(unittest.TestCase): @@ -78,7 +74,9 @@ class FederationTestCase(unittest.TestCase): yield self.handlers.federation_handler.on_receive_pdu(pdu, False) - self.datastore.persist_event.assert_called_once_with(ANY, False) + self.datastore.persist_event.assert_called_once_with( + ANY, False, is_new_state=False + ) self.notifier.on_new_room_event.assert_called_once_with(ANY) @defer.inlineCallbacks diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 9eb8b6909f..765929d204 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -14,14 +14,15 @@ # limitations under the License. 
-from twisted.trial import unittest +from tests import unittest from twisted.internet import defer, reactor from mock import Mock, call, ANY -import logging import json -from ..utils import MockHttpResource, MockClock, DeferredMockCallable +from tests.utils import ( + MockHttpResource, MockClock, DeferredMockCallable, SQLiteMemoryDbPool +) from synapse.server import HomeServer from synapse.api.constants import PresenceState @@ -34,9 +35,6 @@ UNAVAILABLE = PresenceState.UNAVAILABLE ONLINE = PresenceState.ONLINE -logging.getLogger().addHandler(logging.NullHandler()) - - def _expect_edu(destination, edu_type, content, origin="test"): return { "origin": origin, @@ -64,41 +62,36 @@ class JustPresenceHandlers(object): class PresenceStateTestCase(unittest.TestCase): """ Tests presence management. """ + @defer.inlineCallbacks def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + hs = HomeServer("test", - clock=MockClock(), - db_pool=None, - datastore=Mock(spec=[ - "get_presence_state", - "set_presence_state", - "add_presence_list_pending", - "set_presence_list_accepted", - ]), - handlers=None, - resource_for_federation=Mock(), - http_client=None, - ) + clock=MockClock(), + db_pool=db_pool, + handlers=None, + resource_for_federation=Mock(), + http_client=None, + ) hs.handlers = JustPresenceHandlers(hs) - self.datastore = hs.get_datastore() - - def is_presence_visible(observed_localpart, observer_userid): - allow = (observed_localpart == "apple" and - observer_userid == "@banana:test" - ) - return defer.succeed(allow) - self.datastore.is_presence_visible = is_presence_visible + self.store = hs.get_datastore() # Mock the RoomMemberHandler room_member_handler = Mock(spec=[]) hs.handlers.room_member_handler = room_member_handler - logging.getLogger().debug("Mocking room_member_handler=%r", room_member_handler) # Some local users to test with self.u_apple = hs.parse_userid("@apple:test") self.u_banana = hs.parse_userid("@banana:test") self.u_clementine = hs.parse_userid("@clementine:test") + yield self.store.create_presence(self.u_apple.localpart) + yield self.store.set_presence_state( + self.u_apple.localpart, {"state": ONLINE, "status_msg": "Online"} + ) + self.handler = hs.get_handlers().presence_handler self.room_members = [] @@ -122,7 +115,7 @@ class PresenceStateTestCase(unittest.TestCase): shared = all(map(lambda i: i in room_member_ids, userlist)) return defer.succeed(shared) - self.datastore.user_rooms_intersect = user_rooms_intersect + self.store.user_rooms_intersect = user_rooms_intersect self.mock_start = Mock() self.mock_stop = Mock() @@ -132,11 +125,6 @@ class PresenceStateTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_my_state(self): - mocked_get = self.datastore.get_presence_state - mocked_get.return_value = defer.succeed( - {"state": ONLINE, "status_msg": "Online"} - ) - state = yield self.handler.get_state( target_user=self.u_apple, auth_user=self.u_apple ) @@ -145,13 +133,12 @@ class PresenceStateTestCase(unittest.TestCase): {"presence": ONLINE, "status_msg": "Online"}, state ) - mocked_get.assert_called_with("apple") @defer.inlineCallbacks def test_get_allowed_state(self): - mocked_get = self.datastore.get_presence_state - mocked_get.return_value = defer.succeed( - {"state": ONLINE, "status_msg": "Online"} + yield self.store.allow_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), ) state = yield self.handler.get_state( @@ -162,15 +149,9 @@ class 
PresenceStateTestCase(unittest.TestCase): {"presence": ONLINE, "status_msg": "Online"}, state ) - mocked_get.assert_called_with("apple") @defer.inlineCallbacks def test_get_same_room_state(self): - mocked_get = self.datastore.get_presence_state - mocked_get.return_value = defer.succeed( - {"state": ONLINE, "status_msg": "Online"} - ) - self.room_members = [self.u_apple, self.u_clementine] state = yield self.handler.get_state( @@ -184,11 +165,6 @@ class PresenceStateTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_disallowed_state(self): - mocked_get = self.datastore.get_presence_state - mocked_get.return_value = defer.succeed( - {"state": ONLINE, "status_msg": "Online"} - ) - self.room_members = [] yield self.assertFailure( @@ -200,16 +176,17 @@ class PresenceStateTestCase(unittest.TestCase): @defer.inlineCallbacks def test_set_my_state(self): - mocked_set = self.datastore.set_presence_state - mocked_set.return_value = defer.succeed({"state": OFFLINE}) - yield self.handler.set_state( target_user=self.u_apple, auth_user=self.u_apple, state={"presence": UNAVAILABLE, "status_msg": "Away"}) - mocked_set.assert_called_with("apple", - {"state": UNAVAILABLE, "status_msg": "Away"} + self.assertEquals( + {"state": UNAVAILABLE, + "status_msg": "Away", + "mtime": 1000000}, + (yield self.store.get_presence_state(self.u_apple.localpart)) ) + self.mock_start.assert_called_with(self.u_apple, state={ "presence": UNAVAILABLE, @@ -227,50 +204,34 @@ class PresenceStateTestCase(unittest.TestCase): class PresenceInvitesTestCase(unittest.TestCase): """ Tests presence management. """ + @defer.inlineCallbacks def setUp(self): self.mock_http_client = Mock(spec=[]) self.mock_http_client.put_json = DeferredMockCallable() self.mock_federation_resource = MockHttpResource() - hs = HomeServer("test", - clock=MockClock(), - db_pool=None, - datastore=Mock(spec=[ - "has_presence_state", - "allow_presence_visible", - "add_presence_list_pending", - "set_presence_list_accepted", - "get_presence_list", - "del_presence_list", + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() - # Bits that Federation needs - "prep_send_transaction", - "delivered_txn", - "get_received_txn_response", - "set_received_txn_response", - ]), - handlers=None, - resource_for_client=Mock(), - resource_for_federation=self.mock_federation_resource, - http_client=self.mock_http_client, - ) + hs = HomeServer("test", + clock=MockClock(), + db_pool=db_pool, + handlers=None, + resource_for_client=Mock(), + resource_for_federation=self.mock_federation_resource, + http_client=self.mock_http_client, + ) hs.handlers = JustPresenceHandlers(hs) - self.datastore = hs.get_datastore() - - def has_presence_state(user_localpart): - return defer.succeed( - user_localpart in ("apple", "banana")) - self.datastore.has_presence_state = has_presence_state - - def get_received_txn_response(*args): - return defer.succeed(None) - self.datastore.get_received_txn_response = get_received_txn_response + self.store = hs.get_datastore() # Some local users to test with self.u_apple = hs.parse_userid("@apple:test") self.u_banana = hs.parse_userid("@banana:test") + yield self.store.create_presence(self.u_apple.localpart) + yield self.store.create_presence(self.u_banana.localpart) + # ID of a local user that does not exist self.u_durian = hs.parse_userid("@durian:test") @@ -293,12 +254,16 @@ class PresenceInvitesTestCase(unittest.TestCase): yield self.handler.send_invite( observer_user=self.u_apple, observed_user=self.u_banana) - 
self.datastore.add_presence_list_pending.assert_called_with( - "apple", "@banana:test") - self.datastore.allow_presence_visible.assert_called_with( - "banana", "@apple:test") - self.datastore.set_presence_list_accepted.assert_called_with( - "apple", "@banana:test") + self.assertEquals( + [{"observed_user_id": "@banana:test", "accepted": 1}], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) + self.assertTrue( + (yield self.store.is_presence_visible( + observed_localpart=self.u_banana.localpart, + observer_userid=self.u_apple.to_string(), + )) + ) self.mock_start.assert_called_with( self.u_apple, target_user=self.u_banana) @@ -308,10 +273,10 @@ class PresenceInvitesTestCase(unittest.TestCase): yield self.handler.send_invite( observer_user=self.u_apple, observed_user=self.u_durian) - self.datastore.add_presence_list_pending.assert_called_with( - "apple", "@durian:test") - self.datastore.del_presence_list.assert_called_with( - "apple", "@durian:test") + self.assertEquals( + [], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) @defer.inlineCallbacks def test_invite_remote(self): @@ -324,7 +289,8 @@ class PresenceInvitesTestCase(unittest.TestCase): "observer_user": "@apple:test", "observed_user": "@cabbage:elsewhere", } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -332,8 +298,10 @@ class PresenceInvitesTestCase(unittest.TestCase): yield self.handler.send_invite( observer_user=self.u_apple, observed_user=self.u_cabbage) - self.datastore.add_presence_list_pending.assert_called_with( - "apple", "@cabbage:elsewhere") + self.assertEquals( + [{"observed_user_id": "@cabbage:elsewhere", "accepted": 0}], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) yield put_json.await_calls() @@ -350,7 +318,8 @@ class PresenceInvitesTestCase(unittest.TestCase): "observer_user": "@cabbage:elsewhere", "observed_user": "@apple:test", } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -365,8 +334,12 @@ class PresenceInvitesTestCase(unittest.TestCase): ) ) - self.datastore.allow_presence_visible.assert_called_with( - "apple", "@cabbage:elsewhere") + self.assertTrue( + (yield self.store.is_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_cabbage.to_string(), + )) + ) yield put_json.await_calls() @@ -381,7 +354,8 @@ class PresenceInvitesTestCase(unittest.TestCase): "observer_user": "@cabbage:elsewhere", "observed_user": "@durian:test", } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -400,6 +374,11 @@ class PresenceInvitesTestCase(unittest.TestCase): @defer.inlineCallbacks def test_accepted_remote(self): + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_cabbage.to_string(), + ) + yield self.mock_federation_resource.trigger("PUT", "/_matrix/federation/v1/send/1000000/", _make_edu_json("elsewhere", "m.presence_accept", @@ -410,14 +389,21 @@ class PresenceInvitesTestCase(unittest.TestCase): ) ) - self.datastore.set_presence_list_accepted.assert_called_with( - "apple", "@cabbage:elsewhere") + self.assertEquals( + [{"observed_user_id": "@cabbage:elsewhere", "accepted": 1}], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) self.mock_start.assert_called_with( self.u_apple, target_user=self.u_cabbage) @defer.inlineCallbacks def test_denied_remote(self): + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid="@eggplant:elsewhere", + ) + yield 
self.mock_federation_resource.trigger("PUT", "/_matrix/federation/v1/send/1000000/", _make_edu_json("elsewhere", "m.presence_deny", @@ -428,32 +414,65 @@ class PresenceInvitesTestCase(unittest.TestCase): ) ) - self.datastore.del_presence_list.assert_called_with( - "apple", "@eggplant:elsewhere") + self.assertEquals( + [], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) @defer.inlineCallbacks def test_drop_local(self): - yield self.handler.drop( - observer_user=self.u_apple, observed_user=self.u_banana) + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) + yield self.store.set_presence_list_accepted( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) - self.datastore.del_presence_list.assert_called_with( - "apple", "@banana:test") + yield self.handler.drop( + observer_user=self.u_apple, + observed_user=self.u_banana, + ) + + self.assertEquals( + [], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) self.mock_stop.assert_called_with( self.u_apple, target_user=self.u_banana) @defer.inlineCallbacks def test_drop_remote(self): - yield self.handler.drop( - observer_user=self.u_apple, observed_user=self.u_cabbage) + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_cabbage.to_string(), + ) + yield self.store.set_presence_list_accepted( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_cabbage.to_string(), + ) - self.datastore.del_presence_list.assert_called_with( - "apple", "@cabbage:elsewhere") + yield self.handler.drop( + observer_user=self.u_apple, + observed_user=self.u_cabbage, + ) + + self.assertEquals( + [], + (yield self.store.get_presence_list(self.u_apple.localpart)) + ) @defer.inlineCallbacks def test_get_presence_list(self): - self.datastore.get_presence_list.return_value = defer.succeed( - [{"observed_user_id": "@banana:test"}] + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) + yield self.store.set_presence_list_accepted( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), ) presence = yield self.handler.get_presence_list( @@ -461,29 +480,10 @@ class PresenceInvitesTestCase(unittest.TestCase): self.assertEquals([ {"observed_user": self.u_banana, - "presence": OFFLINE}, + "presence": OFFLINE, + "accepted": 1}, ], presence) - self.datastore.get_presence_list.assert_called_with("apple", - accepted=None - ) - - self.datastore.get_presence_list.return_value = defer.succeed( - [{"observed_user_id": "@banana:test"}] - ) - - presence = yield self.handler.get_presence_list( - observer_user=self.u_apple, accepted=True - ) - - self.assertEquals([ - {"observed_user": self.u_banana, - "presence": OFFLINE}, - ], presence) - - self.datastore.get_presence_list.assert_called_with("apple", - accepted=True) - class PresencePushTestCase(unittest.TestCase): """ Tests steady-state presence status updates. 
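
The presence tests above now exercise the real PresenceStore API rather than asserting on mocked datastore calls. A short sketch of the invite/accept flow as the updated tests drive it (the function name and user IDs are examples)::

    from twisted.internet import defer


    @defer.inlineCallbacks
    def invite_and_accept(store):
        """Sketch of the PresenceStore calls the updated tests make."""
        yield store.add_presence_list_pending(
            observer_localpart="apple",
            observed_userid="@banana:test",
        )
        yield store.set_presence_list_accepted(
            observer_localpart="apple",
            observed_userid="@banana:test",
        )
        accepted = yield store.get_presence_list(
            observer_localpart="apple", accepted=True
        )
        # -> [{"observed_user_id": "@banana:test", "accepted": 1}]
        defer.returnValue(accepted)
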
@@ -770,7 +770,8 @@ class PresencePushTestCase(unittest.TestCase): "last_active_ago": 0}, ], } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -785,7 +786,8 @@ class PresencePushTestCase(unittest.TestCase): "last_active_ago": 0}, ], } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -911,6 +913,7 @@ class PresencePushTestCase(unittest.TestCase): ], } ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -925,6 +928,7 @@ class PresencePushTestCase(unittest.TestCase): ], } ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -954,6 +958,7 @@ class PresencePushTestCase(unittest.TestCase): ], } ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -1150,6 +1155,7 @@ class PresencePollingTestCase(unittest.TestCase): "poll": [ "@potato:remote" ], }, ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -1162,6 +1168,7 @@ class PresencePollingTestCase(unittest.TestCase): "push": [ {"user_id": "@clementine:test" }], }, ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -1190,6 +1197,7 @@ class PresencePollingTestCase(unittest.TestCase): "push": [ {"user_id": "@fig:test" }], }, ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -1222,6 +1230,7 @@ class PresencePollingTestCase(unittest.TestCase): "unpoll": [ "@potato:remote" ], }, ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -1253,6 +1262,7 @@ class PresencePollingTestCase(unittest.TestCase): ], }, ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) diff --git a/tests/handlers/test_presencelike.py b/tests/handlers/test_presencelike.py index b35980d948..047752ad68 100644 --- a/tests/handlers/test_presencelike.py +++ b/tests/handlers/test_presencelike.py @@ -16,11 +16,10 @@ """This file contains tests of the "presence-like" data that is shared between presence and profiles; namely, the displayname and avatar_url.""" -from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock, call, ANY -import logging from ..utils import MockClock @@ -35,9 +34,6 @@ UNAVAILABLE = PresenceState.UNAVAILABLE ONLINE = PresenceState.ONLINE -logging.getLogger().addHandler(logging.NullHandler()) - - class MockReplication(object): def __init__(self): self.edu_handlers = {} @@ -69,6 +65,8 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): "is_presence_visible", "set_profile_displayname", + + "get_rooms_for_user_where_membership_is", ]), handlers=None, resource_for_federation=Mock(), @@ -136,6 +134,10 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): # Remote user self.u_potato = hs.parse_userid("@potato:remote") + self.mock_get_joined = ( + self.datastore.get_rooms_for_user_where_membership_is + ) + @defer.inlineCallbacks def test_set_my_state(self): self.presence_list = [ @@ -156,6 +158,11 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): @defer.inlineCallbacks def test_push_local(self): + def get_joined(*args): + return defer.succeed([]) + + self.mock_get_joined.side_effect = get_joined + self.presence_list = [ {"observed_user_id": "@banana:test"}, {"observed_user_id": "@clementine:test"}, diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 8e7a89b479..5dc9b456e1 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -14,18 +14,17 @@ # limitations under the License. 
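
Federation sends now carry an ``on_send_callback`` keyword, which is why the expectations above match it with ``mock.ANY``. A toy example of that matching style, not tied to any particular Synapse call::

    from mock import ANY, Mock

    put_json = Mock()
    put_json("remote", path="/send/1000000/", data={},
             on_send_callback=lambda: None)

    # The exact callback object is an implementation detail, so the test only
    # asserts that *some* callback was supplied.
    put_json.assert_called_with(
        "remote", path="/send/1000000/", data={}, on_send_callback=ANY
    )
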
-from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock -import logging from synapse.api.errors import AuthError from synapse.server import HomeServer from synapse.handlers.profile import ProfileHandler +from synapse.api.constants import Membership - -logging.getLogger().addHandler(logging.NullHandler()) +from tests.utils import SQLiteMemoryDbPool class ProfileHandlers(object): @@ -36,6 +35,7 @@ class ProfileHandlers(object): class ProfileTestCase(unittest.TestCase): """ Tests profile management. """ + @defer.inlineCallbacks def setUp(self): self.mock_federation = Mock(spec=[ "make_query", @@ -46,27 +46,26 @@ class ProfileTestCase(unittest.TestCase): self.query_handlers[query_type] = handler self.mock_federation.register_query_handler = register_query_handler + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + hs = HomeServer("test", - db_pool=None, + db_pool=db_pool, http_client=None, - datastore=Mock(spec=[ - "get_profile_displayname", - "set_profile_displayname", - "get_profile_avatar_url", - "set_profile_avatar_url", - ]), handlers=None, resource_for_federation=Mock(), replication_layer=self.mock_federation, ) hs.handlers = ProfileHandlers(hs) - self.datastore = hs.get_datastore() + self.store = hs.get_datastore() self.frank = hs.parse_userid("@1234ABCD:test") self.bob = hs.parse_userid("@4567:test") self.alice = hs.parse_userid("@alice:remote") + yield self.store.create_profile(self.frank.localpart) + self.handler = hs.get_handlers().profile_handler # TODO(paul): Icky signal declarings.. booo @@ -74,22 +73,22 @@ class ProfileTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_my_name(self): - mocked_get = self.datastore.get_profile_displayname - mocked_get.return_value = defer.succeed("Frank") + yield self.store.set_profile_displayname( + self.frank.localpart, "Frank" + ) displayname = yield self.handler.get_displayname(self.frank) self.assertEquals("Frank", displayname) - mocked_get.assert_called_with("1234ABCD") @defer.inlineCallbacks def test_set_my_name(self): - mocked_set = self.datastore.set_profile_displayname - mocked_set.return_value = defer.succeed(()) - yield self.handler.set_displayname(self.frank, self.frank, "Frank Jr.") - mocked_set.assert_called_with("1234ABCD", "Frank Jr.") + self.assertEquals( + (yield self.store.get_profile_displayname(self.frank.localpart)), + "Frank Jr." 
+ ) @defer.inlineCallbacks def test_set_my_name_noauth(self): @@ -114,32 +113,31 @@ class ProfileTestCase(unittest.TestCase): @defer.inlineCallbacks def test_incoming_fed_query(self): - mocked_get = self.datastore.get_profile_displayname - mocked_get.return_value = defer.succeed("Caroline") + yield self.store.create_profile("caroline") + yield self.store.set_profile_displayname("caroline", "Caroline") response = yield self.query_handlers["profile"]( {"user_id": "@caroline:test", "field": "displayname"} ) self.assertEquals({"displayname": "Caroline"}, response) - mocked_get.assert_called_with("caroline") @defer.inlineCallbacks def test_get_my_avatar(self): - mocked_get = self.datastore.get_profile_avatar_url - mocked_get.return_value = defer.succeed("http://my.server/me.png") + yield self.store.set_profile_avatar_url( + self.frank.localpart, "http://my.server/me.png" + ) avatar_url = yield self.handler.get_avatar_url(self.frank) self.assertEquals("http://my.server/me.png", avatar_url) - mocked_get.assert_called_with("1234ABCD") @defer.inlineCallbacks def test_set_my_avatar(self): - mocked_set = self.datastore.set_profile_avatar_url - mocked_set.return_value = defer.succeed(()) - yield self.handler.set_avatar_url(self.frank, self.frank, "http://my.server/pic.gif") - mocked_set.assert_called_with("1234ABCD", "http://my.server/pic.gif") + self.assertEquals( + (yield self.store.get_profile_avatar_url(self.frank.localpart)), + "http://my.server/pic.gif" + ) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index 5687bbea0b..a1a2e80492 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -15,7 +15,7 @@ from twisted.internet import defer -from twisted.trial import unittest +from tests import unittest from synapse.api.events.room import ( InviteJoinEvent, RoomMemberEvent, RoomConfigEvent @@ -27,10 +27,6 @@ from synapse.server import HomeServer from mock import Mock, NonCallableMock -import logging - -logging.getLogger().addHandler(logging.NullHandler()) - class RoomMemberHandlerTestCase(unittest.TestCase): diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 6532ac94a3..a66f208abf 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -14,12 +14,11 @@ # limitations under the License. -from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock, call, ANY import json -import logging from ..utils import MockHttpResource, MockClock, DeferredMockCallable @@ -27,9 +26,6 @@ from synapse.server import HomeServer from synapse.handlers.typing import TypingNotificationHandler -logging.getLogger().addHandler(logging.NullHandler()) - - def _expect_edu(destination, edu_type, content, origin="test"): return { "origin": origin, @@ -173,7 +169,8 @@ class TypingNotificationsTestCase(unittest.TestCase): "user_id": self.u_apple.to_string(), "typing": True, } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) @@ -223,7 +220,8 @@ class TypingNotificationsTestCase(unittest.TestCase): "user_id": self.u_apple.to_string(), "typing": False, } - ) + ), + on_send_callback=ANY, ), defer.succeed((200, "OK")) ) diff --git a/tests/rest/test_events.py b/tests/rest/test_events.py index 1dccf4c503..79b371c04d 100644 --- a/tests/rest/test_events.py +++ b/tests/rest/test_events.py @@ -14,7 +14,7 @@ # limitations under the License. 
""" Tests REST events for /events paths.""" -from twisted.trial import unittest +from tests import unittest # twisted imports from twisted.internet import defer @@ -27,14 +27,12 @@ from synapse.server import HomeServer # python imports import json -import logging from ..utils import MockHttpResource, MemoryDataStore from .utils import RestTestCase from mock import Mock, NonCallableMock -logging.getLogger().addHandler(logging.NullHandler()) PATH_PREFIX = "/_matrix/client/api/v1" @@ -145,6 +143,7 @@ class EventStreamPermissionsTestCase(RestTestCase): ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) + hs.config.enable_registration_captcha = False hs.get_handlers().federation_handler = Mock() diff --git a/tests/rest/test_presence.py b/tests/rest/test_presence.py index a1db0fbcf3..ea3478ac5d 100644 --- a/tests/rest/test_presence.py +++ b/tests/rest/test_presence.py @@ -15,11 +15,10 @@ """Tests REST events for /presence paths.""" -from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock -import logging from ..utils import MockHttpResource @@ -28,9 +27,6 @@ from synapse.handlers.presence import PresenceHandler from synapse.server import HomeServer -logging.getLogger().addHandler(logging.NullHandler()) - - OFFLINE = PresenceState.OFFLINE UNAVAILABLE = PresenceState.UNAVAILABLE ONLINE = PresenceState.ONLINE diff --git a/tests/rest/test_profile.py b/tests/rest/test_profile.py index f41810df1f..e6e51f6dd0 100644 --- a/tests/rest/test_profile.py +++ b/tests/rest/test_profile.py @@ -15,7 +15,7 @@ """Tests REST events for /profile paths.""" -from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock @@ -28,6 +28,7 @@ from synapse.server import HomeServer myid = "@1234ABCD:test" PATH_PREFIX = "/_matrix/client/api/v1" + class ProfileTestCase(unittest.TestCase): """ Tests profile management. """ diff --git a/tests/rest/utils.py b/tests/rest/utils.py index 77f5ecf0df..579441fb4a 100644 --- a/tests/rest/utils.py +++ b/tests/rest/utils.py @@ -17,7 +17,7 @@ from twisted.internet import defer # trial imports -from twisted.trial import unittest +from tests import unittest from synapse.api.constants import Membership @@ -95,8 +95,14 @@ class RestTestCase(unittest.TestCase): @defer.inlineCallbacks def register(self, user_id): - (code, response) = yield self.mock_resource.trigger("POST", "/register", - '{"user_id":"%s"}' % user_id) + (code, response) = yield self.mock_resource.trigger( + "POST", + "/register", + json.dumps({ + "user": user_id, + "password": "test", + "type": "m.login.password" + })) self.assertEquals(200, code) defer.returnValue(response) diff --git a/tests/storage/TESTS_NEEDED_FOR b/tests/storage/TESTS_NEEDED_FOR new file mode 100644 index 0000000000..8e5d0cbdc4 --- /dev/null +++ b/tests/storage/TESTS_NEEDED_FOR @@ -0,0 +1,5 @@ +synapse/storage/feedback.py +synapse/storage/keys.py +synapse/storage/pdu.py +synapse/storage/stream.py +synapse/storage/transactions.py diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 330311448d..3ad9a4b0c0 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -14,7 +14,7 @@ # limitations under the License. 
-from twisted.trial import unittest +from tests import unittest from twisted.internet import defer from mock import Mock, call diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py new file mode 100644 index 0000000000..7e8e7e1e83 --- /dev/null +++ b/tests/storage/test_directory.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.storage.directory import DirectoryStore + +from tests.utils import SQLiteMemoryDbPool + + +class DirectoryStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + self.store = DirectoryStore(hs) + + self.room = hs.parse_roomid("!abcde:test") + self.alias = hs.parse_roomalias("#my-room:test") + + @defer.inlineCallbacks + def test_room_to_alias(self): + yield self.store.create_room_alias_association( + room_alias=self.alias, + room_id=self.room.to_string(), + servers=["test"], + ) + + self.assertEquals( + ["#my-room:test"], + (yield self.store.get_aliases_for_room(self.room.to_string())) + ) + + @defer.inlineCallbacks + def test_alias_to_room(self): + yield self.store.create_room_alias_association( + room_alias=self.alias, + room_id=self.room.to_string(), + servers=["test"], + ) + + + self.assertObjectHasAttributes( + {"room_id": self.room.to_string(), + "servers": ["test"]}, + (yield self.store.get_association_from_room_alias(self.alias)) + ) diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py new file mode 100644 index 0000000000..9655d3cf42 --- /dev/null +++ b/tests/storage/test_presence.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.storage.presence import PresenceStore + +from tests.utils import SQLiteMemoryDbPool, MockClock + + +class PresenceStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + clock=MockClock(), + db_pool=db_pool, + ) + + self.store = PresenceStore(hs) + + self.u_apple = hs.parse_userid("@apple:test") + self.u_banana = hs.parse_userid("@banana:test") + + @defer.inlineCallbacks + def test_state(self): + yield self.store.create_presence( + self.u_apple.localpart + ) + + state = yield self.store.get_presence_state( + self.u_apple.localpart + ) + + self.assertEquals( + {"state": None, "status_msg": None, "mtime": None}, state + ) + + yield self.store.set_presence_state( + self.u_apple.localpart, {"state": "online", "status_msg": "Here"} + ) + + state = yield self.store.get_presence_state( + self.u_apple.localpart + ) + + self.assertEquals( + {"state": "online", "status_msg": "Here", "mtime": 1000000}, state + ) + + @defer.inlineCallbacks + def test_visibility(self): + self.assertFalse((yield self.store.is_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), + ))) + + yield self.store.allow_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), + ) + + self.assertTrue((yield self.store.is_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), + ))) + + yield self.store.disallow_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), + ) + + self.assertFalse((yield self.store.is_presence_visible( + observed_localpart=self.u_apple.localpart, + observer_userid=self.u_banana.to_string(), + ))) + + @defer.inlineCallbacks + def test_presence_list(self): + self.assertEquals( + [], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + )) + ) + self.assertEquals( + [], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + accepted=True, + )) + ) + + yield self.store.add_presence_list_pending( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) + + self.assertEquals( + [{"observed_user_id": "@banana:test", "accepted": 0}], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + )) + ) + self.assertEquals( + [], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + accepted=True, + )) + ) + + yield self.store.set_presence_list_accepted( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) + + self.assertEquals( + [{"observed_user_id": "@banana:test", "accepted": 1}], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + )) + ) + self.assertEquals( + [{"observed_user_id": "@banana:test", "accepted": 1}], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + accepted=True, + )) + ) + + yield self.store.del_presence_list( + observer_localpart=self.u_apple.localpart, + observed_userid=self.u_banana.to_string(), + ) + + self.assertEquals( + [], + (yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, + )) + ) + self.assertEquals( + [], + (yield self.store.get_presence_list( + 
observer_localpart=self.u_apple.localpart, + accepted=True, + )) + ) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py new file mode 100644 index 0000000000..5d36723c28 --- /dev/null +++ b/tests/storage/test_profile.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.storage.profile import ProfileStore + +from tests.utils import SQLiteMemoryDbPool + + +class ProfileStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + self.store = ProfileStore(hs) + + self.u_frank = hs.parse_userid("@frank:test") + + @defer.inlineCallbacks + def test_displayname(self): + yield self.store.create_profile( + self.u_frank.localpart + ) + + yield self.store.set_profile_displayname( + self.u_frank.localpart, "Frank" + ) + + self.assertEquals( + "Frank", + (yield self.store.get_profile_displayname(self.u_frank.localpart)) + ) + + @defer.inlineCallbacks + def test_avatar_url(self): + yield self.store.create_profile( + self.u_frank.localpart + ) + + yield self.store.set_profile_avatar_url( + self.u_frank.localpart, "http://my.site/here" + ) + + self.assertEquals( + "http://my.site/here", + (yield self.store.get_profile_avatar_url(self.u_frank.localpart)) + ) diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py new file mode 100644 index 0000000000..91e221d53e --- /dev/null +++ b/tests/storage/test_registration.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.storage.registration import RegistrationStore + +from tests.utils import SQLiteMemoryDbPool + + +class RegistrationStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + self.store = RegistrationStore(hs) + + self.user_id = "@my-user:test" + self.tokens = ["AbCdEfGhIjKlMnOpQrStUvWxYz", + "BcDeFgHiJkLmNoPqRsTuVwXyZa"] + self.pwhash = "{xx1}123456789" + + @defer.inlineCallbacks + def test_register(self): + yield self.store.register(self.user_id, self.tokens[0], self.pwhash) + + self.assertEquals( + # TODO(paul): Surely this field should be 'user_id', not 'name' + # Additionally surely it shouldn't come in a 1-element list + [{"name": self.user_id, "password_hash": self.pwhash}], + (yield self.store.get_user_by_id(self.user_id)) + ) + + self.assertEquals( + self.user_id, + (yield self.store.get_user_by_token(self.tokens[0])) + ) + + @defer.inlineCallbacks + def test_add_tokens(self): + yield self.store.register(self.user_id, self.tokens[0], self.pwhash) + yield self.store.add_access_token_to_user(self.user_id, self.tokens[1]) + + self.assertEquals( + self.user_id, + (yield self.store.get_user_by_token(self.tokens[1])) + ) + diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py new file mode 100644 index 0000000000..369a73d917 --- /dev/null +++ b/tests/storage/test_room.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.api.events.room import ( + RoomNameEvent, RoomTopicEvent +) + +from tests.utils import SQLiteMemoryDbPool + + +class RoomStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + # We can't test RoomStore on its own without the DirectoryStore, for + # management of the 'room_aliases' table + self.store = hs.get_datastore() + + self.room = hs.parse_roomid("!abcde:test") + self.alias = hs.parse_roomalias("#a-room-name:test") + self.u_creator = hs.parse_userid("@creator:test") + + yield self.store.store_room(self.room.to_string(), + room_creator_user_id=self.u_creator.to_string(), + is_public=True + ) + + @defer.inlineCallbacks + def test_get_room(self): + self.assertObjectHasAttributes( + {"room_id": self.room.to_string(), + "creator": self.u_creator.to_string(), + "is_public": True}, + (yield self.store.get_room(self.room.to_string())) + ) + + @defer.inlineCallbacks + def test_store_room_config(self): + yield self.store.store_room_config(self.room.to_string(), + visibility=False + ) + + self.assertObjectHasAttributes( + {"is_public": False}, + (yield self.store.get_room(self.room.to_string())) + ) + + @defer.inlineCallbacks + def test_get_rooms(self): + # get_rooms does an INNER JOIN on the room_aliases table :( + + rooms = yield self.store.get_rooms(is_public=True) + # Should be empty before we add the alias + self.assertEquals([], rooms) + + yield self.store.create_room_alias_association( + room_alias=self.alias, + room_id=self.room.to_string(), + servers=["test"] + ) + + rooms = yield self.store.get_rooms(is_public=True) + + self.assertEquals(1, len(rooms)) + self.assertEquals({ + "name": None, + "room_id": self.room.to_string(), + "topic": None, + "aliases": [self.alias.to_string()], + }, rooms[0]) + + +class RoomEventsStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + # Room events need the full datastore, for persist_event() and + # get_room_state() + self.store = hs.get_datastore() + self.event_factory = hs.get_event_factory(); + + self.room = hs.parse_roomid("!abcde:test") + + yield self.store.store_room(self.room.to_string(), + room_creator_user_id="@creator:text", + is_public=True + ) + + @defer.inlineCallbacks + def inject_room_event(self, **kwargs): + yield self.store.persist_event( + self.event_factory.create_event( + room_id=self.room.to_string(), + **kwargs + ) + ) + + @defer.inlineCallbacks + def test_room_name(self): + name = u"A-Room-Name" + + yield self.inject_room_event( + etype=RoomNameEvent.TYPE, + name=name, + content={"name": name}, + depth=1, + ) + + state = yield self.store.get_current_state( + room_id=self.room.to_string() + ) + + self.assertEquals(1, len(state)) + self.assertObjectHasAttributes( + {"type": "m.room.name", + "room_id": self.room.to_string(), + "name": name}, + state[0] + ) + + @defer.inlineCallbacks + def test_room_name(self): + topic = u"A place for things" + + yield self.inject_room_event( + etype=RoomTopicEvent.TYPE, + topic=topic, + content={"topic": topic}, + depth=1, + ) + + state = yield self.store.get_current_state( + room_id=self.room.to_string() + ) + + self.assertEquals(1, len(state)) + self.assertObjectHasAttributes( + {"type": 
"m.room.topic", + "room_id": self.room.to_string(), + "topic": topic}, + state[0] + ) + + # Not testing the various 'level' methods for now because there's lots + # of them and need coalescing; see JIRA SPEC-11 diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py new file mode 100644 index 0000000000..eae278ee8d --- /dev/null +++ b/tests/storage/test_roommember.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.api.constants import Membership +from synapse.api.events.room import RoomMemberEvent + +from tests.utils import SQLiteMemoryDbPool + + +class RoomMemberStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer("test", + db_pool=db_pool, + ) + + # We can't test the RoomMemberStore on its own without the other event + # storage logic + self.store = hs.get_datastore() + self.event_factory = hs.get_event_factory() + + self.u_alice = hs.parse_userid("@alice:test") + self.u_bob = hs.parse_userid("@bob:test") + + # User elsewhere on another host + self.u_charlie = hs.parse_userid("@charlie:elsewhere") + + self.room = hs.parse_roomid("!abc123:test") + + @defer.inlineCallbacks + def inject_room_member(self, room, user, membership): + # Have to create a join event using the eventfactory + yield self.store.persist_event( + self.event_factory.create_event( + etype=RoomMemberEvent.TYPE, + user_id=user.to_string(), + state_key=user.to_string(), + room_id=room.to_string(), + membership=membership, + content={"membership": membership}, + depth=1, + ) + ) + + @defer.inlineCallbacks + def test_one_member(self): + yield self.inject_room_member(self.room, self.u_alice, Membership.JOIN) + + self.assertEquals( + Membership.JOIN, + (yield self.store.get_room_member( + user_id=self.u_alice.to_string(), + room_id=self.room.to_string(), + )).membership + ) + self.assertEquals( + [self.u_alice.to_string()], + [m.user_id for m in ( + yield self.store.get_room_members(self.room.to_string()) + )] + ) + self.assertEquals( + [self.room.to_string()], + [m.room_id for m in ( + yield self.store.get_rooms_for_user_where_membership_is( + self.u_alice.to_string(), [Membership.JOIN] + )) + ] + ) + self.assertFalse( + (yield self.store.user_rooms_intersect( + [self.u_alice.to_string(), self.u_bob.to_string()] + )) + ) + + @defer.inlineCallbacks + def test_two_members(self): + yield self.inject_room_member(self.room, self.u_alice, Membership.JOIN) + yield self.inject_room_member(self.room, self.u_bob, Membership.JOIN) + + self.assertEquals( + {self.u_alice.to_string(), self.u_bob.to_string()}, + {m.user_id for m in ( + yield self.store.get_room_members(self.room.to_string()) + )} + ) + self.assertTrue( + (yield self.store.user_rooms_intersect( + [self.u_alice.to_string(), self.u_bob.to_string()] + )) + ) 
+ + @defer.inlineCallbacks + def test_room_hosts(self): + yield self.inject_room_member(self.room, self.u_alice, Membership.JOIN) + + self.assertEquals( + ["test"], + (yield self.store.get_joined_hosts_for_room(self.room.to_string())) + ) + + # Should still have just one host after second join from it + yield self.inject_room_member(self.room, self.u_bob, Membership.JOIN) + + self.assertEquals( + ["test"], + (yield self.store.get_joined_hosts_for_room(self.room.to_string())) + ) + + # Should now have two hosts after join from other host + yield self.inject_room_member(self.room, self.u_charlie, Membership.JOIN) + + self.assertEquals( + {"test", "elsewhere"}, + set((yield + self.store.get_joined_hosts_for_room(self.room.to_string()) + )) + ) + + # Should still have both hosts + yield self.inject_room_member(self.room, self.u_alice, Membership.LEAVE) + + self.assertEquals( + {"test", "elsewhere"}, + set((yield + self.store.get_joined_hosts_for_room(self.room.to_string()) + )) + ) + + # Should have only one host after other leaves + yield self.inject_room_member(self.room, self.u_charlie, Membership.LEAVE) + + self.assertEquals( + ["test"], + (yield self.store.get_joined_hosts_for_room(self.room.to_string())) + ) diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py new file mode 100644 index 0000000000..ab30e6ea25 --- /dev/null +++ b/tests/storage/test_stream.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
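
The stream tests that follow all read a user's view of the event stream the same way: capture a stream token, cause some events, capture a second token, then ask for everything visible to that user between the two. A small helper in that spirit, illustrative only; events_between is an invented name, and the fourth argument is passed as None because, per the comment in the tests, it is currently ignored::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def events_between(store, user, start_token, end_token):
        # Events visible to `user` between the two stream tokens.
        results, _ = yield store.get_room_events_stream(
            user.to_string(),
            start_token,
            end_token,
            None,  # currently ignored
        )
        defer.returnValue(results)

    # Usage inside an @defer.inlineCallbacks test:
    #   start = yield self.store.get_room_events_max_id()
    #   yield self.inject_message(self.room1, self.u_alice, u"test")
    #   end = yield self.store.get_room_events_max_id()
    #   seen = yield events_between(self.store, self.u_bob, start, end)
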
+ + +from tests import unittest +from twisted.internet import defer + +from synapse.server import HomeServer +from synapse.api.constants import Membership +from synapse.api.events.room import RoomMemberEvent, MessageEvent + +from tests.utils import SQLiteMemoryDbPool + + +class StreamStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + + hs = HomeServer( + "test", + db_pool=db_pool, + ) + + self.store = hs.get_datastore() + self.event_factory = hs.get_event_factory() + + self.u_alice = hs.parse_userid("@alice:test") + self.u_bob = hs.parse_userid("@bob:test") + + self.room1 = hs.parse_roomid("!abc123:test") + self.room2 = hs.parse_roomid("!xyx987:test") + + self.depth = 1 + + @defer.inlineCallbacks + def inject_room_member(self, room, user, membership, prev_state=None): + self.depth += 1 + + event = self.event_factory.create_event( + etype=RoomMemberEvent.TYPE, + user_id=user.to_string(), + state_key=user.to_string(), + room_id=room.to_string(), + membership=membership, + content={"membership": membership}, + depth=self.depth, + ) + + if prev_state: + event.prev_state = prev_state + + # Have to create a join event using the eventfactory + yield self.store.persist_event( + event + ) + + defer.returnValue(event) + + @defer.inlineCallbacks + def inject_message(self, room, user, body): + self.depth += 1 + + # Have to create a join event using the eventfactory + yield self.store.persist_event( + self.event_factory.create_event( + etype=MessageEvent.TYPE, + user_id=user.to_string(), + room_id=room.to_string(), + content={"body": body, "msgtype": u"message"}, + depth=self.depth, + ) + ) + + @defer.inlineCallbacks + def test_event_stream_get_other(self): + # Both bob and alice joins the room + yield self.inject_room_member( + self.room1, self.u_alice, Membership.JOIN + ) + yield self.inject_room_member( + self.room1, self.u_bob, Membership.JOIN + ) + + # Initial stream key: + start = yield self.store.get_room_events_max_id() + + yield self.inject_message(self.room1, self.u_alice, u"test") + + end = yield self.store.get_room_events_max_id() + + results, _ = yield self.store.get_room_events_stream( + self.u_bob.to_string(), + start, + end, + None, # Is currently ignored + ) + + self.assertEqual(1, len(results)) + + event = results[0] + + self.assertObjectHasAttributes( + { + "type": MessageEvent.TYPE, + "user_id": self.u_alice.to_string(), + "content": {"body": "test", "msgtype": "message"}, + }, + event, + ) + + @defer.inlineCallbacks + def test_event_stream_get_own(self): + # Both bob and alice joins the room + yield self.inject_room_member( + self.room1, self.u_alice, Membership.JOIN + ) + yield self.inject_room_member( + self.room1, self.u_bob, Membership.JOIN + ) + + # Initial stream key: + start = yield self.store.get_room_events_max_id() + + yield self.inject_message(self.room1, self.u_alice, u"test") + + end = yield self.store.get_room_events_max_id() + + results, _ = yield self.store.get_room_events_stream( + self.u_alice.to_string(), + start, + end, + None, # Is currently ignored + ) + + self.assertEqual(1, len(results)) + + event = results[0] + + self.assertObjectHasAttributes( + { + "type": MessageEvent.TYPE, + "user_id": self.u_alice.to_string(), + "content": {"body": "test", "msgtype": "message"}, + }, + event, + ) + + @defer.inlineCallbacks + def test_event_stream_join_leave(self): + # Both bob and alice joins the room + yield self.inject_room_member( + self.room1, self.u_alice, Membership.JOIN + ) 
+        yield self.inject_room_member(
+            self.room1, self.u_bob, Membership.JOIN
+        )
+
+        # Then bob leaves again.
+        yield self.inject_room_member(
+            self.room1, self.u_bob, Membership.LEAVE
+        )
+
+        # Initial stream key:
+        start = yield self.store.get_room_events_max_id()
+
+        yield self.inject_message(self.room1, self.u_alice, u"test")
+
+        end = yield self.store.get_room_events_max_id()
+
+        results, _ = yield self.store.get_room_events_stream(
+            self.u_bob.to_string(),
+            start,
+            end,
+            None,  # Is currently ignored
+        )
+
+        # We should not get the message, as it happened *after* bob left.
+        self.assertEqual(0, len(results))
+
+    @defer.inlineCallbacks
+    def test_event_stream_prev_content(self):
+        yield self.inject_room_member(
+            self.room1, self.u_bob, Membership.JOIN
+        )
+
+        event1 = yield self.inject_room_member(
+            self.room1, self.u_alice, Membership.JOIN
+        )
+
+        start = yield self.store.get_room_events_max_id()
+
+        event2 = yield self.inject_room_member(
+            self.room1, self.u_alice, Membership.JOIN,
+            prev_state=event1.event_id,
+        )
+
+        end = yield self.store.get_room_events_max_id()
+
+        results, _ = yield self.store.get_room_events_stream(
+            self.u_bob.to_string(),
+            start,
+            end,
+            None,  # Is currently ignored
+        )
+
+        # We should get exactly one event back: the second join, which should
+        # carry a prev_content key describing the state it replaced.
+        self.assertEqual(1, len(results))
+
+        event = results[0]
+
+        self.assertTrue(hasattr(event, "prev_content"), msg="No prev_content key")
diff --git a/tests/test_distributor.py b/tests/test_distributor.py
index 04933f0ecf..39c5b8dff2 100644
--- a/tests/test_distributor.py
+++ b/tests/test_distributor.py
@@ -13,8 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from tests import unittest
 from twisted.internet import defer
-from twisted.trial import unittest
 
 from mock import Mock, patch
diff --git a/tests/test_state.py b/tests/test_state.py
index a1f5ee869b..b1624f0b25 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -13,23 +13,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
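
In the reworked state tests below, power levels are no longer carried on the fake PDUs; the state handler looks them up per user through the persistence layer and yields on the result, so the tests stub get_power_level with a side effect that returns a Deferred. A condensed sketch of that stubbing pattern, mirroring the _gen_get_power_level() helper added below (power_levels is a renamed stand-in and the Mock spec is trimmed to the relevant method)::

    from mock import Mock
    from twisted.internet import defer

    def power_levels(levels):
        # The state handler yields on persistence.get_power_level(room_id,
        # user_id), so the stub must return a Deferred, not a plain int.
        def get_power_level(room_id, user_id):
            return defer.succeed(levels.get(user_id))
        return get_power_level

    persistence = Mock(spec=["get_power_level"])
    persistence.get_power_level.side_effect = power_levels({
        "u1": 10,  # the user who set the existing state
        "u2": 5,   # the user sending the new state event
    })
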
+from tests import unittest from twisted.internet import defer -from twisted.trial import unittest +from twisted.python.log import PythonLoggingObserver from synapse.state import StateHandler from synapse.storage.pdu import PduEntry from synapse.federation.pdu_codec import encode_event_id +from synapse.federation.units import Pdu from collections import namedtuple from mock import Mock +import mock + ReturnType = namedtuple( "StateReturnType", ["new_branch", "current_branch"] ) +def _gen_get_power_level(power_level_list): + def get_power_level(room_id, user_id): + return defer.succeed(power_level_list.get(user_id, None)) + return get_power_level + class StateTestCase(unittest.TestCase): def setUp(self): self.persistence = Mock(spec=[ @@ -38,6 +47,7 @@ class StateTestCase(unittest.TestCase): "get_latest_pdus_in_context", "get_current_state_pdu", "get_pdu", + "get_power_level", ]) self.replication = Mock(spec=["get_pdu"]) @@ -51,10 +61,12 @@ class StateTestCase(unittest.TestCase): @defer.inlineCallbacks def test_new_state_key(self): # We've never seen anything for this state before - new_pdu = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) + new_pdu = new_fake_pdu("A", "test", "mem", "x", None, "u") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({}) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu], []) + (ReturnType([new_pdu], []), None) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -74,11 +86,44 @@ class StateTestCase(unittest.TestCase): # We do a direct overwriting of the old state, i.e., the new state # points to the old state. - old_pdu = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) - new_pdu = new_fake_pdu_entry("B", "test", "mem", "x", "A", 5) + old_pdu = new_fake_pdu("A", "test", "mem", "x", None, "u1") + new_pdu = new_fake_pdu("B", "test", "mem", "x", "A", "u2") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 5, + }) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu, old_pdu], [old_pdu]) + (ReturnType([new_pdu, old_pdu], [old_pdu]), None) + ) + + is_new = yield self.state.handle_new_state(new_pdu) + + self.assertTrue(is_new) + + self.persistence.get_unresolved_state_tree.assert_called_once_with( + new_pdu + ) + + self.assertEqual(1, self.persistence.update_current_state.call_count) + + self.assertFalse(self.replication.get_pdu.called) + + @defer.inlineCallbacks + def test_overwrite(self): + old_pdu_1 = new_fake_pdu("A", "test", "mem", "x", None, "u1") + old_pdu_2 = new_fake_pdu("B", "test", "mem", "x", "A", "u2") + new_pdu = new_fake_pdu("C", "test", "mem", "x", "B", "u3") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 5, + "u3": 0, + }) + + self.persistence.get_unresolved_state_tree.return_value = ( + (ReturnType([new_pdu, old_pdu_2, old_pdu_1], [old_pdu_1]), None) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -98,12 +143,18 @@ class StateTestCase(unittest.TestCase): # We try to update the state based on an outdated state, and have a # too low power level. 
- old_pdu_1 = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) - old_pdu_2 = new_fake_pdu_entry("B", "test", "mem", "x", None, 10) - new_pdu = new_fake_pdu_entry("C", "test", "mem", "x", "A", 5) + old_pdu_1 = new_fake_pdu("A", "test", "mem", "x", None, "u1") + old_pdu_2 = new_fake_pdu("B", "test", "mem", "x", None, "u2") + new_pdu = new_fake_pdu("C", "test", "mem", "x", "A", "u3") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 5, + }) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]) + (ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]), None) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -123,12 +174,18 @@ class StateTestCase(unittest.TestCase): # We try to update the state based on an outdated state, but have # sufficient power level to force the update. - old_pdu_1 = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) - old_pdu_2 = new_fake_pdu_entry("B", "test", "mem", "x", None, 10) - new_pdu = new_fake_pdu_entry("C", "test", "mem", "x", "A", 15) + old_pdu_1 = new_fake_pdu("A", "test", "mem", "x", None, "u1") + old_pdu_2 = new_fake_pdu("B", "test", "mem", "x", None, "u2") + new_pdu = new_fake_pdu("C", "test", "mem", "x", "A", "u3") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 15, + }) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]) + (ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]), None) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -148,12 +205,18 @@ class StateTestCase(unittest.TestCase): # We try to update the state based on an outdated state, the power # levels are the same and so are the branch lengths - old_pdu_1 = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) - old_pdu_2 = new_fake_pdu_entry("B", "test", "mem", "x", None, 10) - new_pdu = new_fake_pdu_entry("C", "test", "mem", "x", "A", 10) + old_pdu_1 = new_fake_pdu("A", "test", "mem", "x", None, "u1") + old_pdu_2 = new_fake_pdu("B", "test", "mem", "x", None, "u2") + new_pdu = new_fake_pdu("C", "test", "mem", "x", "A", "u3") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 10, + }) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]) + (ReturnType([new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1]), None) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -173,13 +236,26 @@ class StateTestCase(unittest.TestCase): # We try to update the state based on an outdated state, the power # levels are the same but the branch length of the new one is longer. 
- old_pdu_1 = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) - old_pdu_2 = new_fake_pdu_entry("B", "test", "mem", "x", None, 10) - old_pdu_3 = new_fake_pdu_entry("C", "test", "mem", "x", "A", 10) - new_pdu = new_fake_pdu_entry("D", "test", "mem", "x", "C", 10) + old_pdu_1 = new_fake_pdu("A", "test", "mem", "x", None, "u1") + old_pdu_2 = new_fake_pdu("B", "test", "mem", "x", None, "u2") + old_pdu_3 = new_fake_pdu("C", "test", "mem", "x", "A", "u3") + new_pdu = new_fake_pdu("D", "test", "mem", "x", "C", "u4") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 10, + "u4": 10, + }) self.persistence.get_unresolved_state_tree.return_value = ( - ReturnType([new_pdu, old_pdu_3, old_pdu_1], [old_pdu_2, old_pdu_1]) + ( + ReturnType( + [new_pdu, old_pdu_3, old_pdu_1], + [old_pdu_2, old_pdu_1] + ), + None + ) ) is_new = yield self.state.handle_new_state(new_pdu) @@ -200,22 +276,38 @@ class StateTestCase(unittest.TestCase): # triggering a get_pdu request # The pdu we haven't seen - old_pdu_1 = new_fake_pdu_entry("A", "test", "mem", "x", None, 10) + old_pdu_1 = new_fake_pdu( + "A", "test", "mem", "x", None, "u1", depth=0 + ) - old_pdu_2 = new_fake_pdu_entry("B", "test", "mem", "x", None, 10) - new_pdu = new_fake_pdu_entry("C", "test", "mem", "x", "A", 20) + old_pdu_2 = new_fake_pdu( + "B", "test", "mem", "x", "A", "u2", depth=1 + ) + new_pdu = new_fake_pdu( + "C", "test", "mem", "x", "A", "u3", depth=2 + ) + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 20, + }) # The return_value of `get_unresolved_state_tree`, which changes after # the call to get_pdu - tree_to_return = [ReturnType([new_pdu], [old_pdu_2])] + tree_to_return = [(ReturnType([new_pdu], [old_pdu_2]), 0)] def return_tree(p): return tree_to_return[0] - def set_return_tree(*args, **kwargs): - tree_to_return[0] = ReturnType( - [new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1] + def set_return_tree(destination, pdu_origin, pdu_id, outlier=False): + tree_to_return[0] = ( + ReturnType( + [new_pdu, old_pdu_1], [old_pdu_2, old_pdu_1] + ), + None ) + return defer.succeed(None) self.persistence.get_unresolved_state_tree.side_effect = return_tree @@ -227,6 +319,13 @@ class StateTestCase(unittest.TestCase): self.assertTrue(is_new) + self.replication.get_pdu.assert_called_with( + destination=new_pdu.origin, + pdu_origin=old_pdu_1.origin, + pdu_id=old_pdu_1.pdu_id, + outlier=True + ) + self.persistence.get_unresolved_state_tree.assert_called_with( new_pdu ) @@ -237,11 +336,233 @@ class StateTestCase(unittest.TestCase): self.assertEqual(1, self.persistence.update_current_state.call_count) + @defer.inlineCallbacks + def test_missing_pdu_depth_1(self): + # We try to update state against a PDU we haven't yet seen, + # triggering a get_pdu request + + # The pdu we haven't seen + old_pdu_1 = new_fake_pdu( + "A", "test", "mem", "x", None, "u1", depth=0 + ) + + old_pdu_2 = new_fake_pdu( + "B", "test", "mem", "x", "A", "u2", depth=2 + ) + old_pdu_3 = new_fake_pdu( + "C", "test", "mem", "x", "B", "u3", depth=3 + ) + new_pdu = new_fake_pdu( + "D", "test", "mem", "x", "A", "u4", depth=4 + ) + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 10, + "u4": 20, + }) + + # The return_value of `get_unresolved_state_tree`, which changes after + # the call to get_pdu + tree_to_return = [ + ( + ReturnType([new_pdu], [old_pdu_3]), + 0 + ), + ( + ReturnType( + [new_pdu, old_pdu_1], [old_pdu_3] + ), + 1 + ), + ( 
+ ReturnType( + [new_pdu, old_pdu_1], [old_pdu_3, old_pdu_2, old_pdu_1] + ), + None + ), + ] + + to_return = [0] + + def return_tree(p): + return tree_to_return[to_return[0]] + + def set_return_tree(destination, pdu_origin, pdu_id, outlier=False): + to_return[0] += 1 + return defer.succeed(None) + + self.persistence.get_unresolved_state_tree.side_effect = return_tree + + self.replication.get_pdu.side_effect = set_return_tree + + self.persistence.get_pdu.return_value = None + + is_new = yield self.state.handle_new_state(new_pdu) + + self.assertTrue(is_new) + + self.assertEqual(2, self.replication.get_pdu.call_count) + + self.replication.get_pdu.assert_has_calls( + [ + mock.call( + destination=new_pdu.origin, + pdu_origin=old_pdu_1.origin, + pdu_id=old_pdu_1.pdu_id, + outlier=True + ), + mock.call( + destination=old_pdu_3.origin, + pdu_origin=old_pdu_2.origin, + pdu_id=old_pdu_2.pdu_id, + outlier=True + ), + ] + ) + + self.persistence.get_unresolved_state_tree.assert_called_with( + new_pdu + ) + + self.assertEquals( + 3, self.persistence.get_unresolved_state_tree.call_count + ) + + self.assertEqual(1, self.persistence.update_current_state.call_count) + + @defer.inlineCallbacks + def test_missing_pdu_depth_2(self): + # We try to update state against a PDU we haven't yet seen, + # triggering a get_pdu request + + # The pdu we haven't seen + old_pdu_1 = new_fake_pdu( + "A", "test", "mem", "x", None, "u1", depth=0 + ) + + old_pdu_2 = new_fake_pdu( + "B", "test", "mem", "x", "A", "u2", depth=2 + ) + old_pdu_3 = new_fake_pdu( + "C", "test", "mem", "x", "B", "u3", depth=3 + ) + new_pdu = new_fake_pdu( + "D", "test", "mem", "x", "A", "u4", depth=1 + ) + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 10, + "u2": 10, + "u3": 10, + "u4": 20, + }) + + # The return_value of `get_unresolved_state_tree`, which changes after + # the call to get_pdu + tree_to_return = [ + ( + ReturnType([new_pdu], [old_pdu_3]), + 1, + ), + ( + ReturnType( + [new_pdu], [old_pdu_3, old_pdu_2] + ), + 0, + ), + ( + ReturnType( + [new_pdu, old_pdu_1], [old_pdu_3, old_pdu_2, old_pdu_1] + ), + None + ), + ] + + to_return = [0] + + def return_tree(p): + return tree_to_return[to_return[0]] + + def set_return_tree(destination, pdu_origin, pdu_id, outlier=False): + to_return[0] += 1 + return defer.succeed(None) + + self.persistence.get_unresolved_state_tree.side_effect = return_tree + + self.replication.get_pdu.side_effect = set_return_tree + + self.persistence.get_pdu.return_value = None + + is_new = yield self.state.handle_new_state(new_pdu) + + self.assertTrue(is_new) + + self.assertEqual(2, self.replication.get_pdu.call_count) + + self.replication.get_pdu.assert_has_calls( + [ + mock.call( + destination=old_pdu_3.origin, + pdu_origin=old_pdu_2.origin, + pdu_id=old_pdu_2.pdu_id, + outlier=True + ), + mock.call( + destination=new_pdu.origin, + pdu_origin=old_pdu_1.origin, + pdu_id=old_pdu_1.pdu_id, + outlier=True + ), + ] + ) + + self.persistence.get_unresolved_state_tree.assert_called_with( + new_pdu + ) + + self.assertEquals( + 3, self.persistence.get_unresolved_state_tree.call_count + ) + + self.assertEqual(1, self.persistence.update_current_state.call_count) + + @defer.inlineCallbacks + def test_no_common_ancestor(self): + # We do a direct overwriting of the old state, i.e., the new state + # points to the old state. 
+ + old_pdu = new_fake_pdu("A", "test", "mem", "x", None, "u1") + new_pdu = new_fake_pdu("B", "test", "mem", "x", None, "u2") + + self.persistence.get_power_level.side_effect = _gen_get_power_level({ + "u1": 5, + "u2": 10, + }) + + self.persistence.get_unresolved_state_tree.return_value = ( + (ReturnType([new_pdu], [old_pdu]), None) + ) + + is_new = yield self.state.handle_new_state(new_pdu) + + self.assertTrue(is_new) + + self.persistence.get_unresolved_state_tree.assert_called_once_with( + new_pdu + ) + + self.assertEqual(1, self.persistence.update_current_state.call_count) + + self.assertFalse(self.replication.get_pdu.called) + @defer.inlineCallbacks def test_new_event(self): event = Mock() + event.event_id = "12123123@test" - state_pdu = new_fake_pdu_entry("C", "test", "mem", "x", "A", 20) + state_pdu = new_fake_pdu("C", "test", "mem", "x", "A", 20) snapshot = Mock() snapshot.prev_state_pdu = state_pdu @@ -268,24 +589,25 @@ class StateTestCase(unittest.TestCase): ) -def new_fake_pdu_entry(pdu_id, context, pdu_type, state_key, prev_state_id, - power_level): - new_pdu = PduEntry( +def new_fake_pdu(pdu_id, context, pdu_type, state_key, prev_state_id, + user_id, depth=0): + new_pdu = Pdu( pdu_id=pdu_id, pdu_type=pdu_type, state_key=state_key, - power_level=power_level, + user_id=user_id, prev_state_id=prev_state_id, origin="example.com", context="context", ts=1405353060021, - depth=0, + depth=depth, content_json="{}", unrecognized_keys="{}", outlier=True, is_state=True, prev_state_origin="example.com", have_processed=True, + content={}, ) return new_pdu diff --git a/tests/test_types.py b/tests/test_types.py index 571938356c..276ecc91fd 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest +from tests import unittest from synapse.server import BaseHomeServer from synapse.types import UserID, RoomAlias diff --git a/tests/unittest.py b/tests/unittest.py new file mode 100644 index 0000000000..a9c0e05541 --- /dev/null +++ b/tests/unittest.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.trial import unittest + +import logging + + +# logging doesn't have a "don't log anything at all EVARRRR setting, +# but since the highest value is 50, 1000000 should do ;) +NEVER = 1000000 + +logging.getLogger().addHandler(logging.StreamHandler()) +logging.getLogger().setLevel(NEVER) + + +def around(target): + """A CLOS-style 'around' modifier, which wraps the original method of the + given instance with another piece of code. 
+ + @around(self) + def method_name(orig, *args, **kwargs): + return orig(*args, **kwargs) + """ + def _around(code): + name = code.__name__ + orig = getattr(target, name) + def new(*args, **kwargs): + return code(orig, *args, **kwargs) + setattr(target, name, new) + return _around + + +class TestCase(unittest.TestCase): + """A subclass of twisted.trial's TestCase which looks for 'loglevel' + attributes on both itself and its individual test methods, to override the + root logger's logging level while that test (case|method) runs.""" + + def __init__(self, methodName, *args, **kwargs): + super(TestCase, self).__init__(methodName, *args, **kwargs) + + method = getattr(self, methodName) + + level = getattr(method, "loglevel", + getattr(self, "loglevel", + NEVER)) + + @around(self) + def setUp(orig): + old_level = logging.getLogger().level + + if old_level != level: + @around(self) + def tearDown(orig): + ret = orig() + logging.getLogger().setLevel(old_level) + return ret + + logging.getLogger().setLevel(level) + return orig() + + def assertObjectHasAttributes(self, attrs, obj): + """Asserts that the given object has each of the attributes given, and + that the value of each matches according to assertEquals.""" + for (key, value) in attrs.items(): + if not hasattr(obj, key): + raise AssertionError("Expected obj to have a '.%s'" % key) + try: + self.assertEquals(attrs[key], getattr(obj, key)) + except AssertionError as e: + raise (type(e))(e.message + " for '.%s'" % key) + + +def DEBUG(target): + """A decorator to set the .loglevel attribute to logging.DEBUG. + Can apply to either a TestCase or an individual test method.""" + target.loglevel = logging.DEBUG + return target diff --git a/tests/util/test_lock.py b/tests/util/test_lock.py index 5623d78423..6a1e521b1e 100644 --- a/tests/util/test_lock.py +++ b/tests/util/test_lock.py @@ -15,7 +15,7 @@ from twisted.internet import defer -from twisted.trial import unittest +from tests import unittest from synapse.util.lockutils import LockManager @@ -105,4 +105,4 @@ class LockManagerTestCase(unittest.TestCase): pass with (yield self.lock_manager.lock(key)): - pass \ No newline at end of file + pass diff --git a/tests/utils.py b/tests/utils.py index d90214e418..bc5d35e56b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -16,12 +16,14 @@ from synapse.http.server import HttpServer from synapse.api.errors import cs_error, CodeMessageException, StoreError from synapse.api.constants import Membership +from synapse.storage import prepare_database from synapse.api.events.room import ( RoomMemberEvent, MessageEvent ) from twisted.internet import defer, reactor +from twisted.enterprise.adbapi import ConnectionPool from collections import namedtuple from mock import patch, Mock @@ -120,6 +122,18 @@ class MockClock(object): self.now += secs +class SQLiteMemoryDbPool(ConnectionPool, object): + def __init__(self): + super(SQLiteMemoryDbPool, self).__init__( + "sqlite3", ":memory:", + cp_min=1, + cp_max=1, + ) + + def prepare(self): + return self.runWithConnection(prepare_database) + + class MemoryDataStore(object): Room = namedtuple( diff --git a/webclient/CAPTCHA_SETUP b/webclient/CAPTCHA_SETUP new file mode 100644 index 0000000000..ebc8a5f3b0 --- /dev/null +++ b/webclient/CAPTCHA_SETUP @@ -0,0 +1,46 @@ +Captcha can be enabled for this web client / home server. This file explains how to do that. +The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google. 
+ +Getting keys +------------ +Requires a public/private key pair from: + +https://developers.google.com/recaptcha/ + + +Setting Private ReCaptcha Key +----------------------------- +The private key is a config option on the home server config. If it is not +visible, you can generate it via --generate-config. Set the following value: + + recaptcha_private_key: YOUR_PRIVATE_KEY + +In addition, you MUST enable captchas via: + + enable_registration_captcha: true + +Setting Public ReCaptcha Key +---------------------------- +The web client will look for the global variable webClientConfig for config +options. You should put your ReCaptcha public key there like so: + +webClientConfig = { + useCaptcha: true, + recaptcha_public_key: "YOUR_PUBLIC_KEY" +} + +This should be put in webclient/config.js which is already .gitignored, rather +than in the web client source files. You MUST set useCaptcha to true else a +ReCaptcha widget will not be generated. + +Configuring IP used for auth +---------------------------- +The ReCaptcha API requires that the IP address of the user who solved the +captcha is sent. If the client is connecting through a proxy or load balancer, +it may be required to use the X-Forwarded-For (XFF) header instead of the origin +IP address. This can be configured as an option on the home server like so: + + captcha_ip_origin_is_x_forwarded: true + + + diff --git a/webclient/README b/webclient/README index 0f893b1712..ef79b25708 100644 --- a/webclient/README +++ b/webclient/README @@ -1,12 +1,13 @@ Basic Usage ----------- -The Synapse web client needs to be hosted by a basic HTTP server. - -You can use the Python simple HTTP server:: +The web client should automatically run when running the home server. +Alternatively, you can run it stand-alone: $ python -m SimpleHTTPServer Then, open this URL in a WEB browser:: http://127.0.0.1:8000/ + + diff --git a/webclient/app-controller.js b/webclient/app-controller.js index ea48cbb011..0e823b43e7 100644 --- a/webclient/app-controller.js +++ b/webclient/app-controller.js @@ -21,11 +21,17 @@ limitations under the License. 
'use strict'; angular.module('MatrixWebClientController', ['matrixService', 'mPresence', 'eventStreamService']) -.controller('MatrixWebClientController', ['$scope', '$location', '$rootScope', 'matrixService', 'mPresence', 'eventStreamService', 'matrixPhoneService', - function($scope, $location, $rootScope, matrixService, mPresence, eventStreamService, matrixPhoneService) { +.controller('MatrixWebClientController', ['$scope', '$location', '$rootScope', '$timeout', '$animate', 'matrixService', 'mPresence', 'eventStreamService', 'eventHandlerService', 'matrixPhoneService', + function($scope, $location, $rootScope, $timeout, $animate, matrixService, mPresence, eventStreamService, eventHandlerService, matrixPhoneService) { // Check current URL to avoid to display the logout button on the login page $scope.location = $location.path(); + + // disable nganimate for the local and remote video elements because ngAnimate appears + // to be buggy and leaves animation classes on the video elements causing them to show + // when they should not (their animations are pure CSS3) + $animate.enabled(false, angular.element('#localVideo')); + $animate.enabled(false, angular.element('#remoteVideo')); // Update the location state when the ng location changed $rootScope.$on('$routeChangeSuccess', function (event, current, previous) { @@ -73,7 +79,10 @@ angular.module('MatrixWebClientController', ['matrixService', 'mPresence', 'even // Clean permanent data matrixService.setConfig({}); matrixService.saveConfig(); - + + // Reset cached data + eventHandlerService.reset(); + // And go to the login page $location.url("login"); }; @@ -89,26 +98,114 @@ angular.module('MatrixWebClientController', ['matrixService', 'mPresence', 'even $scope.user_id = matrixService.config().user_id; }; + $rootScope.$watch('currentCall', function(newVal, oldVal) { + if (!$rootScope.currentCall) { + // This causes the still frame to be flushed out of the video elements, + // avoiding a flash of the last frame of the previous call when starting the next + angular.element('#localVideo')[0].load(); + angular.element('#remoteVideo')[0].load(); + return; + } + + var roomMembers = angular.copy($rootScope.events.rooms[$rootScope.currentCall.room_id].members); + delete roomMembers[matrixService.config().user_id]; + + $rootScope.currentCall.user_id = Object.keys(roomMembers)[0]; + + // set it to the user ID until we fetch the display name + $rootScope.currentCall.userProfile = { displayname: $rootScope.currentCall.user_id }; + + matrixService.getProfile($rootScope.currentCall.user_id).then( + function(response) { + if (response.data.displayname) $rootScope.currentCall.userProfile.displayname = response.data.displayname; + if (response.data.avatar_url) $rootScope.currentCall.userProfile.avatar_url = response.data.avatar_url; + }, + function(error) { + $scope.feedback = "Can't load user profile"; + } + ); + }); + $rootScope.$watch('currentCall.state', function(newVal, oldVal) { + if (newVal == 'ringing') { + angular.element('#ringbackAudio')[0].pause(); + angular.element('#ringAudio')[0].load(); + angular.element('#ringAudio')[0].play(); + } else if (newVal == 'invite_sent') { + angular.element('#ringAudio')[0].pause(); + angular.element('#ringbackAudio')[0].load(); + angular.element('#ringbackAudio')[0].play(); + } else if (newVal == 'ended' && oldVal == 'connected') { + angular.element('#ringAudio')[0].pause(); + angular.element('#ringbackAudio')[0].pause(); + angular.element('#callendAudio')[0].play(); + $scope.videoMode = undefined; + } else if 
(newVal == 'ended' && oldVal == 'invite_sent' && $rootScope.currentCall.hangupParty == 'remote') { + angular.element('#ringAudio')[0].pause(); + angular.element('#ringbackAudio')[0].pause(); + angular.element('#busyAudio')[0].play(); + } else if (newVal == 'ended' && oldVal == 'invite_sent' && $rootScope.currentCall.hangupParty == 'local' && $rootScope.currentCall.hangupReason == 'invite_timeout') { + angular.element('#ringAudio')[0].pause(); + angular.element('#ringbackAudio')[0].pause(); + angular.element('#busyAudio')[0].play(); + } else if (oldVal == 'invite_sent') { + angular.element('#ringbackAudio')[0].pause(); + } else if (oldVal == 'ringing') { + angular.element('#ringAudio')[0].pause(); + } else if (newVal == 'connected') { + $timeout(function() { + if ($scope.currentCall.type == 'video') $scope.videoMode = 'large'; + }, 500); + } + + if ($rootScope.currentCall && $rootScope.currentCall.type == 'video' && $rootScope.currentCall.state != 'connected') { + $scope.videoMode = 'mini'; + } + }); + $rootScope.$watch('currentCall.type', function(newVal, oldVal) { + // need to listen for this too as the type of the call won't be know when it's created + if ($rootScope.currentCall && $rootScope.currentCall.type == 'video' && $rootScope.currentCall.state != 'connected') { + $scope.videoMode = 'mini'; + } + }); + $rootScope.$on(matrixPhoneService.INCOMING_CALL_EVENT, function(ngEvent, call) { - console.trace("incoming call"); + console.log("incoming call"); + if ($rootScope.currentCall && $rootScope.currentCall.state != 'ended') { + console.log("rejecting call because we're already in a call"); + call.hangup(); + return; + } call.onError = $scope.onCallError; call.onHangup = $scope.onCallHangup; + call.localVideoElement = angular.element('#localVideo')[0]; + call.remoteVideoElement = angular.element('#remoteVideo')[0]; $rootScope.currentCall = call; }); + $rootScope.$on(matrixPhoneService.REPLACED_CALL_EVENT, function(ngEvent, oldCall, newCall) { + console.log("call ID "+oldCall.call_id+" has been replaced by call ID "+newCall.call_id+"!"); + newCall.onError = $scope.onCallError; + newCall.onHangup = $scope.onCallHangup; + $rootScope.currentCall = newCall; + }); + $scope.answerCall = function() { - $scope.currentCall.answer(); + $rootScope.currentCall.answer(); }; $scope.hangupCall = function() { - $scope.currentCall.hangup(); - $scope.currentCall = undefined; + $rootScope.currentCall.hangup(); }; $rootScope.onCallError = function(errStr) { $scope.feedback = errStr; - } + }; - $rootScope.onCallHangup = function() { - } + $rootScope.onCallHangup = function(call) { + if (call == $rootScope.currentCall) { + $timeout(function(){ + if (call == $rootScope.currentCall) $rootScope.currentCall = undefined; + }, 4070); + } + }; }]); diff --git a/webclient/app.css b/webclient/app.css index dbee02f83d..bdf475d635 100755 --- a/webclient/app.css +++ b/webclient/app.css @@ -20,7 +20,12 @@ a:visited { color: #666; } a:hover { color: #000; } a:active { color: #000; } -#page { +textarea, input { + font-family: inherit; + font-size: inherit; +} + +.page { min-height: 100%; margin-bottom: -32px; /* to make room for the footer */ } @@ -34,9 +39,15 @@ a:active { color: #000; } padding-right: 20px; } +#unsupportedBrowser { + padding-top: 240px; + text-align: center; +} + #header { position: absolute; + z-index: 2; top: 0px; width: 100%; background-color: #333; @@ -44,7 +55,123 @@ a:active { color: #000; } } #callBar { - float: left; + float: left; + height: 32px; + margin: auto; + text-align: right; + line-height: 
16px; +} + +.callIcon { + margin-left: 4px; + margin-right: 4px; + margin-top: 8px; +} + +#callEndedIcon { + transition:all linear 0.5s; +} + +#callEndedIcon { + transform: rotateZ(45deg); +} + +#callEndedIcon.ng-hide { + transform: rotateZ(0deg); +} + +#callPeerImage { + width: 32px; + height: 32px; + border: none; + float: left; +} + +#callPeerNameAndState { + float: left; + margin-left: 4px; +} + +#callState { + font-size: 60%; +} + +#callPeerName { + font-size: 80%; +} + +#videoBackground { + position: absolute; + height: 100%; + width: 100%; + top: 0px; + left: 0px; + z-index: 1; + background-color: rgba(0,0,0,0.0); + pointer-events: none; + transition: background-color linear 500ms; +} + +#videoBackground.large { + background-color: rgba(0,0,0,0.85); + pointer-events: auto; +} + +#videoContainer { + position: relative; + top: 32px; + max-width: 1280px; + margin: auto; +} + +#videoContainerPadding { + width: 1280px; +} + +#localVideo { + position: absolute; + width: 128px; + height: 72px; + z-index: 1; + transition: left linear 500ms, top linear 500ms, width linear 500ms, height linear 500ms; +} + +#localVideo.mini { + top: 0px; + left: 130px; +} + +#localVideo.large { + top: 70px; + left: 20px; +} + +#localVideo.ended { + -webkit-filter: grayscale(1); + filter: grayscale(1); +} + +#remoteVideo { + position: relative; + height: auto; + transition: left linear 500ms, top linear 500ms, width linear 500ms, height linear 500ms; +} + +#remoteVideo.mini { + left: 260px; + top: 0px; + width: 128px; +} + +#remoteVideo.large { + left: 0px; + top: 50px; + width: 100%; +} + +#remoteVideo.ended { + -webkit-filter: grayscale(1); + filter: grayscale(1); } #headerContent { @@ -54,6 +181,7 @@ a:active { color: #000; } text-align: right; height: 32px; line-height: 32px; + position: relative; } #headerContent a:link, @@ -105,6 +233,10 @@ a:active { color: #000; } text-align: center; } +#recaptcha_area { + margin: auto +} + #loginForm { text-align: left; padding: 1em; @@ -174,12 +306,6 @@ a:active { color: #000; } height: 100%; } -#roomName { - float: right; - font-size: 16px; - margin-top: 15px; -} - #roomHeader { margin: auto; padding-left: 20px; @@ -217,12 +343,80 @@ a:active { color: #000; } #mainInput { width: 100%; + resize: none; } .blink { background-color: #faa; } +.roomHighlight { + font-weight: bold; +} + +.publicTable { + max-width: 480px; + width: 100%; + border-collapse: collapse; +} +.publicTable tr { + width: 100%; +} +.publicTable td { + vertical-align: text-top; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +} + +.publicRoomEntry { + max-width: 430px; +} + +.publicRoomJoinedUsers { + width: 5em; + text-align: right; + font-size: 12px; + color: #888; +} + +.publicRoomTopic { + color: #888; + font-size: 12px; + overflow: hidden; + padding-bottom: 5px; + border-bottom: 1px #ddd solid; +} + +#roomName { + font-size: 16px; + text-align: right; +} + +#roomTopic { + font-size: 13px; + text-align: right; +} + +.roomNameInput, .roomTopicInput { + width: 100%; +} + +.roomNameSection, .roomTopicSection { + float: right; + width: 100%; +} + +.roomNameSetNew, .roomTopicSetNew { + float: right; +} + +.roomHeaderInfo { + float: right; + margin-top: 15px; + width: 50%; +} + /*** Participant list ***/ #usersTableWrapper { @@ -419,6 +613,10 @@ a:active { color: #000; } text-align: left ! 
important; } +.bubble .message { + /* Wrap words and break lines on CR+LF */ + white-space: pre-wrap; +} .bubble .messagePending { opacity: 0.3 } @@ -426,6 +624,10 @@ a:active { color: #000; } color: #F00; } +.messageBing { + color: #00F; +} + #room-fullscreen-image { position: absolute; top: 0px; @@ -487,7 +689,11 @@ a:active { color: #000; } width: auto; } -.recentsRoomSummaryTS { +.recentsPublicRoom { + font-weight: bold; +} + +.recentsRoomSummaryUsersCount, .recentsRoomSummaryTS { color: #888; font-size: 12px; width: 7em; @@ -500,6 +706,11 @@ a:active { color: #000; } padding-bottom: 5px; } +/* Do not show users count in the recents fragment displayed on the room page */ +#roomPage .recentsRoomSummaryUsersCount { + width: 0em; +} + /*** Recents in the room page ***/ #roomRecentsTableWrapper { diff --git a/webclient/app.js b/webclient/app.js index d25e2a6234..31118304c6 100644 --- a/webclient/app.js +++ b/webclient/app.js @@ -16,6 +16,7 @@ limitations under the License. var matrixWebClient = angular.module('matrixWebClient', [ 'ngRoute', + 'ngAnimate', 'MatrixWebClientController', 'LoginController', 'RegisterController', @@ -79,7 +80,24 @@ matrixWebClient.config(['$routeProvider', '$provide', '$httpProvider', $httpProvider.interceptors.push('AccessTokenInterceptor'); }]); -matrixWebClient.run(['$location', 'matrixService', function($location, matrixService) { +matrixWebClient.run(['$location', '$rootScope', 'matrixService', function($location, $rootScope, matrixService) { + + // Check browser support + // Support IE from 9.0. AngularJS needs some tricks to run on IE8 and below + var version = parseFloat($.browser.version); + if ($.browser.msie && version < 9.0) { + $rootScope.unsupportedBrowser = { + browser: navigator.userAgent, + reason: "Internet Explorer is supported from version 9" + }; + } + // The app requires localStorage + if(typeof(Storage) === "undefined") { + $rootScope.unsupportedBrowser = { + browser: navigator.userAgent, + reason: "It does not support HTML local storage" + }; + } // If user auth details are not in cache, go to the login page if (!matrixService.isUserLoggedIn() && diff --git a/webclient/components/fileInput/file-input-directive.js b/webclient/components/fileInput/file-input-directive.js index 14e2f772f7..9c849a140f 100644 --- a/webclient/components/fileInput/file-input-directive.js +++ b/webclient/components/fileInput/file-input-directive.js @@ -31,13 +31,23 @@ angular.module('mFileInput', []) }, link: function(scope, element, attrs, ctrl) { - element.bind("click", function() { - element.find("input")[0].click(); - element.find("input").bind("change", function(e) { - scope.selectedFile = this.files[0]; - scope.$apply(); + + // Check if HTML5 file selection is supported + if (window.FileList) { + element.bind("click", function() { + element.find("input")[0].click(); + element.find("input").bind("change", function(e) { + scope.selectedFile = this.files[0]; + scope.$apply(); + }); }); - }); + } + else { + setTimeout(function() { + element.attr("disabled", true); + element.attr("title", "The app uses the HTML5 File API to send files. 
Your browser does not support it."); + }, 1); + } // Change the mouse icon on mouseover on this element element.css("cursor", "pointer"); diff --git a/webclient/components/matrix/event-handler-service.js b/webclient/components/matrix/event-handler-service.js index ee478d2eb0..21066e3d9f 100644 --- a/webclient/components/matrix/event-handler-service.js +++ b/webclient/components/matrix/event-handler-service.js @@ -27,7 +27,8 @@ Typically, this service will store events or broadcast them to any listeners if typically all the $on method would do is update its own $scope. */ angular.module('eventHandlerService', []) -.factory('eventHandlerService', ['matrixService', '$rootScope', '$q', function(matrixService, $rootScope, $q) { +.factory('eventHandlerService', ['matrixService', '$rootScope', '$q', '$timeout', 'mPresence', +function(matrixService, $rootScope, $q, $timeout, mPresence) { var ROOM_CREATE_EVENT = "ROOM_CREATE_EVENT"; var MSG_EVENT = "MSG_EVENT"; var MEMBER_EVENT = "MEMBER_EVENT"; @@ -35,21 +36,91 @@ angular.module('eventHandlerService', []) var POWERLEVEL_EVENT = "POWERLEVEL_EVENT"; var CALL_EVENT = "CALL_EVENT"; var NAME_EVENT = "NAME_EVENT"; + var TOPIC_EVENT = "TOPIC_EVENT"; + var RESET_EVENT = "RESET_EVENT"; // eventHandlerService has been resetted - var InitialSyncDeferred = $q.defer(); - - $rootScope.events = { - rooms: {} // will contain roomId: { messages:[], members:{userid1: event} } - }; + // used for dedupping events - could be expanded in future... + // FIXME: means that we leak memory over time (along with lots of the rest + // of the app, given we never try to reap memory yet) + var eventMap = {}; $rootScope.presence = {}; - var initRoom = function(room_id) { + // TODO: This is attached to the rootScope so .html can just go containsBingWord + // for determining classes so it is easy to highlight bing messages. It seems a + // bit strange to put the impl in this service though, but I can't think of a better + // file to put it in. + $rootScope.containsBingWord = function(content) { + if (!content || $.type(content) != "string") { + return false; + } + var bingWords = matrixService.config().bingWords; + var shouldBing = false; + + // case-insensitive name check for user_id OR display_name if they exist + var myUserId = matrixService.config().user_id; + if (myUserId) { + myUserId = myUserId.toLocaleLowerCase(); + } + var myDisplayName = matrixService.config().display_name; + if (myDisplayName) { + myDisplayName = myDisplayName.toLocaleLowerCase(); + } + if ( (myDisplayName && content.toLocaleLowerCase().indexOf(myDisplayName) != -1) || + (myUserId && content.toLocaleLowerCase().indexOf(myUserId) != -1) ) { + shouldBing = true; + } + + // bing word list check + if (bingWords && !shouldBing) { + for (var i=0; i eventTs) { + // ignore it, we have a newer one already. + latestData = false; + } + } + } + if (latestData) { + $rootScope.events.rooms[event.room_id][event.type] = event; + } + }; + + var handleRoomCreate = function(event, isLiveEvent) { // For now, we do not use the event data. 
Simply signal it to the app controllers $rootScope.$broadcast(ROOM_CREATE_EVENT, event, isLiveEvent); }; + var handleRoomAliases = function(event, isLiveEvent) { + matrixService.createRoomIdToAliasMapping(event.room_id, event.content.aliases[0]); + }; + var handleMessage = function(event, isLiveEvent) { - initRoom(event.room_id); - if (isLiveEvent) { - $rootScope.events.rooms[event.room_id].messages.push(event); + if (event.user_id === matrixService.config().user_id && + (event.content.msgtype === "m.text" || event.content.msgtype === "m.emote") ) { + // Assume we've already echoed it. So, there is a fake event in the messages list of the room + // Replace this fake event by the true one + var index = getRoomEventIndex(event.room_id, event.event_id); + if (index) { + $rootScope.events.rooms[event.room_id].messages[index] = event; + } + else { + $rootScope.events.rooms[event.room_id].messages.push(event); + } + } + else { + $rootScope.events.rooms[event.room_id].messages.push(event); + } + + if (window.Notification && event.user_id != matrixService.config().user_id) { + var shouldBing = $rootScope.containsBingWord(event.content.body); + + // Ideally we would notify only when the window is hidden (i.e. document.hidden = true). + // + // However, Chrome on Linux and OSX currently returns document.hidden = false unless the window is + // explicitly showing a different tab. So we need another metric to determine hiddenness - we + // simply use idle time. If the user has been idle enough that their presence goes to idle, then + // we also display notifs when things happen. + // + // This is far far better than notifying whenever anything happens anyway, otherwise you get spammed + // to death with notifications when the window is in the foreground, which is horrible UX (especially + // if you have not defined any bingers and so get notified for everything). + var isIdle = (document.hidden || matrixService.presence.unavailable === mPresence.getState()); + + // We need a way to let people get notifications for everything, if they so desire. The way to do this + // is to specify zero bingwords. + var bingWords = matrixService.config().bingWords; + if (bingWords === undefined || bingWords.length === 0) { + shouldBing = true; + } + + if (shouldBing && isIdle) { + console.log("Displaying notification for "+JSON.stringify(event)); + var member = $rootScope.events.rooms[event.room_id].members[event.user_id]; + var displayname = undefined; + if (member) { + displayname = member.displayname; + } + + var message = event.content.body; + if (event.content.msgtype === "m.emote") { + message = "* " + displayname + " " + message; + } + + var notification = new window.Notification( + (displayname || event.user_id) + + " (" + (matrixService.getRoomIdToAliasMapping(event.room_id) || event.room_id) + ")", // FIXME: don't leak room_ids here + { + "body": message, + "icon": member ? 
member.avatar_url : undefined + }); + $timeout(function() { + notification.close(); + }, 5 * 1000); + } + } } else { $rootScope.events.rooms[event.room_id].messages.unshift(event); @@ -84,21 +247,46 @@ angular.module('eventHandlerService', []) $rootScope.$broadcast(MSG_EVENT, event, isLiveEvent); }; - var handleRoomMember = function(event, isLiveEvent) { - initRoom(event.room_id); + var handleRoomMember = function(event, isLiveEvent, isStateEvent) { // add membership changes as if they were a room message if something interesting changed - if (event.content.prev !== event.content.membership) { - if (isLiveEvent) { - $rootScope.events.rooms[event.room_id].messages.push(event); + // Exception: Do not do this if the event is a room state event because such events already come + // as room messages events. Moreover, when they come as room messages events, they are relatively ordered + // with other other room messages + if (!isStateEvent) { + // could be a membership change, display name change, etc. + // Find out which one. + var memberChanges = undefined; + if (event.content.prev !== event.content.membership) { + memberChanges = "membership"; } - else { - $rootScope.events.rooms[event.room_id].messages.unshift(event); + else if (event.prev_content.displayname !== + event.content.displayname) { + memberChanges = "displayname"; + } + + // mark the key which changed + event.changedKey = memberChanges; + + // If there was a change we want to display, dump it in the message + // list. + if (memberChanges) { + if (isLiveEvent) { + $rootScope.events.rooms[event.room_id].messages.push(event); + } + else { + $rootScope.events.rooms[event.room_id].messages.unshift(event); + } } } - $rootScope.events.rooms[event.room_id].members[event.state_key] = event; - $rootScope.$broadcast(MEMBER_EVENT, event, isLiveEvent); + // Use data from state event or the latest data from the stream. + // Do not care of events that come when paginating back + if (isStateEvent || isLiveEvent) { + $rootScope.events.rooms[event.room_id].members[event.state_key] = event; + } + + $rootScope.$broadcast(MEMBER_EVENT, event, isLiveEvent, isStateEvent); }; var handlePresence = function(event, isLiveEvent) { @@ -107,8 +295,6 @@ angular.module('eventHandlerService', []) }; var handlePowerLevels = function(event, isLiveEvent) { - initRoom(event.room_id); - // Keep the latest data. 
Do not care of events that come when paginating back if (!$rootScope.events.rooms[event.room_id][event.type] || isLiveEvent) { $rootScope.events.rooms[event.room_id][event.type] = event; @@ -116,17 +302,48 @@ angular.module('eventHandlerService', []) } }; - var handleRoomName = function(event, isLiveEvent) { - console.log("handleRoomName " + isLiveEvent); - - initRoom(event.room_id); - - $rootScope.events.rooms[event.room_id][event.type] = event; + var handleRoomName = function(event, isLiveEvent, isStateEvent) { + console.log("handleRoomName room_id: " + event.room_id + " - isLiveEvent: " + isLiveEvent + " - name: " + event.content.name); + handleRoomDateEvent(event, isLiveEvent, !isStateEvent); $rootScope.$broadcast(NAME_EVENT, event, isLiveEvent); }; + + + var handleRoomTopic = function(event, isLiveEvent, isStateEvent) { + console.log("handleRoomTopic room_id: " + event.room_id + " - isLiveEvent: " + isLiveEvent + " - topic: " + event.content.topic); + handleRoomDateEvent(event, isLiveEvent, !isStateEvent); + $rootScope.$broadcast(TOPIC_EVENT, event, isLiveEvent); + }; var handleCallEvent = function(event, isLiveEvent) { $rootScope.$broadcast(CALL_EVENT, event, isLiveEvent); + if (event.type === 'm.call.invite') { + $rootScope.events.rooms[event.room_id].messages.push(event); + } + }; + + /** + * Get the index of the event in $rootScope.events.rooms[room_id].messages + * @param {type} room_id the room id + * @param {type} event_id the event id to look for + * @returns {Number | undefined} the index. undefined if not found. + */ + var getRoomEventIndex = function(room_id, event_id) { + var index; + + var room = $rootScope.events.rooms[room_id]; + if (room) { + // Start looking from the tail since the first goal of this function + // is to find a messaged among the latest ones + for (var i = room.messages.length - 1; i > 0; i--) { + var message = room.messages[i]; + if (event_id === message.event_id) { + index = i; + break; + } + } + } + return index; }; return { @@ -137,62 +354,206 @@ angular.module('eventHandlerService', []) POWERLEVEL_EVENT: POWERLEVEL_EVENT, CALL_EVENT: CALL_EVENT, NAME_EVENT: NAME_EVENT, + TOPIC_EVENT: TOPIC_EVENT, + RESET_EVENT: RESET_EVENT, + reset: function() { + reset(); + $rootScope.$broadcast(RESET_EVENT); + }, + + initRoom: function(room) { + initRoom(room.room_id, room); + }, - handleEvent: function(event, isLiveEvent) { - switch(event.type) { - case "m.room.create": - handleRoomCreate(event, isLiveEvent); - break; - case "m.room.message": - handleMessage(event, isLiveEvent); - break; - case "m.room.member": - handleRoomMember(event, isLiveEvent); - break; - case "m.presence": - handlePresence(event, isLiveEvent); - break; - case 'm.room.ops_levels': - case 'm.room.send_event_level': - case 'm.room.add_state_level': - case 'm.room.join_rules': - case 'm.room.power_levels': - handlePowerLevels(event, isLiveEvent); - break; - case 'm.room.name': - handleRoomName(event, isLiveEvent); - break; - default: - console.log("Unable to handle event type " + event.type); - console.log(JSON.stringify(event, undefined, 4)); - break; + handleEvent: function(event, isLiveEvent, isStateEvent) { + + // FIXME: /initialSync on a particular room is not yet available + // So initRoom on a new room is not called. Make sure the room data is initialised here + if (event.room_id) { + initRoom(event.room_id); } + + // Avoid duplicated events + // Needed for rooms where initialSync has not been done. + // In this case, we do not know where to start pagination. 
So, it starts from the END + // and we can have the same event (ex: joined, invitation) coming from the pagination + // AND from the event stream. + // FIXME: This workaround should be no more required when /initialSync on a particular room + // will be available (as opposite to the global /initialSync done at startup) + if (!isStateEvent) { // Do not consider state events + if (event.event_id && eventMap[event.event_id]) { + console.log("discarding duplicate event: " + JSON.stringify(event, undefined, 4)); + return; + } + else { + eventMap[event.event_id] = 1; + } + } + if (event.type.indexOf('m.call.') === 0) { handleCallEvent(event, isLiveEvent); } + else { + switch(event.type) { + case "m.room.create": + handleRoomCreate(event, isLiveEvent); + break; + case "m.room.aliases": + handleRoomAliases(event, isLiveEvent); + break; + case "m.room.message": + handleMessage(event, isLiveEvent); + break; + case "m.room.member": + handleRoomMember(event, isLiveEvent, isStateEvent); + break; + case "m.presence": + handlePresence(event, isLiveEvent); + break; + case 'm.room.ops_levels': + case 'm.room.send_event_level': + case 'm.room.add_state_level': + case 'm.room.join_rules': + case 'm.room.power_levels': + handlePowerLevels(event, isLiveEvent); + break; + case 'm.room.name': + handleRoomName(event, isLiveEvent, isStateEvent); + break; + case 'm.room.topic': + handleRoomTopic(event, isLiveEvent, isStateEvent); + break; + default: + console.log("Unable to handle event type " + event.type); + console.log(JSON.stringify(event, undefined, 4)); + break; + } + } }, // isLiveEvents determines whether notifications should be shown, whether // messages get appended to the start/end of lists, etc. - handleEvents: function(events, isLiveEvents) { + handleEvents: function(events, isLiveEvents, isStateEvents) { for (var i=0; i=0; i--) { + this.handleEvent(events[i], isLiveEvents, isLiveEvents); + } + // Store where to start pagination + $rootScope.events.rooms[room_id].pagination.earliest_token = messages.start; + } + }, + + handleInitialSyncDone: function(initialSyncData) { console.log("# handleInitialSyncDone"); - InitialSyncDeferred.resolve($rootScope.events, $rootScope.presence); + initialSyncDeferred.resolve(initialSyncData); }, // Returns a promise that resolves when the initialSync request has been processed waitForInitialSyncCompletion: function() { - return InitialSyncDeferred.promise; + return initialSyncDeferred.promise; }, resetRoomMessages: function(room_id) { resetRoomMessages(room_id); + }, + + /** + * Return the last message event of a room + * @param {String} room_id the room id + * @param {Boolean} filterFake true to not take into account fake messages + * @returns {undefined | Event} the last message event if available + */ + getLastMessage: function(room_id, filterEcho) { + var lastMessage; + + var room = $rootScope.events.rooms[room_id]; + if (room) { + for (var i = room.messages.length - 1; i >= 0; i--) { + var message = room.messages[i]; + + if (!filterEcho || undefined === message.echo_msg_state) { + lastMessage = message; + break; + } + } + } + + return lastMessage; + }, + + /** + * Compute the room users number, ie the number of members who has joined the room. 
+ * @param {String} room_id the room id + * @returns {undefined | Number} the room users number if available + */ + getUsersCountInRoom: function(room_id) { + var memberCount; + + var room = $rootScope.events.rooms[room_id]; + if (room) { + memberCount = 0; + + for (var i in room.members) { + var member = room.members[i]; + + if ("join" === member.membership) { + memberCount = memberCount + 1; + } + } + } + + return memberCount; + }, + + /** + * Get the member object of a room member + * @param {String} room_id the room id + * @param {String} user_id the id of the user + * @returns {undefined | Object} the member object of this user in this room if he is part of the room + */ + getMember: function(room_id, user_id) { + var member; + + var room = $rootScope.events.rooms[room_id]; + if (room) { + member = room.members[user_id]; + } + return member; + }, + + setRoomVisibility: function(room_id, visible) { + if (!visible) { + return; + } + initRoom(room_id); + + var room = $rootScope.events.rooms[room_id]; + if (room) { + room.visibility = visible; + } } }; }]); diff --git a/webclient/components/matrix/event-stream-service.js b/webclient/components/matrix/event-stream-service.js index 1c0f7712b4..05469a3ded 100644 --- a/webclient/components/matrix/event-stream-service.js +++ b/webclient/components/matrix/event-stream-service.js @@ -104,14 +104,23 @@ angular.module('eventStreamService', []) settings.isActive = true; var deferred = $q.defer(); - // FIXME: We are discarding all the messages. - matrixService.rooms().then( + // Initial sync: get all information and the last 30 messages of all rooms of the user + // 30 messages should be enough to display a full page of messages in a room + // without requiring to make an additional request + matrixService.initialSync(30, false).then( function(response) { var rooms = response.data.rooms; for (var i = 0; i < rooms.length; ++i) { var room = rooms[i]; + + eventHandlerService.initRoom(room); + + if ("messages" in room) { + eventHandlerService.handleRoomMessages(room.room_id, room.messages, false); + } + if ("state" in room) { - eventHandlerService.handleEvents(room.state, false); + eventHandlerService.handleEvents(room.state, false, true); } } @@ -119,8 +128,9 @@ angular.module('eventStreamService', []) eventHandlerService.handleEvents(presence, false); // Initial sync is done - eventHandlerService.handleInitialSyncDone(); + eventHandlerService.handleInitialSyncDone(response); + // Start event streaming from that point settings.from = response.data.end; doEventStream(deferred); }, diff --git a/webclient/components/matrix/matrix-call.js b/webclient/components/matrix/matrix-call.js index 3e13e4e81f..7b5d9cffef 100644 --- a/webclient/components/matrix/matrix-call.js +++ b/webclient/components/matrix/matrix-call.js @@ -35,164 +35,319 @@ var forAllTracksOnStream = function(s, f) { forAllAudioTracksOnStream(s, f); } +navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia; +window.RTCPeerConnection = window.RTCPeerConnection || window.webkitRTCPeerConnection; // but not mozRTCPeerConnection because its interface is not compatible +window.RTCSessionDescription = window.RTCSessionDescription || window.webkitRTCSessionDescription || window.mozRTCSessionDescription; +window.RTCIceCandidate = window.RTCIceCandidate || window.webkitRTCIceCandidate || window.mozRTCIceCandidate; + +// Returns true if the browser supports all required features to make WebRTC call +var isWebRTCSupported = function () { + return 
!!(navigator.getUserMedia || window.RTCPeerConnection || window.RTCSessionDescription || window.RTCIceCandidate); +}; + angular.module('MatrixCall', []) -.factory('MatrixCall', ['matrixService', 'matrixPhoneService', '$rootScope', function MatrixCallFactory(matrixService, matrixPhoneService, $rootScope) { +.factory('MatrixCall', ['matrixService', 'matrixPhoneService', '$rootScope', '$timeout', function MatrixCallFactory(matrixService, matrixPhoneService, $rootScope, $timeout) { + $rootScope.isWebRTCSupported = isWebRTCSupported(); + var MatrixCall = function(room_id) { this.room_id = room_id; this.call_id = "c" + new Date().getTime(); this.state = 'fledgling'; + this.didConnect = false; + + // a queue for candidates waiting to go out. We try to amalgamate candidates into a single candidate message where possible + this.candidateSendQueue = []; + this.candidateSendTries = 0; + + var self = this; + $rootScope.$watch(this.remoteVideoElement, function (oldValue, newValue) { + self.tryPlayRemoteStream(); + }); + } - navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia; + MatrixCall.CALL_TIMEOUT = 60000; - window.RTCPeerConnection = window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection; + MatrixCall.prototype.createPeerConnection = function() { + var stunServer = 'stun:stun.l.google.com:19302'; + var pc; + if (window.mozRTCPeerConnection) { + pc = new window.mozRTCPeerConnection({'url': stunServer}); + } else { + pc = new window.RTCPeerConnection({"iceServers":[{"urls":"stun:stun.l.google.com:19302"}]}); + } + var self = this; + pc.oniceconnectionstatechange = function() { self.onIceConnectionStateChanged(); }; + pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); }; + pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); }; + pc.onaddstream = function(s) { self.onAddStream(s); }; + return pc; + } - MatrixCall.prototype.placeCall = function() { - self = this; - matrixPhoneService.callPlaced(this); - navigator.getUserMedia({audio: true, video: false}, function(s) { self.gotUserMediaForInvite(s); }, function(e) { self.getUserMediaFailed(e); }); - self.state = 'wait_local_media'; + MatrixCall.prototype.getUserMediaVideoContraints = function(callType) { + switch (callType) { + case 'voice': + return ({audio: true, video: false}); + case 'video': + return ({audio: true, video: { + mandatory: { + minWidth: 640, + maxWidth: 640, + minHeight: 360, + maxHeight: 360, + } + }}); + } }; - MatrixCall.prototype.initWithInvite = function(msg) { - this.msg = msg; - this.peerConn = new window.RTCPeerConnection({"iceServers":[{"urls":"stun:stun.l.google.com:19302"}]}) - self= this; - this.peerConn.oniceconnectionstatechange = function() { self.onIceConnectionStateChanged(); }; - this.peerConn.onicecandidate = function(c) { self.gotLocalIceCandidate(c); }; - this.peerConn.onsignalingstatechange = function() { self.onSignallingStateChanged(); }; - this.peerConn.onaddstream = function(s) { self.onAddStream(s); }; - this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), self.onSetRemoteDescriptionSuccess, self.onSetRemoteDescriptionError); + MatrixCall.prototype.placeVoiceCall = function() { + this.placeCallWithConstraints(this.getUserMediaVideoContraints('voice')); + this.type = 'voice'; + }; + + MatrixCall.prototype.placeVideoCall = function(config) { + this.placeCallWithConstraints(this.getUserMediaVideoContraints('video')); + this.type = 'video'; + }; + + 
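A minimal, framework-free sketch of how the voice/video constraint selection above can be driven against the legacy callback-style getUserMedia API (the same prefixed shim set up at the top of this file); the 'preview' element id below is hypothetical and not part of the webclient:

    // Illustrative sketch only -- mirrors the voice/video constraint selection used by the call code above.
    function callConstraintsFor(callType) {
        if (callType === 'voice') {
            return { audio: true, video: false };
        }
        // 'video': cap the stream at 640x360, matching the bounds used above
        return {
            audio: true,
            video: { mandatory: { minWidth: 640, maxWidth: 640, minHeight: 360, maxHeight: 360 } }
        };
    }

    // Hypothetical usage with the legacy callback-based API (as shimmed at the top of this file).
    navigator.getUserMedia(
        callConstraintsFor('video'),
        function(stream) {
            // Attach a local preview; 'preview' is an assumed <video> element id.
            var video = document.getElementById('preview');
            video.src = URL.createObjectURL(stream);
            video.muted = true; // mute the local stream to avoid audio feedback
            video.play();
        },
        function(err) { console.log("getUserMedia failed: " + err); }
    );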
MatrixCall.prototype.placeCallWithConstraints = function(constraints) { + var self = this; + matrixPhoneService.callPlaced(this); + navigator.getUserMedia(constraints, function(s) { self.gotUserMediaForInvite(s); }, function(e) { self.getUserMediaFailed(e); }); + this.state = 'wait_local_media'; + this.direction = 'outbound'; + this.config = constraints; + }; + + MatrixCall.prototype.initWithInvite = function(event) { + this.msg = event.content; + this.peerConn = this.createPeerConnection(); + this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError); this.state = 'ringing'; + this.direction = 'inbound'; + + if (window.mozRTCPeerConnection) { + // firefox's RTCPeerConnection doesn't add streams until it starts getting media on them + // so we need to figure out whether a video channel has been offered by ourselves. + if (this.msg.offer.sdp.indexOf('m=video') > -1) { + this.type = 'video'; + } else { + this.type = 'voice'; + } + } + + var self = this; + $timeout(function() { + if (self.state == 'ringing') { + self.state = 'ended'; + self.hangupParty = 'remote'; // effectively + self.stopAllMedia(); + if (self.peerConn.signalingState != 'closed') self.peerConn.close(); + if (self.onHangup) self.onHangup(self); + } + }, this.msg.lifetime - event.age); + }; + + // perverse as it may seem, sometimes we want to instantiate a call with a hangup message + // (because when getting the state of the room on load, events come in reverse order and + // we want to remember that a call has been hung up) + MatrixCall.prototype.initWithHangup = function(event) { + this.msg = event.content; + this.state = 'ended'; }; MatrixCall.prototype.answer = function() { - console.trace("Answering call "+this.call_id); - self = this; - navigator.getUserMedia({audio: true, video: false}, function(s) { self.gotUserMediaForAnswer(s); }, function(e) { self.getUserMediaFailed(e); }); - this.state = 'wait_local_media'; + console.log("Answering call "+this.call_id); + + var self = this; + + var roomMembers = $rootScope.events.rooms[this.room_id].members; + if (roomMembers[matrixService.config().user_id].membership != 'join') { + console.log("We need to join the room before we can accept this call"); + matrixService.join(this.room_id).then(function() { + self.answer(); + }, function() { + console.log("Failed to join room: can't answer call!"); + self.onError("Unable to join room to answer call!"); + self.hangup(); + }); + return; + } + + if (!this.localAVStream && !this.waitForLocalAVStream) { + navigator.getUserMedia(this.getUserMediaVideoContraints(this.type), function(s) { self.gotUserMediaForAnswer(s); }, function(e) { self.getUserMediaFailed(e); }); + this.state = 'wait_local_media'; + } else if (this.localAVStream) { + this.gotUserMediaForAnswer(this.localAVStream); + } else if (this.waitForLocalAVStream) { + this.state = 'wait_local_media'; + } }; MatrixCall.prototype.stopAllMedia = function() { if (this.localAVStream) { forAllTracksOnStream(this.localAVStream, function(t) { - t.stop(); + if (t.stop) t.stop(); }); } if (this.remoteAVStream) { forAllTracksOnStream(this.remoteAVStream, function(t) { - t.stop(); + if (t.stop) t.stop(); }); } }; - MatrixCall.prototype.hangup = function() { - console.trace("Ending call "+this.call_id); + MatrixCall.prototype.hangup = function(reason, suppressEvent) { + console.log("Ending call "+this.call_id); + + // pausing now keeps the last frame (ish) of the video call in the video element + // 
rather than it just turning black straight away + if (this.remoteVideoElement) this.remoteVideoElement.pause(); + if (this.localVideoElement) this.localVideoElement.pause(); this.stopAllMedia(); + if (this.peerConn) this.peerConn.close(); + + this.hangupParty = 'local'; + this.hangupReason = reason; var content = { version: 0, call_id: this.call_id, + reason: reason }; - matrixService.sendEvent(this.room_id, 'm.call.hangup', undefined, content).then(this.messageSent, this.messageSendFailed); + this.sendEventWithRetry('m.call.hangup', content); this.state = 'ended'; + if (this.onHangup && !suppressEvent) this.onHangup(this); }; MatrixCall.prototype.gotUserMediaForInvite = function(stream) { + if (this.successor) { + this.successor.gotUserMediaForAnswer(stream); + return; + } + if (this.state == 'ended') return; + + if (this.localVideoElement && this.type == 'video') { + var vidTrack = stream.getVideoTracks()[0]; + this.localVideoElement.src = URL.createObjectURL(stream); + this.localVideoElement.muted = true; + this.localVideoElement.play(); + } + this.localAVStream = stream; var audioTracks = stream.getAudioTracks(); for (var i = 0; i < audioTracks.length; i++) { audioTracks[i].enabled = true; } - this.peerConn = new window.RTCPeerConnection({"iceServers":[{"urls":"stun:stun.l.google.com:19302"}]}) - self = this; - this.peerConn.oniceconnectionstatechange = function() { self.onIceConnectionStateChanged(); }; - this.peerConn.onsignalingstatechange = function() { self.onSignallingStateChanged(); }; - this.peerConn.onicecandidate = function(c) { self.gotLocalIceCandidate(c); }; - this.peerConn.onaddstream = function(s) { self.onAddStream(s); }; + this.peerConn = this.createPeerConnection(); this.peerConn.addStream(stream); + var self = this; this.peerConn.createOffer(function(d) { self.gotLocalOffer(d); }, function(e) { self.getLocalOfferFailed(e); }); - this.state = 'create_offer'; + $rootScope.$apply(function() { + self.state = 'create_offer'; + }); }; MatrixCall.prototype.gotUserMediaForAnswer = function(stream) { + if (this.state == 'ended') return; + + if (this.localVideoElement && this.type == 'video') { + var vidTrack = stream.getVideoTracks()[0]; + this.localVideoElement.src = URL.createObjectURL(stream); + this.localVideoElement.muted = true; + this.localVideoElement.play(); + } + this.localAVStream = stream; var audioTracks = stream.getAudioTracks(); for (var i = 0; i < audioTracks.length; i++) { audioTracks[i].enabled = true; } this.peerConn.addStream(stream); - self = this; + var self = this; var constraints = { 'mandatory': { 'OfferToReceiveAudio': true, - 'OfferToReceiveVideo': false + 'OfferToReceiveVideo': this.type == 'video' }, }; this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints); - this.state = 'create_answer'; + // This can't be in an apply() because it's called by a predecessor call under glare conditions :( + self.state = 'create_answer'; }; MatrixCall.prototype.gotLocalIceCandidate = function(event) { - console.trace(event); if (event.candidate) { - var content = { - version: 0, - call_id: this.call_id, - candidate: event.candidate - }; - matrixService.sendEvent(this.room_id, 'm.call.candidate', undefined, content).then(this.messageSent, this.messageSendFailed); + console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate); + this.sendCandidate(event.candidate); } } MatrixCall.prototype.gotRemoteIceCandidate = function(cand) { - console.trace("Got ICE candidate from remote: "+cand); - var 
candidateObject = new RTCIceCandidate({ - sdpMLineIndex: cand.label, - candidate: cand.candidate - }); - this.peerConn.addIceCandidate(candidateObject, function() {}, function(e) {}); + console.log("Got remote ICE "+cand.sdpMid+" candidate: "+cand.candidate); + if (this.state == 'ended') { + console.log("Ignoring remote ICE candidate because call has ended"); + return; + } + this.peerConn.addIceCandidate(new RTCIceCandidate(cand), function() {}, function(e) {}); }; MatrixCall.prototype.receivedAnswer = function(msg) { - this.peerConn.setRemoteDescription(new RTCSessionDescription(msg.answer), self.onSetRemoteDescriptionSuccess, self.onSetRemoteDescriptionError); + if (this.state == 'ended') return; + + this.peerConn.setRemoteDescription(new RTCSessionDescription(msg.answer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError); this.state = 'connecting'; }; + MatrixCall.prototype.gotLocalOffer = function(description) { - console.trace("Created offer: "+description); + console.log("Created offer: "+description); + + if (this.state == 'ended') { + console.log("Ignoring newly created offer on call ID "+this.call_id+" because the call has ended"); + return; + } + this.peerConn.setLocalDescription(description); var content = { version: 0, call_id: this.call_id, - offer: description + offer: description, + lifetime: MatrixCall.CALL_TIMEOUT }; - matrixService.sendEvent(this.room_id, 'm.call.invite', undefined, content).then(this.messageSent, this.messageSendFailed); - this.state = 'invite_sent'; + this.sendEventWithRetry('m.call.invite', content); + + var self = this; + $timeout(function() { + if (self.state == 'invite_sent') { + self.hangup('invite_timeout'); + } + }, MatrixCall.CALL_TIMEOUT); + + $rootScope.$apply(function() { + self.state = 'invite_sent'; + }); }; MatrixCall.prototype.createdAnswer = function(description) { - console.trace("Created answer: "+description); + console.log("Created answer: "+description); this.peerConn.setLocalDescription(description); var content = { version: 0, call_id: this.call_id, answer: description }; - matrixService.sendEvent(this.room_id, 'm.call.answer', undefined, content).then(this.messageSent, this.messageSendFailed); - this.state = 'connecting'; - }; - - MatrixCall.prototype.messageSent = function() { - }; - - MatrixCall.prototype.messageSendFailed = function(error) { + this.sendEventWithRetry('m.call.answer', content); + var self = this; + $rootScope.$apply(function() { + self.state = 'connecting'; + }); }; MatrixCall.prototype.getLocalOfferFailed = function(error) { @@ -200,37 +355,52 @@ angular.module('MatrixCall', []) }; MatrixCall.prototype.getUserMediaFailed = function() { - this.onError("Couldn't start capturing audio! Is your microphone set up?"); + this.onError("Couldn't start capturing! 
Is your microphone set up?"); + this.hangup(); }; MatrixCall.prototype.onIceConnectionStateChanged = function() { - console.trace("Ice connection state changed to: "+this.peerConn.iceConnectionState); + if (this.state == 'ended') return; // because ICE can still complete as we're ending the call + console.log("Ice connection state changed to: "+this.peerConn.iceConnectionState); // ideally we'd consider the call to be connected when we get media but chrome doesn't implement nay of the 'onstarted' events yet if (this.peerConn.iceConnectionState == 'completed' || this.peerConn.iceConnectionState == 'connected') { - this.state = 'connected'; - $rootScope.$apply(); + var self = this; + $rootScope.$apply(function() { + self.state = 'connected'; + self.didConnect = true; + }); + } else if (this.peerConn.iceConnectionState == 'failed') { + this.hangup('ice_failed'); } }; MatrixCall.prototype.onSignallingStateChanged = function() { - console.trace("Signalling state changed to: "+this.peerConn.signalingState); + console.log("call "+this.call_id+": Signalling state changed to: "+this.peerConn.signalingState); }; MatrixCall.prototype.onSetRemoteDescriptionSuccess = function() { - console.trace("Set remote description"); + console.log("Set remote description"); }; MatrixCall.prototype.onSetRemoteDescriptionError = function(e) { - console.trace("Failed to set remote description"+e); + console.log("Failed to set remote description"+e); }; MatrixCall.prototype.onAddStream = function(event) { - console.trace("Stream added"+event); + console.log("Stream added"+event); var s = event.stream; this.remoteAVStream = s; + if (this.direction == 'inbound') { + if (s.getVideoTracks().length > 0) { + this.type = 'video'; + } else { + this.type = 'voice'; + } + } + var self = this; forAllTracksOnStream(s, function(t) { // not currently implemented in chrome @@ -240,29 +410,145 @@ angular.module('MatrixCall', []) event.stream.onended = function(e) { self.onRemoteStreamEnded(e); }; // not currently implemented in chrome event.stream.onstarted = function(e) { self.onRemoteStreamStarted(e); }; - var player = new Audio(); - player.src = URL.createObjectURL(s); - player.play(); + + this.tryPlayRemoteStream(); + }; + + MatrixCall.prototype.tryPlayRemoteStream = function(event) { + if (this.remoteVideoElement && this.remoteAVStream) { + var player = this.remoteVideoElement; + player.src = URL.createObjectURL(this.remoteAVStream); + player.play(); + } }; MatrixCall.prototype.onRemoteStreamStarted = function(event) { - this.state = 'connected'; + var self = this; + $rootScope.$apply(function() { + self.state = 'connected'; + }); }; MatrixCall.prototype.onRemoteStreamEnded = function(event) { - this.state = 'ended'; - this.stopAllMedia(); - this.onHangup(); + console.log("Remote stream ended"); + var self = this; + $rootScope.$apply(function() { + self.state = 'ended'; + self.hangupParty = 'remote'; + self.stopAllMedia(); + if (self.peerConn.signalingState != 'closed') self.peerConn.close(); + if (self.onHangup) self.onHangup(self); + }); }; MatrixCall.prototype.onRemoteStreamTrackStarted = function(event) { - this.state = 'connected'; + var self = this; + $rootScope.$apply(function() { + self.state = 'connected'; + }); }; - MatrixCall.prototype.onHangupReceived = function() { + MatrixCall.prototype.onHangupReceived = function(msg) { + console.log("Hangup received"); + if (this.remoteVideoElement) this.remoteVideoElement.pause(); + if (this.localVideoElement) this.localVideoElement.pause(); this.state = 'ended'; + 
this.hangupParty = 'remote'; + this.hangupReason = msg.reason; this.stopAllMedia(); - this.onHangup(); + if (this.peerConn && this.peerConn.signalingState != 'closed') this.peerConn.close(); + if (this.onHangup) this.onHangup(this); + }; + + MatrixCall.prototype.replacedBy = function(newCall) { + console.log(this.call_id+" being replaced by "+newCall.call_id); + if (this.state == 'wait_local_media') { + console.log("Telling new call to wait for local media"); + newCall.waitForLocalAVStream = true; + } else if (this.state == 'create_offer') { + console.log("Handing local stream to new call"); + newCall.gotUserMediaForAnswer(this.localAVStream); + delete(this.localAVStream); + } else if (this.state == 'invite_sent') { + console.log("Handing local stream to new call"); + newCall.gotUserMediaForAnswer(this.localAVStream); + delete(this.localAVStream); + } + newCall.localVideoElement = this.localVideoElement; + newCall.remoteVideoElement = this.remoteVideoElement; + this.successor = newCall; + this.hangup(true); + }; + + MatrixCall.prototype.sendEventWithRetry = function(evType, content) { + var ev = { type:evType, content:content, tries:1 }; + var self = this; + matrixService.sendEvent(this.room_id, evType, undefined, content).then(this.eventSent, function(error) { self.eventSendFailed(ev, error); } ); + }; + + MatrixCall.prototype.eventSent = function() { + }; + + MatrixCall.prototype.eventSendFailed = function(ev, error) { + if (ev.tries > 5) { + console.log("Failed to send event of type "+ev.type+" on attempt "+ev.tries+". Giving up."); + return; + } + var delayMs = 500 * Math.pow(2, ev.tries); + console.log("Failed to send event of type "+ev.type+". Retrying in "+delayMs+"ms"); + ++ev.tries; + var self = this; + $timeout(function() { + matrixService.sendEvent(self.room_id, ev.type, undefined, ev.content).then(self.eventSent, function(error) { self.eventSendFailed(ev, error); } ); + }, delayMs); + }; + + // Sends candidates with are sent in a special way because we try to amalgamate them into one message + MatrixCall.prototype.sendCandidate = function(content) { + this.candidateSendQueue.push(content); + var self = this; + if (this.candidateSendTries == 0) $timeout(function() { self.sendCandidateQueue(); }, 100); + }; + + MatrixCall.prototype.sendCandidateQueue = function(content) { + if (this.candidateSendQueue.length == 0) return; + + var cands = this.candidateSendQueue; + this.candidateSendQueue = []; + ++this.candidateSendTries; + var content = { + version: 0, + call_id: this.call_id, + candidates: cands + }; + var self = this; + console.log("Attempting to send "+cands.length+" candidates"); + matrixService.sendEvent(self.room_id, 'm.call.candidates', undefined, content).then(function() { self.candsSent(); }, function(error) { self.candsSendFailed(cands, error); } ); + }; + + MatrixCall.prototype.candsSent = function() { + this.candidateSendTries = 0; + this.sendCandidateQueue(); + }; + + MatrixCall.prototype.candsSendFailed = function(cands, error) { + for (var i = 0; i < cands.length; ++i) { + this.candidateSendQueue.push(cands[i]); + } + + if (this.candidateSendTries > 5) { + console.log("Failed to send candidates on attempt "+this.candidateSendTries+". Giving up for now."); + this.candidateSendTries = 0; + return; + } + + var delayMs = 500 * Math.pow(2, this.candidateSendTries); + ++this.candidateSendTries; + console.log("Failed to send candidates. 
Retrying in "+delayMs+"ms"); + var self = this; + $timeout(function() { + self.sendCandidateQueue(); + }, delayMs); }; return MatrixCall; diff --git a/webclient/components/matrix/matrix-filter.js b/webclient/components/matrix/matrix-filter.js index 0922684e3b..328e3a7086 100644 --- a/webclient/components/matrix/matrix-filter.js +++ b/webclient/components/matrix/matrix-filter.js @@ -26,73 +26,101 @@ angular.module('matrixFilter', []) // If there is an alias, use it // TODO: only one alias is managed for now var alias = matrixService.getRoomIdToAliasMapping(room_id); - if (alias) { - roomName = alias; - } - if (undefined === roomName) { - // Else, build the name from its users - var room = $rootScope.events.rooms[room_id]; - if (room) { - var room_name_event = room["m.room.name"]; + var room = $rootScope.events.rooms[room_id]; + if (room) { + // Get name from room state date + var room_name_event = room["m.room.name"]; + if (room_name_event) { + roomName = room_name_event.content.name; + } + else if (alias) { + roomName = alias; + } + else if (room.members) { - if (room_name_event) { - roomName = room_name_event.content.name; - } - else if (room.members) { - // Limit the room renaming to 1:1 room - if (2 === Object.keys(room.members).length) { - for (var i in room.members) { - var member = room.members[i]; - if (member.state_key !== matrixService.config().user_id) { + var user_id = matrixService.config().user_id; - if (member.state_key in $rootScope.presence) { - // If the user is available in presence, use the displayname there - // as it is the most uptodate - roomName = $rootScope.presence[member.state_key].content.displayname; - } - else if (member.content.displayname) { - roomName = member.content.displayname; - } - else { - roomName = member.state_key; - } + // Else, build the name from its users + // Limit the room renaming to 1:1 room + if (2 === Object.keys(room.members).length) { + for (var i in room.members) { + var member = room.members[i]; + if (member.state_key !== user_id) { + + if (member.state_key in $rootScope.presence) { + // If the user is available in presence, use the displayname there + // as it is the most uptodate + roomName = $rootScope.presence[member.state_key].content.displayname; + } + else if (member.content.displayname) { + roomName = member.content.displayname; + } + else { + roomName = member.state_key; } } } - else if (1 === Object.keys(room.members).length) { + } + else if (1 === Object.keys(room.members).length) { + var otherUserId; + + if (Object.keys(room.members)[0] !== user_id) { + otherUserId = Object.keys(room.members)[0]; + } + else { // The other member may be in the invite list, get all invited users var invitedUserIDs = []; for (var i in room.messages) { var message = room.messages[i]; if ("m.room.member" === message.type && "invite" === message.membership) { - // Make sure there is no duplicate user - if (-1 === invitedUserIDs.indexOf(message.state_key)) { - invitedUserIDs.push(message.state_key); + // Filter out the current user + var member_id = message.state_key; + if (member_id === user_id) { + member_id = message.user_id; + } + if (member_id !== user_id) { + // Make sure there is no duplicate user + if (-1 === invitedUserIDs.indexOf(member_id)) { + invitedUserIDs.push(member_id); + } } } } - + // For now, only 1:1 room needs to be renamed. 
It means only 1 invited user if (1 === invitedUserIDs.length) { - var userID = invitedUserIDs[0]; - - // Try to resolve his displayname in presence global data - if (userID in $rootScope.presence) { - roomName = $rootScope.presence[userID].content.displayname; - } - else { - roomName = userID; - } + otherUserId = invitedUserIDs[0]; } } + + // Try to resolve his displayname in presence global data + if (otherUserId in $rootScope.presence) { + roomName = $rootScope.presence[otherUserId].content.displayname; + } + else { + roomName = otherUserId; + } } } } + // Always show the alias in the room displayed name + if (roomName && alias && alias !== roomName) { + roomName += " (" + alias + ")"; + } + if (undefined === roomName) { // By default, use the room ID roomName = room_id; + + // XXX: this is *INCREDIBLY* heavy logging for a function that calls every single + // time any kind of digest runs which refreshes a room name... + // commenting it out for now. + + // Log some information that lead to this leak + // console.log("Room ID leak for " + room_id); + // console.log("room object: " + JSON.stringify(room, undefined, 4)); } return roomName; @@ -120,7 +148,9 @@ angular.module('matrixFilter', []) var room = $rootScope.events.rooms[room_id]; if (room && (user_id in room.members)) { var member = room.members[user_id]; - displayName = member.content.displayname; + if (member.content.displayname) { + displayName = member.content.displayname; + } } } diff --git a/webclient/components/matrix/matrix-phone-service.js b/webclient/components/matrix/matrix-phone-service.js index ca86b473e7..06465ed821 100644 --- a/webclient/components/matrix/matrix-phone-service.js +++ b/webclient/components/matrix/matrix-phone-service.js @@ -22,45 +22,132 @@ angular.module('matrixPhoneService', []) }; matrixPhoneService.INCOMING_CALL_EVENT = "INCOMING_CALL_EVENT"; + matrixPhoneService.REPLACED_CALL_EVENT = "REPLACED_CALL_EVENT"; matrixPhoneService.allCalls = {}; + // a place to save candidates that come in for calls we haven't got invites for yet (when paginating backwards) + matrixPhoneService.candidatesByCall = {}; matrixPhoneService.callPlaced = function(call) { matrixPhoneService.allCalls[call.call_id] = call; }; $rootScope.$on(eventHandlerService.CALL_EVENT, function(ngEvent, event, isLive) { - if (!isLive) return; // until matrix supports expiring messages if (event.user_id == matrixService.config().user_id) return; + var msg = event.content; + if (event.type == 'm.call.invite') { + if (event.age == undefined || msg.lifetime == undefined) { + // if the event doesn't have either an age (the HS is too old) or a lifetime + // (the sending client was too old when it sent it) then fall back to old behaviour + if (!isLive) return; // until matrix supports expiring messages + } + + if (event.age > msg.lifetime) { + console.log("Ignoring expired call event of type "+event.type); + return; + } + + var call = undefined; + if (!isLive) { + // if this event wasn't live then this call may already be over + call = matrixPhoneService.allCalls[msg.call_id]; + if (call && call.state == 'ended') { + return; + } + } + var MatrixCall = $injector.get('MatrixCall'); var call = new MatrixCall(event.room_id); + + if (!isWebRTCSupported()) { + console.log("Incoming call ID "+msg.call_id+" but this browser doesn't support WebRTC"); + // don't hang up the call: there could be other clients connected that do support WebRTC and declining the + // the call on their behalf would be really annoying. 
+ // instead, we broadcast a fake call event with a non-functional call object + $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call); + return; + } + call.call_id = msg.call_id; - call.initWithInvite(msg); + call.initWithInvite(event); matrixPhoneService.allCalls[call.call_id] = call; - $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call); + + // if we stashed candidate events for that call ID, play them back now + if (!isLive && matrixPhoneService.candidatesByCall[call.call_id] != undefined) { + for (var i = 0; i < matrixPhoneService.candidatesByCall[call.call_id].length; ++i) { + call.gotRemoteIceCandidate(matrixPhoneService.candidatesByCall[call.call_id][i]); + } + } + + // Were we trying to call that user (room)? + var existingCall; + var callIds = Object.keys(matrixPhoneService.allCalls); + for (var i = 0; i < callIds.length; ++i) { + var thisCallId = callIds[i]; + var thisCall = matrixPhoneService.allCalls[thisCallId]; + + if (call.room_id == thisCall.room_id && thisCall.direction == 'outbound' + && (thisCall.state == 'wait_local_media' || thisCall.state == 'create_offer' || thisCall.state == 'invite_sent')) { + existingCall = thisCall; + break; + } + } + + if (existingCall) { + // If we've only got to wait_local_media or create_offer and we've got an invite, + // pick the incoming call because we know we haven't sent our invite yet + // otherwise, pick whichever call has the lowest call ID (by string comparison) + if (existingCall.state == 'wait_local_media' || existingCall.state == 'create_offer' || existingCall.call_id > call.call_id) { + console.log("Glare detected: answering incoming call "+call.call_id+" and canceling outgoing call "+existingCall.call_id); + existingCall.replacedBy(call); + call.answer(); + $rootScope.$broadcast(matrixPhoneService.REPLACED_CALL_EVENT, existingCall, call); + } else { + console.log("Glare detected: rejecting incoming call "+call.call_id+" and keeping outgoing call "+existingCall.call_id); + call.hangup(); + } + } else { + $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call); + } } else if (event.type == 'm.call.answer') { var call = matrixPhoneService.allCalls[msg.call_id]; if (!call) { - console.trace("Got answer for unknown call ID "+msg.call_id); + console.log("Got answer for unknown call ID "+msg.call_id); return; } call.receivedAnswer(msg); - } else if (event.type == 'm.call.candidate') { + } else if (event.type == 'm.call.candidates') { var call = matrixPhoneService.allCalls[msg.call_id]; - if (!call) { - console.trace("Got candidate for unknown call ID "+msg.call_id); + if (!call && isLive) { + console.log("Got candidates for unknown call ID "+msg.call_id); return; + } else if (!call) { + if (matrixPhoneService.candidatesByCall[msg.call_id] == undefined) { + matrixPhoneService.candidatesByCall[msg.call_id] = []; + } + matrixPhoneService.candidatesByCall[msg.call_id] = matrixPhoneService.candidatesByCall[msg.call_id].concat(msg.candidates); + } else { + for (var i = 0; i < msg.candidates.length; ++i) { + call.gotRemoteIceCandidate(msg.candidates[i]); + } } - call.gotRemoteIceCandidate(msg.candidate); } else if (event.type == 'm.call.hangup') { var call = matrixPhoneService.allCalls[msg.call_id]; - if (!call) { - console.trace("Got hangup for unknown call ID "+msg.call_id); - return; + if (!call && isLive) { + console.log("Got hangup for unknown call ID "+msg.call_id); + } else if (!call) { + // if not live, store the fact that the call has ended because we're probably getting events backwards so 
+ // the hangup will come before the invite + var MatrixCall = $injector.get('MatrixCall'); + var call = new MatrixCall(event.room_id); + call.call_id = msg.call_id; + call.initWithHangup(event); + matrixPhoneService.allCalls[msg.call_id] = call; + } else { + call.onHangupReceived(msg); + delete(matrixPhoneService.allCalls[msg.call_id]); } - call.onHangupReceived(); - matrixPhoneService.allCalls[msg.call_id] = undefined; } }); diff --git a/webclient/components/matrix/matrix-service.js b/webclient/components/matrix/matrix-service.js index 436ff5462a..069e02e939 100644 --- a/webclient/components/matrix/matrix-service.js +++ b/webclient/components/matrix/matrix-service.js @@ -36,6 +36,9 @@ angular.module('matrixService', []) */ var config; + var roomIdToAlias = {}; + var aliasToRoomId = {}; + // Current version of permanent storage var configVersion = 0; var prefixPath = "/_matrix/client/api/v1"; @@ -78,21 +81,155 @@ angular.module('matrixService', []) return $http(request); }; + + var doRegisterLogin = function(path, loginType, sessionId, userName, password, threepidCreds) { + var data = {}; + if (loginType === "m.login.recaptcha") { + var challengeToken = Recaptcha.get_challenge(); + var captchaEntry = Recaptcha.get_response(); + data = { + type: "m.login.recaptcha", + challenge: challengeToken, + response: captchaEntry + }; + } + else if (loginType === "m.login.email.identity") { + data = { + threepidCreds: threepidCreds + }; + } + else if (loginType === "m.login.password") { + data = { + user: userName, + password: password + }; + } + + if (sessionId) { + data.session = sessionId; + } + data.type = loginType; + console.log("doRegisterLogin >>> " + loginType); + return doRequest("POST", path, undefined, data); + }; return { /****** Home server API ******/ prefix: prefixPath, // Register an user - register: function(user_name, password, threepidCreds) { - // The REST path spec + register: function(user_name, password, threepidCreds, useCaptcha) { + // registration is composed of multiple requests, to check you can + // register, then to actually register. This deferred will fire when + // all the requests are done, along with the final response. + var deferred = $q.defer(); var path = "/register"; - - return doRequest("POST", path, undefined, { - user_id: user_name, - password: password, - threepidCreds: threepidCreds - }); + + // check we can actually register with this HS. + doRequest("GET", path, undefined, undefined).then( + function(response) { + console.log("/register [1] : "+JSON.stringify(response)); + var flows = response.data.flows; + var knownTypes = [ + "m.login.password", + "m.login.recaptcha", + "m.login.email.identity" + ]; + // if they entered 3pid creds, we want to use a flow which uses it. + var useThreePidFlow = threepidCreds != undefined; + var flowIndex = 0; + var firstRegType = undefined; + + for (var i=0; iPublic rooms - + + + + + + + + + + +
+ + {{ room.room_display_name }} + + +
+ {{ room.num_joined_members }} {{ room.num_joined_members == 1 ? 'user' : 'users' }} +
+
+ {{ room.topic }} +

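The flow-selection loop inside the new register() in matrix-service.js above is partly elided in this hunk. A minimal sketch of the underlying idea, assuming flows of the shape { type, stages } as advertised by GET /register; the sample flows at the bottom are made up for illustration and are not taken from a real response:

    // Illustrative sketch of choosing a registration flow from the GET /register response.
    function chooseRegistrationFlow(flows, haveThreePidCreds) {
        var knownTypes = ["m.login.password", "m.login.recaptcha", "m.login.email.identity"];

        var fallback;
        for (var i = 0; i < flows.length; i++) {
            var flow = flows[i];
            var stages = [flow.type].concat(flow.stages || []);

            // Skip flows containing a stage this client cannot complete.
            var supported = stages.every(function(s) { return knownTypes.indexOf(s) !== -1; });
            if (!supported) continue;

            var usesEmail = stages.indexOf("m.login.email.identity") !== -1;
            // If 3pid creds were entered, prefer a flow that consumes them; otherwise avoid one.
            if (usesEmail === haveThreePidCreds) return flow;
            if (!fallback) fallback = flow; // remember the first workable flow as a fallback
        }
        return fallback;
    }

    // Hypothetical usage:
    var flows = [
        { type: "m.login.recaptcha", stages: ["m.login.password"] },
        { type: "m.login.email.identity", stages: ["m.login.password"] }
    ];
    console.log(chooseRegistrationFlow(flows, true).type);  // "m.login.email.identity"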
diff --git a/webclient/img/green_phone.png b/webclient/img/green_phone.png new file mode 100644 index 0000000000..28807c749b Binary files /dev/null and b/webclient/img/green_phone.png differ diff --git a/webclient/img/red_phone.png b/webclient/img/red_phone.png new file mode 100644 index 0000000000..11fc44940c Binary files /dev/null and b/webclient/img/red_phone.png differ diff --git a/webclient/index.html b/webclient/index.html index 91b6bf27be..411c2762d3 100644 --- a/webclient/index.html +++ b/webclient/index.html @@ -10,12 +10,16 @@ - + + + + + @@ -41,23 +45,63 @@ +
+
+
+ + +
+
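The m.call.candidates batching and retry scheme added to matrix-call.js above (sendCandidate, sendCandidateQueue, candsSent, candsSendFailed) boils down to: queue candidates, flush them as a single event after roughly 100ms, and on failure requeue them and retry with exponential backoff, giving up once the retry count passes five. A minimal standalone sketch, using setTimeout in place of Angular's $timeout and a placeholder sendEvent(candidates) that stands in for the matrixService.sendEvent call to 'm.call.candidates':

    // Illustrative sketch of the batching/backoff idea (not the exact code above).
    function CandidateSender(sendEvent) {
        this.sendEvent = sendEvent; // function(candidates) -> Promise; placeholder for the real API call
        this.queue = [];
        this.tries = 0;      // consecutive failed attempts, drives the backoff
        this.sending = false;
    }

    CandidateSender.prototype.add = function(candidate) {
        this.queue.push(candidate);
        var self = this;
        // Wait ~100ms before flushing so nearby candidates are amalgamated into one event.
        if (!this.sending) setTimeout(function() { self.flush(); }, 100);
    };

    CandidateSender.prototype.flush = function() {
        if (this.queue.length === 0 || this.sending) return;
        var batch = this.queue;
        this.queue = [];
        this.sending = true;
        var self = this;
        this.sendEvent(batch).then(
            function() {
                self.sending = false;
                self.tries = 0;   // success: reset the backoff
                self.flush();     // drain anything queued while the send was in flight
            },
            function() {
                self.sending = false;
                self.queue = batch.concat(self.queue);          // failure: requeue the batch
                self.tries++;
                if (self.tries > 5) { self.tries = 0; return; } // give up for now
                var delayMs = 500 * Math.pow(2, self.tries);    // exponential backoff: 1s, 2s, 4s, ...
                setTimeout(function() { self.flush(); }, delayMs);
            }
        );
    };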