From 9927170787f9d467907b8767fea4d6812fb48f59 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 15:57:09 +0100 Subject: [PATCH 01/33] Accept camelcase + underscores in binding too --- synapse/handlers/identity.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index cbb3184ca5..6200e10775 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -87,14 +87,29 @@ class IdentityHandler(BaseHandler): logger.debug("binding threepid %r to %s", creds, mxid) http_client = SimpleHttpClient(self.hs) data = None + + if 'id_server' in creds: + id_server = creds['id_server'] + elif 'idServer' in creds: + id_server = creds['idServer'] + else: + raise SynapseError(400, "No id_server in creds") + + if 'client_secret' in creds: + client_secret = creds['client_secret'] + elif 'clientSecret' in creds: + client_secret = creds['clientSecret'] + else: + raise SynapseError(400, "No client_secret in creds") + try: data = yield http_client.post_urlencoded_get_json( "https://%s%s" % ( - creds['id_server'], "/_matrix/identity/api/v1/3pid/bind" + id_server, "/_matrix/identity/api/v1/3pid/bind" ), { 'sid': creds['sid'], - 'client_secret': creds['client_secret'], + 'client_secret': client_secret, 'mxid': mxid, } ) From d76c058eea60ffc63ff7427e1de1e142a4b5b188 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 16:30:25 +0100 Subject: [PATCH 02/33] Fix invalid SQL to work in postgres land --- synapse/storage/appservice.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 63d1af4e86..bd285a6699 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -442,14 +442,16 @@ class ApplicationServiceTransactionStore(SQLBaseStore): # Monotonically increasing txn ids, so just select the smallest # one in the txns table (we delete them 
when they are sent) result = txn.execute( - "SELECT MIN(txn_id), * FROM application_services_txns WHERE as_id=?", + "SELECT * FROM application_services_txns WHERE as_id=?" + " ORDER BY txn_id ASC LIMIT 1", (service.id,) ) - entry = self.cursor_to_dict(result)[0] - if not entry or entry["txn_id"] is None: - # the min(txn_id) part will force a row, so entry may not be None + rows = self.cursor_to_dict(result) + if not rows: return None + entry = rows[0] + event_ids = json.loads(entry["event_ids"]) events = self._get_events_txn(txn, event_ids) From 884fb88e286008a1a8fef902cec928fad4a9cac9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 16:35:20 +0100 Subject: [PATCH 03/33] txn.execute doesn't return cursors --- synapse/storage/appservice.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index bd285a6699..e133cf5550 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -441,12 +441,12 @@ class ApplicationServiceTransactionStore(SQLBaseStore): def _get_oldest_unsent_txn(self, txn, service): # Monotonically increasing txn ids, so just select the smallest # one in the txns table (we delete them when they are sent) - result = txn.execute( + txn.execute( "SELECT * FROM application_services_txns WHERE as_id=?" 
" ORDER BY txn_id ASC LIMIT 1", (service.id,) ) - rows = self.cursor_to_dict(result) + rows = self.cursor_to_dict(txn) if not rows: return None From 0337eaf321a8519264f25a7ad14ee2e162a535b4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 16:43:39 +0100 Subject: [PATCH 04/33] txn.execute doesn't return cursors --- synapse/storage/appservice.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index e133cf5550..39b7881c40 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -355,11 +355,11 @@ class ApplicationServiceTransactionStore(SQLBaseStore): # being sent) last_txn_id = self._get_last_txn(txn, service.id) - result = txn.execute( + txn.execute( "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?", (service.id,) ) - highest_txn_id = result.fetchone()[0] + highest_txn_id = txn.fetchone()[0] if highest_txn_id is None: highest_txn_id = 0 @@ -460,11 +460,11 @@ class ApplicationServiceTransactionStore(SQLBaseStore): ) def _get_last_txn(self, txn, service_id): - result = txn.execute( + txn.execute( "SELECT last_txn FROM application_services_state WHERE as_id=?", (service_id,) ) - last_txn_id = result.fetchone() + last_txn_id = txn.fetchone() if last_txn_id is None or last_txn_id[0] is None: # no row exists return 0 else: From 4669def000fbf16da94055c4f9cbc0d705259ba2 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 16:49:44 +0100 Subject: [PATCH 05/33] Oops, forgot the schema delta file --- synapse/storage/schema/delta/17/user_threepids.sql | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 synapse/storage/schema/delta/17/user_threepids.sql diff --git a/synapse/storage/schema/delta/17/user_threepids.sql b/synapse/storage/schema/delta/17/user_threepids.sql new file mode 100644 index 0000000000..fa16589019 --- /dev/null +++ b/synapse/storage/schema/delta/17/user_threepids.sql @@ -0,0 +1,10 @@ 
+CREATE TABLE user_threepids ( + id INTEGER PRIMARY KEY NOT NULL, + user TEXT NOT NULL, + medium TEXT NOT NULL, + address TEXT NOT NULL, + validated_at INTEGER NOT NULL, + added_at INTEGER NOT NULL, + CONSTRAINT user_medium_address UNIQUE (user, medium, address) ON CONFLICT REPLACE +); +CREATE INDEX user_threepids_user ON user_threepids(user); From fb95035a6534d5a16776242b5f31ceef567ca2cc Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 16:53:41 +0100 Subject: [PATCH 06/33] Be postgressive --- synapse/storage/schema/delta/17/user_threepids.sql | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/synapse/storage/schema/delta/17/user_threepids.sql b/synapse/storage/schema/delta/17/user_threepids.sql index fa16589019..bbb472ee8a 100644 --- a/synapse/storage/schema/delta/17/user_threepids.sql +++ b/synapse/storage/schema/delta/17/user_threepids.sql @@ -1,10 +1,9 @@ CREATE TABLE user_threepids ( - id INTEGER PRIMARY KEY NOT NULL, user TEXT NOT NULL, medium TEXT NOT NULL, address TEXT NOT NULL, - validated_at INTEGER NOT NULL, - added_at INTEGER NOT NULL, - CONSTRAINT user_medium_address UNIQUE (user, medium, address) ON CONFLICT REPLACE + validated_at BIGINT NOT NULL, + added_at BIGINT NOT NULL, + CONSTRAINT user_medium_address UNIQUE (user, medium, address) ); CREATE INDEX user_threepids_user ON user_threepids(user); From 61cd5d9045b767e2c1ef4e48e67ed0c70e1092fe Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 16:57:14 +0100 Subject: [PATCH 07/33] Be more postgressive --- synapse/storage/registration.py | 4 ++-- synapse/storage/schema/delta/17/user_threepids.sql | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index a986c4816e..026ba217d6 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -181,7 +181,7 @@ class RegistrationStore(SQLBaseStore): @defer.inlineCallbacks def user_add_threepid(self, 
user_id, medium, address, validated_at, added_at): yield self._simple_upsert("user_threepids", { - "user": user_id, + "user_id": user_id, "medium": medium, "address": address, }, { @@ -193,7 +193,7 @@ class RegistrationStore(SQLBaseStore): def user_get_threepids(self, user_id): ret = yield self._simple_select_list( "user_threepids", { - "user": user_id + "user_id": user_id }, ['medium', 'address', 'validated_at', 'added_at'], 'user_get_threepids' diff --git a/synapse/storage/schema/delta/17/user_threepids.sql b/synapse/storage/schema/delta/17/user_threepids.sql index bbb472ee8a..f3d1efe4e0 100644 --- a/synapse/storage/schema/delta/17/user_threepids.sql +++ b/synapse/storage/schema/delta/17/user_threepids.sql @@ -1,9 +1,9 @@ CREATE TABLE user_threepids ( - user TEXT NOT NULL, + user_id TEXT NOT NULL, medium TEXT NOT NULL, address TEXT NOT NULL, validated_at BIGINT NOT NULL, added_at BIGINT NOT NULL, CONSTRAINT user_medium_address UNIQUE (user, medium, address) ); -CREATE INDEX user_threepids_user ON user_threepids(user); +CREATE INDEX user_threepids_user_id ON user_threepids(user_id); From f8c30faf256bb47c6725b7752e36e49c519d1adc Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 16:58:42 +0100 Subject: [PATCH 08/33] Oops, update the contraint too --- synapse/storage/schema/delta/17/user_threepids.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/17/user_threepids.sql b/synapse/storage/schema/delta/17/user_threepids.sql index f3d1efe4e0..c17715ac80 100644 --- a/synapse/storage/schema/delta/17/user_threepids.sql +++ b/synapse/storage/schema/delta/17/user_threepids.sql @@ -4,6 +4,6 @@ CREATE TABLE user_threepids ( address TEXT NOT NULL, validated_at BIGINT NOT NULL, added_at BIGINT NOT NULL, - CONSTRAINT user_medium_address UNIQUE (user, medium, address) + CONSTRAINT user_medium_address UNIQUE (user_id, medium, address) ); CREATE INDEX user_threepids_user_id ON user_threepids(user_id); From 
12d381bd5da730d9d4fb2aebcc9fb3d83f9456a7 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 29 Apr 2015 17:13:51 +0100 Subject: [PATCH 09/33] Decode the data json in the storage layer (was moved but this part was missed) --- synapse/push/pusherpool.py | 6 +++++- synapse/storage/pusher.py | 9 +++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 58fe6ad747..571d8b2f82 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -170,7 +170,11 @@ class PusherPool: def _start_pushers(self, pushers): logger.info("Starting %d pushers", len(pushers)) for pusherdict in pushers: - p = self._create_pusher(pusherdict) + try: + p = self._create_pusher(pusherdict) + except PusherConfigException: + logger.exception("Couldn't start a pusher: caught PusherConfigException") + continue if p: fullid = "%s:%s:%s" % ( pusherdict['app_id'], diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 2582a1da66..feb2870dfe 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -21,6 +21,7 @@ from synapse.api.errors import StoreError from syutil.jsonutil import encode_canonical_json import logging +import simplejson as json logger = logging.getLogger(__name__) @@ -48,6 +49,14 @@ class PusherStore(SQLBaseStore): ) rows = yield self._execute_and_decode("get_all_pushers", sql) + for r in rows: + dataJson = r['data'] + r['data'] = None + try: + r['data'] = json.loads(dataJson) + except: + logger.warn("Invalid JSON in data for pusher %d: %s", r['id'], dataJson) + pass defer.returnValue(rows) From a6e62cf6d0cb7d80c7b1fe99e279b27c2faf7855 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 17:37:11 +0100 Subject: [PATCH 10/33] Fix off by one in presence token handling --- synapse/handlers/presence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 
42cd528908..9e15610401 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -881,7 +881,7 @@ class PresenceEventSource(object): # TODO(paul): limit for serial, user_ids in presence._remote_offline_serials: - if serial < from_key: + if serial <= from_key: break if serial > max_serial: From 472be886740edf5f9eb08f234063a7d57f70442b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 17:43:46 +0100 Subject: [PATCH 11/33] We store pusher data as bytes --- synapse/storage/pusher.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index feb2870dfe..57690c3ef5 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -53,7 +53,7 @@ class PusherStore(SQLBaseStore): dataJson = r['data'] r['data'] = None try: - r['data'] = json.loads(dataJson) + r['data'] = json.loads(str(dataJson).decode("UTF8")) except: logger.warn("Invalid JSON in data for pusher %d: %s", r['id'], dataJson) pass @@ -81,7 +81,7 @@ class PusherStore(SQLBaseStore): device_display_name=device_display_name, ts=pushkey_ts, lang=lang, - data=encode_canonical_json(data).decode("UTF-8"), + data=encode_canonical_json(data), ), insertion_values=dict( id=next_id, From 09177f4f2e237aa042cdb21cec91f4374d072332 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:03:42 +0100 Subject: [PATCH 12/33] Decode buffers in same thread --- synapse/storage/pusher.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 57690c3ef5..c51e08fa27 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -44,20 +44,26 @@ class PusherStore(SQLBaseStore): @defer.inlineCallbacks def get_all_pushers(self): - sql = ( - "SELECT * FROM pushers" + def get_pushers(txn): + txn.execute("SELECT * FROM pushers") + rows = self.cursor_to_dict(txn) + + for r in rows: + dataJson = 
r['data'] + r['data'] = None + try: + r['data'] = json.loads(str(dataJson).decode("UTF8")) + except Exception as e: + logger.warn( + "Invalid JSON in data for pusher %d: %s, %s", + r['id'], dataJson, e.message, + ) + pass + + rows = yield self.runInteraction( + get_pushers, + desc="get_all_pushers", ) - - rows = yield self._execute_and_decode("get_all_pushers", sql) - for r in rows: - dataJson = r['data'] - r['data'] = None - try: - r['data'] = json.loads(str(dataJson).decode("UTF8")) - except: - logger.warn("Invalid JSON in data for pusher %d: %s", r['id'], dataJson) - pass - defer.returnValue(rows) @defer.inlineCallbacks From 0618978238cfc458bbf075fe9e94f3f807007679 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:04:35 +0100 Subject: [PATCH 13/33] Typo, args wrong way round --- synapse/storage/pusher.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index c51e08fa27..a0fe8853e5 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -60,10 +60,7 @@ class PusherStore(SQLBaseStore): ) pass - rows = yield self.runInteraction( - get_pushers, - desc="get_all_pushers", - ) + rows = yield self.runInteraction("get_all_pushers", get_pushers) defer.returnValue(rows) @defer.inlineCallbacks From 3864b3a8e6596c8134d01e1d94841e710914f635 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:07:36 +0100 Subject: [PATCH 14/33] Actually return rows --- synapse/storage/pusher.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index a0fe8853e5..c2aaf60286 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -60,6 +60,8 @@ class PusherStore(SQLBaseStore): ) pass + return rows + rows = yield self.runInteraction("get_all_pushers", get_pushers) defer.returnValue(rows) From 46daf2d2000ebaaec813c88e73183ca1a5e5e58e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 
2015 18:22:20 +0100 Subject: [PATCH 15/33] Start pushers on reactor thread --- synapse/push/pusherpool.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 571d8b2f82..2e58f70ffa 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -19,6 +19,8 @@ from twisted.internet import defer from httppusher import HttpPusher from synapse.push import PusherConfigException +from synapse.util.async import run_on_reactor + import logging logger = logging.getLogger(__name__) @@ -48,6 +50,7 @@ class PusherPool: @defer.inlineCallbacks def start(self): + yield run_on_reactor() pushers = yield self.store.get_all_pushers() self._start_pushers(pushers) From c27d6ad6b5eaed43f1f20530a51f48ca2931063f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:25:24 +0100 Subject: [PATCH 16/33] Only start pushers when synapse has fully started --- synapse/app/homeserver.py | 6 +++++- synapse/push/pusherpool.py | 3 --- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5c6812f473..0987a76cfc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -442,11 +442,15 @@ def setup(config_options): hs.start_listening() - hs.get_pusherpool().start() hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() hs.get_replication_layer().start_get_pdu_cache() + def on_start(): + hs.get_pusherpool().start() + + reactor.callWhenRunning(on_start) + return hs diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 2e58f70ffa..571d8b2f82 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -19,8 +19,6 @@ from twisted.internet import defer from httppusher import HttpPusher from synapse.push import PusherConfigException -from synapse.util.async import run_on_reactor - import logging logger = logging.getLogger(__name__) @@ -50,7 +48,6 @@ class PusherPool: 
@defer.inlineCallbacks def start(self): - yield run_on_reactor() pushers = yield self.store.get_all_pushers() self._start_pushers(pushers) From e7a6edb0ee34a72ce5781802744ff6149ff6a940 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:37:30 +0100 Subject: [PATCH 17/33] Revert previous --- synapse/app/homeserver.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 0987a76cfc..5c6812f473 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -442,15 +442,11 @@ def setup(config_options): hs.start_listening() + hs.get_pusherpool().start() hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() hs.get_replication_layer().start_get_pdu_cache() - def on_start(): - hs.get_pusherpool().start() - - reactor.callWhenRunning(on_start) - return hs From 3e71d13acf92b878f9b27665d9e4846105da3aa7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:37:42 +0100 Subject: [PATCH 18/33] Also log when we've started pushers --- synapse/push/pusherpool.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 571d8b2f82..0ab2f65972 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -186,6 +186,8 @@ class PusherPool: self.pushers[fullid] = p p.start() + logger.info("Started pushers") + @defer.inlineCallbacks def remove_pusher(self, app_id, pushkey, user_name): fullid = "%s:%s:%s" % (app_id, pushkey, user_name) From d2d61a82881da58a024f20d7c0db28300aa628dc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 19:15:23 +0100 Subject: [PATCH 19/33] Fix deadlock in id_generators. No idea why this was an actual deadlock. 
--- synapse/storage/util/id_generators.py | 30 ++++++++++++++------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 9d461d5e96..54aeff2b43 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -86,10 +86,10 @@ class StreamIdGenerator(object): with stream_id_gen.get_next_txn(txn) as stream_id: # ... persist event ... """ - with self._lock: - if not self._current_max: - self._compute_current_max(txn) + if not self._current_max: + self._compute_current_max(txn) + with self._lock: self._current_max += 1 next_id = self._current_max @@ -110,22 +110,24 @@ class StreamIdGenerator(object): """Returns the maximum stream id such that all stream ids less than or equal to it have been successfully persisted. """ + if not self._current_max: + yield store.runInteraction( + "_compute_current_max", + self._get_or_compute_current_max, + ) + with self._lock: if self._unfinished_ids: defer.returnValue(self._unfinished_ids[0] - 1) - if not self._current_max: - yield store.runInteraction( - "_compute_current_max", - self._compute_current_max, - ) - defer.returnValue(self._current_max) - def _compute_current_max(self, txn): - txn.execute("SELECT MAX(stream_ordering) FROM events") - val, = txn.fetchone() + def _get_or_compute_current_max(self, txn): + with self._lock: + txn.execute("SELECT MAX(stream_ordering) FROM events") + rows = txn.fetchall() + val, = rows[0] - self._current_max = int(val) if val else 1 + self._current_max = int(val) if val else 1 - return self._current_max + return self._current_max From 50f96f256f01dcdb549017f68eccb2ae8a285134 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 19:17:00 +0100 Subject: [PATCH 20/33] Also remove yield from within lock in the other generator --- synapse/storage/util/id_generators.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git 
a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 54aeff2b43..a51268511c 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -30,15 +30,13 @@ class IdGenerator(object): @defer.inlineCallbacks def get_next(self): + if self._next_id is None: + yield self.store.runInteraction( + "IdGenerator_%s" % (self.table,), + self.get_next_txn, + ) + with self._lock: - if not self._next_id: - res = yield self.store._execute_and_decode( - "IdGenerator_%s" % (self.table,), - "SELECT MAX(%s) as mx FROM %s" % (self.column, self.table,) - ) - - self._next_id = (res and res[0] and res[0]["mx"]) or 1 - i = self._next_id self._next_id += 1 defer.returnValue(i) From 0ade2712d13236e557f94172c102e03545d8ac09 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 19:17:25 +0100 Subject: [PATCH 21/33] Typo --- synapse/storage/util/id_generators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index a51268511c..e40eb8a8c4 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -85,7 +85,7 @@ class StreamIdGenerator(object): # ... persist event ... """ if not self._current_max: - self._compute_current_max(txn) + self._get_or_compute_current_max(txn) with self._lock: self._current_max += 1 From 4a9dc5b2f55fa07e4f43445c3b993cf1fd330d71 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 19:27:02 +0100 Subject: [PATCH 22/33] pushkey' are also bytes. 
--- synapse/storage/pusher.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index c2aaf60286..752b451c46 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -29,15 +29,22 @@ logger = logging.getLogger(__name__) class PusherStore(SQLBaseStore): @defer.inlineCallbacks def get_pushers_by_app_id_and_pushkey(self, app_id, pushkey): - sql = ( - "SELECT * FROM pushers " - "WHERE app_id = ? AND pushkey = ?" - ) + def r(txn): + sql = ( + "SELECT * FROM pushers" + " WHERE app_id = ? AND pushkey = ?" + ) - rows = yield self._execute_and_decode( - "get_pushers_by_app_id_and_pushkey", - sql, - app_id, pushkey + txn.execute(sql, (app_id, pushkey,)) + rows = self.cursor_to_dict(txn) + + for r in rows: + r['pushkey'] = str(r['pushkey']).decode("UTF8") + + return rows + + rows = yield self.runInteraction( + "get_pushers_by_app_id_and_pushkey", r ) defer.returnValue(rows) @@ -60,6 +67,8 @@ class PusherStore(SQLBaseStore): ) pass + r['pushkey'] = str(r['pushkey']).decode("UTF8") + return rows rows = yield self.runInteraction("get_all_pushers", get_pushers) From b1ca784aca44270f21e23489541802eb5b9707b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 19:41:14 +0100 Subject: [PATCH 23/33] Correctly decode, for sqlite and postgres, rows from pushers table --- synapse/storage/pusher.py | 43 ++++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 752b451c46..08ea62681b 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -22,11 +22,33 @@ from syutil.jsonutil import encode_canonical_json import logging import simplejson as json +import types logger = logging.getLogger(__name__) class PusherStore(SQLBaseStore): + def _decode_pushers_rows(self, rows): + for r in rows: + dataJson = r['data'] + r['data'] = None + try: + if 
isinstance(dataJson, types.BufferType): + dataJson = str(dataJson).decode("UTF8") + + r['data'] = json.loads(dataJson) + except Exception as e: + logger.warn( + "Invalid JSON in data for pusher %d: %s, %s", + r['id'], dataJson, e.message, + ) + pass + + if isinstance(r['pushkey'], types.BufferType): + r['pushkey'] = str(r['pushkey']).decode("UTF8") + + return rows + @defer.inlineCallbacks def get_pushers_by_app_id_and_pushkey(self, app_id, pushkey): def r(txn): @@ -38,10 +60,7 @@ class PusherStore(SQLBaseStore): txn.execute(sql, (app_id, pushkey,)) rows = self.cursor_to_dict(txn) - for r in rows: - r['pushkey'] = str(r['pushkey']).decode("UTF8") - - return rows + return self._decode_pushers_rows(rows) rows = yield self.runInteraction( "get_pushers_by_app_id_and_pushkey", r @@ -55,21 +74,7 @@ class PusherStore(SQLBaseStore): txn.execute("SELECT * FROM pushers") rows = self.cursor_to_dict(txn) - for r in rows: - dataJson = r['data'] - r['data'] = None - try: - r['data'] = json.loads(str(dataJson).decode("UTF8")) - except Exception as e: - logger.warn( - "Invalid JSON in data for pusher %d: %s, %s", - r['id'], dataJson, e.message, - ) - pass - - r['pushkey'] = str(r['pushkey']).decode("UTF8") - - return rows + return self._decode_pushers_rows(rows) rows = yield self.runInteraction("get_all_pushers", get_pushers) defer.returnValue(rows) From 1d7702833d27f2e87e6598b5de9aad878e716c4b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Apr 2015 10:16:12 +0100 Subject: [PATCH 24/33] Make simple query rather than long one and then throw away half the results --- synapse/handlers/message.py | 3 +-- synapse/storage/room.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 9667bb8674..22e19af17f 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -267,8 +267,7 @@ class MessageHandler(BaseHandler): user, pagination_config.get_source_config("presence"), 
None ) - public_rooms = yield self.store.get_rooms(is_public=True) - public_room_ids = [r["room_id"] for r in public_rooms] + public_room_ids = yield self.store.get_public_room_ids() limit = pagin_config.limit if limit is None: diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 48ebb33057..2e77b9d79b 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -75,6 +75,16 @@ class RoomStore(SQLBaseStore): allow_none=True, ) + def get_public_room_ids(self): + return self._simple_select_onecol( + table="rooms", + keyvalues={ + "is_public": True, + }, + retcol="room_id", + desc="get_public_room_ids", + ) + @defer.inlineCallbacks def get_rooms(self, is_public): """Retrieve a list of all public rooms. From 4b46fbec5beda46276219ba8142a12a42a1dc2c1 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 12:04:08 +0100 Subject: [PATCH 25/33] Doesn't look like this is used anymore --- synapse/http/server.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 05636e683b..5ea955e4ff 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -179,19 +179,6 @@ class JsonResource(HttpServer, resource.Resource): self._PathEntry(path_pattern, callback) ) - def start_listening(self, port): - """ Registers the http server with the twisted reactor. - - Args: - port (int): The port to listen on. - - """ - reactor.listenTCP( - port, - server.Site(self), - interface=self.hs.config.bind_host - ) - def render(self, request): """ This gets called by twisted every time someone sends us a request. 
""" From 0c1b7f843ba2fe30f2513ea8b94406f7b7e7f284 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 13:33:30 +0100 Subject: [PATCH 26/33] Unused import --- synapse/http/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 5ea955e4ff..93ecbd7589 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -24,7 +24,7 @@ from syutil.jsonutil import ( encode_canonical_json, encode_pretty_printed_json ) -from twisted.internet import defer, reactor +from twisted.internet import defer from twisted.web import server, resource from twisted.web.server import NOT_DONE_YET from twisted.web.util import redirectTo From d89a9f72833acfd472ec557a8fe3927320efea93 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 13:58:13 +0100 Subject: [PATCH 27/33] Add an access_log SYN-161 #resolve --- synapse/app/homeserver.py | 25 ++++++++++++++++++++++--- synapse/config/captcha.py | 2 ++ synapse/config/logger.py | 5 +++++ 3 files changed, 29 insertions(+), 3 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5c6812f473..0aa5c34c81 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,6 +35,7 @@ from twisted.enterprise import adbapi from twisted.web.resource import Resource from twisted.web.static import File from twisted.web.server import Site +from twisted.web.http import proxiedLogFormatter from synapse.http.server import JsonResource, RootRedirect from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource @@ -225,10 +226,18 @@ class SynapseHomeServer(HomeServer): def start_listening(self): config = self.get_config() + log_formatter = None + if config.captcha_ip_origin_is_x_forwarded: + log_formatter = proxiedLogFormatter + if not config.no_tls and config.bind_port is not None: reactor.listenSSL( config.bind_port, - 
Site(self.root_resource), + Site( + self.root_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), self.tls_context_factory, interface=config.bind_host ) @@ -237,7 +246,11 @@ class SynapseHomeServer(HomeServer): if config.unsecure_port is not None: reactor.listenTCP( config.unsecure_port, - Site(self.root_resource), + Site( + self.root_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), interface=config.bind_host ) logger.info("Synapse now listening on port %d", config.unsecure_port) @@ -245,7 +258,13 @@ class SynapseHomeServer(HomeServer): metrics_resource = self.get_resource_for_metrics() if metrics_resource and config.metrics_port is not None: reactor.listenTCP( - config.metrics_port, Site(metrics_resource), interface="127.0.0.1", + config.metrics_port, + Site( + metrics_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), + interface="127.0.0.1", ) logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port) diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py index 07fbfadc0f..456ce9c632 100644 --- a/synapse/config/captcha.py +++ b/synapse/config/captcha.py @@ -22,6 +22,8 @@ class CaptchaConfig(Config): self.recaptcha_private_key = args.recaptcha_private_key self.recaptcha_public_key = args.recaptcha_public_key self.enable_registration_captcha = args.enable_registration_captcha + + # XXX: This is used for more than just captcha self.captcha_ip_origin_is_x_forwarded = ( args.captcha_ip_origin_is_x_forwarded ) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 247b324816..559cbe7963 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -27,6 +27,7 @@ class LoggingConfig(Config): self.verbosity = int(args.verbose) if args.verbose else None self.log_config = self.abspath(args.log_config) self.log_file = self.abspath(args.log_file) + self.access_log_file = self.abspath(args.access_log_file) @classmethod def 
add_arguments(cls, parser): @@ -44,6 +45,10 @@ class LoggingConfig(Config): '--log-config', dest="log_config", default=None, help="Python logging config file" ) + logging_group.add_argument( + '--access-log-file', dest="access_log_file", default="access.log", + help="File to log server access to" + ) def setup_logging(self): log_format = ( From 2366d287806f267e078ab21bdf3abf2b67457f03 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Apr 2015 13:52:28 +0100 Subject: [PATCH 28/33] Don't needlessly join on state_events --- synapse/storage/room.py | 5 ++--- synapse/storage/state.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 2e77b9d79b..78572bbdd2 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -196,14 +196,13 @@ class RoomStore(SQLBaseStore): sql = ( "SELECT e.*, (%(redacted)s) AS redacted FROM events as e " "INNER JOIN current_state_events as c ON e.event_id = c.event_id " - "INNER JOIN state_events as s ON e.event_id = s.event_id " "WHERE c.room_id = ? " ) % { "redacted": del_sql, } - sql += " AND ((s.type = 'm.room.name' AND s.state_key = '')" - sql += " OR s.type = 'm.room.aliases')" + sql += " AND ((c.type = 'm.room.name' AND c.state_key = '')" + sql += " OR c.type = 'm.room.aliases')" args = (room_id,) results = yield self._execute_and_decode("get_current_state", sql, *args) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 553ba9dd1f..c282fcf7c1 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -136,17 +136,16 @@ class StateStore(SQLBaseStore): sql = ( "SELECT e.*, (%(redacted)s) AS redacted FROM events as e " "INNER JOIN current_state_events as c ON e.event_id = c.event_id " - "INNER JOIN state_events as s ON e.event_id = s.event_id " "WHERE c.room_id = ? " ) % { "redacted": del_sql, } if event_type and state_key is not None: - sql += " AND s.type = ? AND s.state_key = ? " + sql += " AND c.type = ? 
AND c.state_key = ? " args = (room_id, event_type, state_key) elif event_type: - sql += " AND s.type = ?" + sql += " AND c.type = ?" args = (room_id, event_type) else: args = (room_id, ) From 29400b45b98307699ae1aba9230eaf106ac6f021 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 30 Apr 2015 15:21:31 +0100 Subject: [PATCH 29/33] SYN-367: Use upsert rather than insert_or_replace --- synapse/storage/keys.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index cbe9339ccf..5bdf497b93 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -137,8 +137,13 @@ class KeyStore(SQLBaseStore): ts_valid_until_ms (int): The time when this json stops being valid. key_json (bytes): The encoded JSON. """ - return self._simple_insert( + return self._simple_upsert( table="server_keys_json", + keyvalues={ + "server_name": server_name, + "key_id": key_id, + "from_server": from_server, + }, values={ "server_name": server_name, "key_id": key_id, @@ -147,7 +152,6 @@ class KeyStore(SQLBaseStore): "ts_valid_until_ms": ts_expires_ms, "key_json": buffer(key_json_bytes), }, - or_replace=True, ) def get_server_keys_json(self, server_keys): From 3c4c2297887b973c012ff61a731b3bf6178d8d26 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Apr 2015 16:16:53 +0100 Subject: [PATCH 30/33] Don't use sub queries, it makes postgres sad --- synapse/storage/state.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index c282fcf7c1..95bc15c0dc 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -128,18 +128,12 @@ class StateStore(SQLBaseStore): @defer.inlineCallbacks def get_current_state(self, room_id, event_type=None, state_key=""): - del_sql = ( - "SELECT event_id FROM redactions WHERE redacts = e.event_id " - "LIMIT 1" - ) - sql = ( - "SELECT e.*, (%(redacted)s) AS redacted FROM events as e " - 
"INNER JOIN current_state_events as c ON e.event_id = c.event_id " - "WHERE c.room_id = ? " - ) % { - "redacted": del_sql, - } + "SELECT e.*, r.event_id FROM events as e" + " LEFT JOIN redactions as r ON r.redacts = e.event_id" + " INNER JOIN current_state_events as c ON e.event_id = c.event_id" + " WHERE c.room_id = ? " + ) if event_type and state_key is not None: sql += " AND c.type = ? AND c.state_key = ? " From 054aa0d58c22ae76d3e094fc2fd6495456ffd2cf Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 16:17:27 +0100 Subject: [PATCH 31/33] Do access log using python's logging stuff, just under a separate logger name --- synapse/app/homeserver.py | 42 ++++++++++++++++++++++++++------------- synapse/config/logger.py | 11 ++++++++++ 2 files changed, 39 insertions(+), 14 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 0aa5c34c81..3ce5fa4a43 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,7 +35,7 @@ from twisted.enterprise import adbapi from twisted.web.resource import Resource from twisted.web.static import File from twisted.web.server import Site -from twisted.web.http import proxiedLogFormatter +from twisted.web.http import proxiedLogFormatter, combinedLogFormatter from synapse.http.server import JsonResource, RootRedirect from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource @@ -226,17 +226,13 @@ class SynapseHomeServer(HomeServer): def start_listening(self): config = self.get_config() - log_formatter = None - if config.captcha_ip_origin_is_x_forwarded: - log_formatter = proxiedLogFormatter - if not config.no_tls and config.bind_port is not None: reactor.listenSSL( config.bind_port, - Site( + SynapseSite( + "synapse.access.https", + config, self.root_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), self.tls_context_factory, interface=config.bind_host @@ -246,10 
+242,10 @@ class SynapseHomeServer(HomeServer): if config.unsecure_port is not None: reactor.listenTCP( config.unsecure_port, - Site( + SynapseSite( + "synapse.access.http", + config, self.root_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), interface=config.bind_host ) @@ -259,10 +255,10 @@ class SynapseHomeServer(HomeServer): if metrics_resource and config.metrics_port is not None: reactor.listenTCP( config.metrics_port, - Site( + SynapseSite( + "synapse.access.metrics", + config, metrics_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), interface="127.0.0.1", ) @@ -484,6 +480,24 @@ class SynapseService(service.Service): return self._port.stopListening() +class SynapseSite(Site): + """ + Subclass of a twisted http Site that does access logging with python's + standard logging + """ + def __init__(self, logger_name, config, resource, *args, **kwargs): + Site.__init__(self, resource, *args, **kwargs) + if config.captcha_ip_origin_is_x_forwarded: + self._log_formatter = proxiedLogFormatter + else: + self._log_formatter = combinedLogFormatter + self.access_logger = logging.getLogger(logger_name) + + def log(self, request): + line = self._log_formatter(self._logDateTime, request) + self.access_logger.info(line) + + def run(hs): def in_thread(): diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 559cbe7963..077f20497a 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -83,6 +83,17 @@ class LoggingConfig(Config): handler.addFilter(LoggingContextFilter(request="")) logger.addHandler(handler) + + if self.access_log_file: + access_logger = logging.getLogger('synapse.access') + # we log to both files by default + access_logger.propagate = 1 + access_log_handler = logging.handlers.RotatingFileHandler( + self.access_log_file, maxBytes=(1000 * 1000 * 100), backupCount=3 + ) + access_log_formatter = logging.Formatter('%(message)s') + 
access_log_handler.setFormatter(access_log_formatter) + access_logger.addHandler(access_log_handler) else: with open(self.log_config, 'r') as f: logging.config.dictConfig(yaml.load(f)) From 5b02f334519964ffae6812df5413fcdae84db6ba Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 16:20:12 +0100 Subject: [PATCH 32/33] Undo changes to logger config, ie. remove the access_log_file option: decision is to support this through log_config rather than adding an option. --- synapse/config/logger.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 077f20497a..247b324816 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -27,7 +27,6 @@ class LoggingConfig(Config): self.verbosity = int(args.verbose) if args.verbose else None self.log_config = self.abspath(args.log_config) self.log_file = self.abspath(args.log_file) - self.access_log_file = self.abspath(args.access_log_file) @classmethod def add_arguments(cls, parser): @@ -45,10 +44,6 @@ class LoggingConfig(Config): '--log-config', dest="log_config", default=None, help="Python logging config file" ) - logging_group.add_argument( - '--access-log-file', dest="access_log_file", default="access.log", - help="File to log server access to" - ) def setup_logging(self): log_format = ( @@ -83,17 +78,6 @@ class LoggingConfig(Config): handler.addFilter(LoggingContextFilter(request="")) logger.addHandler(handler) - - if self.access_log_file: - access_logger = logging.getLogger('synapse.access') - # we log to both files by default - access_logger.propagate = 1 - access_log_handler = logging.handlers.RotatingFileHandler( - self.access_log_file, maxBytes=(1000 * 1000 * 100), backupCount=3 - ) - access_log_formatter = logging.Formatter('%(message)s') - access_log_handler.setFormatter(access_log_formatter) - access_logger.addHandler(access_log_handler) else: with open(self.log_config, 'r') as f: logging.config.dictConfig(yaml.load(f)) 
From 69d40636517d0634e6e4c598d6897bf367b1d634 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Apr 2015 16:47:38 +0100 Subject: [PATCH 33/33] Add get_rooms_for_user cache --- synapse/storage/roommember.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 8ea5756d61..831169e220 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -65,6 +65,7 @@ class RoomMemberStore(SQLBaseStore): ) self.get_rooms_for_user.invalidate(target_user_id) + self.get_joined_hosts_for_room.invalidate(event.room_id) def get_room_member(self, user_id, room_id): """Retrieve the current state of a room member. @@ -162,6 +163,7 @@ class RoomMemberStore(SQLBaseStore): RoomsForUser(**r) for r in self.cursor_to_dict(txn) ] + @cached() def get_joined_hosts_for_room(self, room_id): return self.runInteraction( "get_joined_hosts_for_room",