From 39242090e30637a69f0bb304ee100a8080c394c8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 17:33:12 +0100 Subject: [PATCH 01/13] Add measure blocks to notifier --- synapse/notifier.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/synapse/notifier.py b/synapse/notifier.py index 40a148994f..b1de293dbc 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -20,6 +20,7 @@ from synapse.api.errors import AuthError from synapse.util.logutils import log_function from synapse.util.async import ObservableDeferred from synapse.util.logcontext import PreserveLoggingContext +from synapse.util.metrics import Measure from synapse.types import StreamToken from synapse.visibility import filter_events_for_client import synapse.metrics @@ -231,30 +232,32 @@ class Notifier(object): Will wake up all listeners for the given users and rooms. """ with PreserveLoggingContext(): - user_streams = set() + with Measure(self.clock, "on_new_event"): + user_streams = set() - for user in users: - user_stream = self.user_to_user_stream.get(str(user)) - if user_stream is not None: - user_streams.add(user_stream) + for user in users: + user_stream = self.user_to_user_stream.get(str(user)) + if user_stream is not None: + user_streams.add(user_stream) - for room in rooms: - user_streams |= self.room_to_user_streams.get(room, set()) + for room in rooms: + user_streams |= self.room_to_user_streams.get(room, set()) - time_now_ms = self.clock.time_msec() - for user_stream in user_streams: - try: - user_stream.notify(stream_key, new_token, time_now_ms) - except: - logger.exception("Failed to notify listener") + time_now_ms = self.clock.time_msec() + for user_stream in user_streams: + try: + user_stream.notify(stream_key, new_token, time_now_ms) + except: + logger.exception("Failed to notify listener") - self.notify_replication() + self.notify_replication() def on_new_replication_data(self): """Used to inform replication listeners that something has happend without waking up any of the normal user event streams""" with PreserveLoggingContext(): - self.notify_replication() + with Measure(self.clock, "on_new_replication_data"): + self.notify_replication() @defer.inlineCallbacks def wait_for_events(self, user_id, timeout, callback, room_ids=None, From 2426c2f21a4c026806676de4ea88b70b9520659e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 17:38:15 +0100 Subject: [PATCH 02/13] Measure keyrings --- synapse/crypto/keyring.py | 70 ++++++++++++++++++++------------------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 7cd11cfae7..0bae6fdeed 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -22,6 +22,7 @@ from synapse.util.logcontext import ( preserve_context_over_deferred, preserve_context_over_fn, PreserveLoggingContext, preserve_fn ) +from synapse.util.metrics import Measure from twisted.internet import defer @@ -115,42 +116,43 @@ class Keyring(object): @defer.inlineCallbacks def handle_key_deferred(verify_request): - server_name = verify_request.server_name - try: - _, key_id, verify_key = yield verify_request.deferred - except IOError as e: - logger.warn( - "Got IOError when downloading keys for %s: %s %s", - server_name, type(e).__name__, str(e.message), - ) - raise SynapseError( - 502, - "Error downloading keys for %s" % (server_name,), - Codes.UNAUTHORIZED, - ) - except Exception as e: - logger.exception( - "Got Exception when 
downloading keys for %s: %s %s", - server_name, type(e).__name__, str(e.message), - ) - raise SynapseError( - 401, - "No key for %s with id %s" % (server_name, key_ids), - Codes.UNAUTHORIZED, - ) + with Measure(self.clock, "handle_key_deferred"): + server_name = verify_request.server_name + try: + _, key_id, verify_key = yield verify_request.deferred + except IOError as e: + logger.warn( + "Got IOError when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) + raise SynapseError( + 502, + "Error downloading keys for %s" % (server_name,), + Codes.UNAUTHORIZED, + ) + except Exception as e: + logger.exception( + "Got Exception when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) + raise SynapseError( + 401, + "No key for %s with id %s" % (server_name, key_ids), + Codes.UNAUTHORIZED, + ) - json_object = verify_request.json_object + json_object = verify_request.json_object - try: - verify_signed_json(json_object, server_name, verify_key) - except: - raise SynapseError( - 401, - "Invalid signature for server %s with key %s:%s" % ( - server_name, verify_key.alg, verify_key.version - ), - Codes.UNAUTHORIZED, - ) + try: + verify_signed_json(json_object, server_name, verify_key) + except: + raise SynapseError( + 401, + "Invalid signature for server %s with key %s:%s" % ( + server_name, verify_key.alg, verify_key.version + ), + Codes.UNAUTHORIZED, + ) server_to_deferred = { server_name: defer.Deferred() From 47dd8f02a16ea3f29f4233925d0616eab5e37d2a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 17:40:23 +0100 Subject: [PATCH 03/13] Measure _get_event_from_row --- synapse/storage/events.py | 86 ++++++++++++++++++++------------------- 1 file changed, 44 insertions(+), 42 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index ad026b5e0b..aa23fc00f5 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -22,6 +22,7 @@ from synapse.events.utils import prune_event from synapse.util.async import ObservableDeferred from synapse.util.logcontext import preserve_fn, PreserveLoggingContext from synapse.util.logutils import log_function +from synapse.util.metrics import Measure from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError @@ -1132,56 +1133,57 @@ class EventsStore(SQLBaseStore): @defer.inlineCallbacks def _get_event_from_row(self, internal_metadata, js, redacted, rejected_reason=None): - d = json.loads(js) - internal_metadata = json.loads(internal_metadata) + with Measure(self._clock, "_get_event_from_row"): + d = json.loads(js) + internal_metadata = json.loads(internal_metadata) - if rejected_reason: - rejected_reason = yield self._simple_select_one_onecol( - table="rejections", - keyvalues={"event_id": rejected_reason}, - retcol="reason", - desc="_get_event_from_row_rejected_reason", + if rejected_reason: + rejected_reason = yield self._simple_select_one_onecol( + table="rejections", + keyvalues={"event_id": rejected_reason}, + retcol="reason", + desc="_get_event_from_row_rejected_reason", + ) + + original_ev = FrozenEvent( + d, + internal_metadata_dict=internal_metadata, + rejected_reason=rejected_reason, ) - original_ev = FrozenEvent( - d, - internal_metadata_dict=internal_metadata, - rejected_reason=rejected_reason, - ) + redacted_event = None + if redacted: + redacted_event = prune_event(original_ev) - redacted_event = None - if redacted: - redacted_event = prune_event(original_ev) + redaction_id = yield 
self._simple_select_one_onecol( + table="redactions", + keyvalues={"redacts": redacted_event.event_id}, + retcol="event_id", + desc="_get_event_from_row_redactions", + ) - redaction_id = yield self._simple_select_one_onecol( - table="redactions", - keyvalues={"redacts": redacted_event.event_id}, - retcol="event_id", - desc="_get_event_from_row_redactions", + redacted_event.unsigned["redacted_by"] = redaction_id + # Get the redaction event. + + because = yield self.get_event( + redaction_id, + check_redacted=False, + allow_none=True, + ) + + if because: + # It's fine to do add the event directly, since get_pdu_json + # will serialise this field correctly + redacted_event.unsigned["redacted_because"] = because + + cache_entry = _EventCacheEntry( + event=original_ev, + redacted_event=redacted_event, ) - redacted_event.unsigned["redacted_by"] = redaction_id - # Get the redaction event. + self._get_event_cache.prefill((original_ev.event_id,), cache_entry) - because = yield self.get_event( - redaction_id, - check_redacted=False, - allow_none=True, - ) - - if because: - # It's fine to do add the event directly, since get_pdu_json - # will serialise this field correctly - redacted_event.unsigned["redacted_because"] = because - - cache_entry = _EventCacheEntry( - event=original_ev, - redacted_event=redacted_event, - ) - - self._get_event_cache.prefill((original_ev.event_id,), cache_entry) - - defer.returnValue(cache_entry) + defer.returnValue(cache_entry) @defer.inlineCallbacks def count_daily_messages(self): From 39b900b31614ba21e6a24abafba33a51b763534b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 17:51:08 +0100 Subject: [PATCH 04/13] Measure http.server render --- synapse/http/server.py | 58 ++++++++++++++++++++++-------------------- 1 file changed, 30 insertions(+), 28 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 2b3c05a740..f3357d2ff7 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -19,6 +19,7 @@ from synapse.api.errors import ( ) from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.caches import intern_dict +from synapse.util.metrics import Measure import synapse.metrics import synapse.events @@ -234,41 +235,42 @@ class JsonResource(HttpServer, resource.Resource): request_metrics = RequestMetrics() request_metrics.start(self.clock) - # Loop through all the registered callbacks to check if the method - # and path regex match - for path_entry in self.path_regexs.get(request.method, []): - m = path_entry.pattern.match(request.path) - if not m: - continue + with Measure(self.clock, "http.render"): + # Loop through all the registered callbacks to check if the method + # and path regex match + for path_entry in self.path_regexs.get(request.method, []): + m = path_entry.pattern.match(request.path) + if not m: + continue - # We found a match! Trigger callback and then return the - # returned response. We pass both the request and any - # matched groups from the regex to the callback. + # We found a match! Trigger callback and then return the + # returned response. We pass both the request and any + # matched groups from the regex to the callback. 
- callback = path_entry.callback + callback = path_entry.callback - servlet_instance = getattr(callback, "__self__", None) - if servlet_instance is not None: - servlet_classname = servlet_instance.__class__.__name__ - else: - servlet_classname = "%r" % callback + servlet_instance = getattr(callback, "__self__", None) + if servlet_instance is not None: + servlet_classname = servlet_instance.__class__.__name__ + else: + servlet_classname = "%r" % callback - kwargs = intern_dict({ - name: urllib.unquote(value).decode("UTF-8") if value else value - for name, value in m.groupdict().items() - }) + kwargs = intern_dict({ + name: urllib.unquote(value).decode("UTF-8") if value else value + for name, value in m.groupdict().items() + }) - callback_return = yield callback(request, **kwargs) - if callback_return is not None: - code, response = callback_return - self._send_response(request, code, response) + callback_return = yield callback(request, **kwargs) + if callback_return is not None: + code, response = callback_return + self._send_response(request, code, response) - try: - request_metrics.stop(self.clock, request, servlet_classname) - except: - pass + try: + request_metrics.stop(self.clock, request, servlet_classname) + except: + pass - return + return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. raise UnrecognizedRequestError() From 04fc8bbcb04673f73c0783bbcdfa3b54cbcb4615 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 17:56:44 +0100 Subject: [PATCH 05/13] Update keyring Measure --- synapse/crypto/keyring.py | 162 +++++++++++++++++++------------------- 1 file changed, 81 insertions(+), 81 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 0bae6fdeed..1735ca9345 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -116,43 +116,42 @@ class Keyring(object): @defer.inlineCallbacks def handle_key_deferred(verify_request): - with Measure(self.clock, "handle_key_deferred"): - server_name = verify_request.server_name - try: - _, key_id, verify_key = yield verify_request.deferred - except IOError as e: - logger.warn( - "Got IOError when downloading keys for %s: %s %s", - server_name, type(e).__name__, str(e.message), - ) - raise SynapseError( - 502, - "Error downloading keys for %s" % (server_name,), - Codes.UNAUTHORIZED, - ) - except Exception as e: - logger.exception( - "Got Exception when downloading keys for %s: %s %s", - server_name, type(e).__name__, str(e.message), - ) - raise SynapseError( - 401, - "No key for %s with id %s" % (server_name, key_ids), - Codes.UNAUTHORIZED, - ) + server_name = verify_request.server_name + try: + _, key_id, verify_key = yield verify_request.deferred + except IOError as e: + logger.warn( + "Got IOError when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) + raise SynapseError( + 502, + "Error downloading keys for %s" % (server_name,), + Codes.UNAUTHORIZED, + ) + except Exception as e: + logger.exception( + "Got Exception when downloading keys for %s: %s %s", + server_name, type(e).__name__, str(e.message), + ) + raise SynapseError( + 401, + "No key for %s with id %s" % (server_name, key_ids), + Codes.UNAUTHORIZED, + ) - json_object = verify_request.json_object + json_object = verify_request.json_object - try: - verify_signed_json(json_object, server_name, verify_key) - except: - raise SynapseError( - 401, - "Invalid signature for server %s with key %s:%s" % ( - server_name, verify_key.alg, verify_key.version - ), - Codes.UNAUTHORIZED, - ) + 
try: + verify_signed_json(json_object, server_name, verify_key) + except: + raise SynapseError( + 401, + "Invalid signature for server %s with key %s:%s" % ( + server_name, verify_key.alg, verify_key.version + ), + Codes.UNAUTHORIZED, + ) server_to_deferred = { server_name: defer.Deferred() @@ -245,59 +244,60 @@ class Keyring(object): @defer.inlineCallbacks def do_iterations(): - merged_results = {} + with Measure(self.clock, "get_server_verify_keys"): + merged_results = {} - missing_keys = {} - for verify_request in verify_requests: - missing_keys.setdefault(verify_request.server_name, set()).update( - verify_request.key_ids - ) - - for fn in key_fetch_fns: - results = yield fn(missing_keys.items()) - merged_results.update(results) - - # We now need to figure out which verify requests we have keys - # for and which we don't missing_keys = {} - requests_missing_keys = [] for verify_request in verify_requests: - server_name = verify_request.server_name - result_keys = merged_results[server_name] + missing_keys.setdefault(verify_request.server_name, set()).update( + verify_request.key_ids + ) - if verify_request.deferred.called: - # We've already called this deferred, which probably - # means that we've already found a key for it. - continue + for fn in key_fetch_fns: + results = yield fn(missing_keys.items()) + merged_results.update(results) - for key_id in verify_request.key_ids: - if key_id in result_keys: - with PreserveLoggingContext(): - verify_request.deferred.callback(( - server_name, - key_id, - result_keys[key_id], - )) - break - else: - # The else block is only reached if the loop above - # doesn't break. - missing_keys.setdefault(server_name, set()).update( - verify_request.key_ids - ) - requests_missing_keys.append(verify_request) + # We now need to figure out which verify requests we have keys + # for and which we don't + missing_keys = {} + requests_missing_keys = [] + for verify_request in verify_requests: + server_name = verify_request.server_name + result_keys = merged_results[server_name] - if not missing_keys: - break + if verify_request.deferred.called: + # We've already called this deferred, which probably + # means that we've already found a key for it. + continue - for verify_request in requests_missing_keys.values(): - verify_request.deferred.errback(SynapseError( - 401, - "No key for %s with id %s" % ( - verify_request.server_name, verify_request.key_ids, - ), - Codes.UNAUTHORIZED, - )) + for key_id in verify_request.key_ids: + if key_id in result_keys: + with PreserveLoggingContext(): + verify_request.deferred.callback(( + server_name, + key_id, + result_keys[key_id], + )) + break + else: + # The else block is only reached if the loop above + # doesn't break. 
+ missing_keys.setdefault(server_name, set()).update( + verify_request.key_ids + ) + requests_missing_keys.append(verify_request) + + if not missing_keys: + break + + for verify_request in requests_missing_keys.values(): + verify_request.deferred.errback(SynapseError( + 401, + "No key for %s with id %s" % ( + verify_request.server_name, verify_request.key_ids, + ), + Codes.UNAUTHORIZED, + )) def on_err(err): for verify_request in verify_requests: From 37adde32dce5b51d2d273525f89a6a494dd00bb3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 18:02:19 +0100 Subject: [PATCH 06/13] Move defer.returnValue out of Measure --- synapse/storage/events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index aa23fc00f5..97aef25321 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1183,7 +1183,7 @@ class EventsStore(SQLBaseStore): self._get_event_cache.prefill((original_ev.event_id,), cache_entry) - defer.returnValue(cache_entry) + defer.returnValue(cache_entry) @defer.inlineCallbacks def count_daily_messages(self): From afbf6b33fc22896df1048d8d8e1f8c790411126b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 18:06:31 +0100 Subject: [PATCH 07/13] defer.returnValue must not be called within Measure --- synapse/handlers/sync.py | 14 +++++++------- synapse/notifier.py | 3 +-- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 0ee4ebe504..c8dfd02e7b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -464,10 +464,10 @@ class SyncHandler(object): else: state = {} - defer.returnValue({ - (e.type, e.state_key): e - for e in sync_config.filter_collection.filter_room_state(state.values()) - }) + defer.returnValue({ + (e.type, e.state_key): e + for e in sync_config.filter_collection.filter_room_state(state.values()) + }) @defer.inlineCallbacks def unread_notifs_for_room_id(self, room_id, sync_config): @@ -485,9 +485,9 @@ class SyncHandler(object): ) defer.returnValue(notifs) - # There is no new information in this period, so your notification - # count is whatever it was last time. - defer.returnValue(None) + # There is no new information in this period, so your notification + # count is whatever it was last time. 
+ defer.returnValue(None) @defer.inlineCallbacks def generate_sync_result(self, sync_config, since_token=None, full_state=False): diff --git a/synapse/notifier.py b/synapse/notifier.py index b1de293dbc..c48024096d 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -256,8 +256,7 @@ class Notifier(object): """Used to inform replication listeners that something has happend without waking up any of the normal user event streams""" with PreserveLoggingContext(): - with Measure(self.clock, "on_new_replication_data"): - self.notify_replication() + self.notify_replication() @defer.inlineCallbacks def wait_for_events(self, user_id, timeout, callback, room_ids=None, From 8731197e54f4f004f19f87fd5fa5b4ed7ad5b8be Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 18:13:07 +0100 Subject: [PATCH 08/13] Only abort Measure on Exceptions --- synapse/util/metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 76f301f549..4ea930d3e8 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -87,7 +87,7 @@ class Measure(object): self.db_txn_duration = self.start_context.db_txn_duration def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None or not self.start_context: + if isinstance(exc_type, Exception) or not self.start_context: return duration = self.clock.time_msec() - self.start From 6d1a94d218cca7a039e77318951833c9e45dc043 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 18:40:31 +0100 Subject: [PATCH 09/13] Remove redundant measure --- synapse/http/server.py | 57 +++++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index f3357d2ff7..74840de55c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -235,42 +235,41 @@ class JsonResource(HttpServer, resource.Resource): request_metrics = RequestMetrics() request_metrics.start(self.clock) - with Measure(self.clock, "http.render"): - # Loop through all the registered callbacks to check if the method - # and path regex match - for path_entry in self.path_regexs.get(request.method, []): - m = path_entry.pattern.match(request.path) - if not m: - continue + # Loop through all the registered callbacks to check if the method + # and path regex match + for path_entry in self.path_regexs.get(request.method, []): + m = path_entry.pattern.match(request.path) + if not m: + continue - # We found a match! Trigger callback and then return the - # returned response. We pass both the request and any - # matched groups from the regex to the callback. + # We found a match! Trigger callback and then return the + # returned response. We pass both the request and any + # matched groups from the regex to the callback. 
- callback = path_entry.callback + callback = path_entry.callback - servlet_instance = getattr(callback, "__self__", None) - if servlet_instance is not None: - servlet_classname = servlet_instance.__class__.__name__ - else: - servlet_classname = "%r" % callback + servlet_instance = getattr(callback, "__self__", None) + if servlet_instance is not None: + servlet_classname = servlet_instance.__class__.__name__ + else: + servlet_classname = "%r" % callback - kwargs = intern_dict({ - name: urllib.unquote(value).decode("UTF-8") if value else value - for name, value in m.groupdict().items() - }) + kwargs = intern_dict({ + name: urllib.unquote(value).decode("UTF-8") if value else value + for name, value in m.groupdict().items() + }) - callback_return = yield callback(request, **kwargs) - if callback_return is not None: - code, response = callback_return - self._send_response(request, code, response) + callback_return = yield callback(request, **kwargs) + if callback_return is not None: + code, response = callback_return + self._send_response(request, code, response) - try: - request_metrics.stop(self.clock, request, servlet_classname) - except: - pass + try: + request_metrics.stop(self.clock, request, servlet_classname) + except: + pass - return + return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. raise UnrecognizedRequestError() From 27e0178da9e5c61c5221faf4a7ad1b5be5684531 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 19 Aug 2016 18:49:37 +0100 Subject: [PATCH 10/13] Add a top level measure --- synapse/http/server.py | 89 +++++++++++++++++++++--------------------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 74840de55c..2cba13e69f 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -104,54 +104,55 @@ def wrap_request_handler(request_handler, report_metrics): _next_request_id += 1 with LoggingContext(request_id) as request_context: - if report_metrics: - request_metrics = RequestMetrics() - request_metrics.start(self.clock) + with Measure(self.clock, "wrapped_request_handler"): + if report_metrics: + request_metrics = RequestMetrics() + request_metrics.start(self.clock) - request_context.request = request_id - with request.processing(): - try: - with PreserveLoggingContext(request_context): - yield request_handler(self, request) - except CodeMessageException as e: - code = e.code - if isinstance(e, SynapseError): - logger.info( - "%s SynapseError: %s - %s", request, code, e.msg - ) - else: - logger.exception(e) - outgoing_responses_counter.inc(request.method, str(code)) - respond_with_json( - request, code, cs_exception(e), send_cors=True, - pretty_print=_request_user_agent_is_curl(request), - version_string=self.version_string, - ) - except: - logger.exception( - "Failed handle request %s.%s on %r: %r", - request_handler.__module__, - request_handler.__name__, - self, - request - ) - respond_with_json( - request, - 500, - { - "error": "Internal server error", - "errcode": Codes.UNKNOWN, - }, - send_cors=True - ) - finally: + request_context.request = request_id + with request.processing(): try: - if report_metrics: - request_metrics.stop( - self.clock, request, self.__class__.__name__ + with PreserveLoggingContext(request_context): + yield request_handler(self, request) + except CodeMessageException as e: + code = e.code + if isinstance(e, SynapseError): + logger.info( + "%s SynapseError: %s - %s", request, code, e.msg ) + else: + logger.exception(e) + 
outgoing_responses_counter.inc(request.method, str(code)) + respond_with_json( + request, code, cs_exception(e), send_cors=True, + pretty_print=_request_user_agent_is_curl(request), + version_string=self.version_string, + ) except: - pass + logger.exception( + "Failed handle request %s.%s on %r: %r", + request_handler.__module__, + request_handler.__name__, + self, + request + ) + respond_with_json( + request, + 500, + { + "error": "Internal server error", + "errcode": Codes.UNKNOWN, + }, + send_cors=True + ) + finally: + try: + if report_metrics: + request_metrics.stop( + self.clock, request, self.__class__.__name__ + ) + except: + pass return wrapped_request_handler From a093fab253ca0e67d9ae861e5b8cf393ea7783f9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 22 Aug 2016 10:18:12 +0100 Subject: [PATCH 11/13] Use top level measure --- synapse/http/server.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 2cba13e69f..d2efd9f3e4 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -222,7 +222,7 @@ class JsonResource(HttpServer, resource.Resource): # It does its own metric reporting because _async_render dispatches to # a callback and it's the class name of that callback we want to report # against rather than the JsonResource itself. - @request_handler(report_metrics=False) + @request_handler() @defer.inlineCallbacks def _async_render(self, request): """ This gets called from render() every time someone sends us a request. @@ -233,9 +233,6 @@ class JsonResource(HttpServer, resource.Resource): self._send_response(request, 200, {}) return - request_metrics = RequestMetrics() - request_metrics.start(self.clock) - # Loop through all the registered callbacks to check if the method # and path regex match for path_entry in self.path_regexs.get(request.method, []): @@ -249,12 +246,6 @@ class JsonResource(HttpServer, resource.Resource): callback = path_entry.callback - servlet_instance = getattr(callback, "__self__", None) - if servlet_instance is not None: - servlet_classname = servlet_instance.__class__.__name__ - else: - servlet_classname = "%r" % callback - kwargs = intern_dict({ name: urllib.unquote(value).decode("UTF-8") if value else value for name, value in m.groupdict().items() @@ -265,11 +256,6 @@ class JsonResource(HttpServer, resource.Resource): code, response = callback_return self._send_response(request, code, response) - try: - request_metrics.stop(self.clock, request, servlet_classname) - except: - pass - return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. 
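
The pattern the series has converged on by this point is worth spelling out: Measure wraps the body of a handler as a context manager, and defer.returnValue stays outside the "with" block, because inlineCallbacks hands the return value back by raising a control-flow exception, which would otherwise be the exception Measure sees on exit (patches 06-08). Below is a minimal sketch of that shape, not code from the patches: Measure and Clock are simplified stand-ins for the synapse.util classes, and Handler/load_thing are hypothetical.

    import time

    from twisted.internet import defer


    class Clock(object):
        """Simplified stand-in for synapse's clock."""

        def time_msec(self):
            return int(time.time() * 1000)


    class Measure(object):
        """Simplified stand-in for synapse.util.metrics.Measure: same call
        shape, but it only times the block and prints the duration."""

        def __init__(self, clock, name):
            self.clock = clock
            self.name = name

        def __enter__(self):
            self.start = self.clock.time_msec()

        def __exit__(self, exc_type, exc_val, exc_tb):
            # Patch 08's intent: only an ordinary Exception aborts the
            # measurement, so a control-flow exception escaping the block
            # does not silently discard it.
            if exc_type is not None and issubclass(exc_type, Exception):
                return
            print("%s took %dms" % (self.name, self.clock.time_msec() - self.start))


    class Handler(object):
        def __init__(self):
            self.clock = Clock()

        @defer.inlineCallbacks
        def load_thing(self, key):
            with Measure(self.clock, "load_thing"):
                value = yield defer.succeed({"key": key})
                result = {"fetched": value}

            # defer.returnValue raises internally to hand the result back to
            # inlineCallbacks, so it is called only after the Measure block
            # has exited cleanly (patches 06 and 07).
            defer.returnValue(result)

Calling Handler().load_thing("a_key") fires synchronously here (defer.succeed is already resolved), prints the timing line, and returns a Deferred carrying the result.
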
From 8c52160b07968ed1070452d902e758b01e276bbd Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 22 Aug 2016 10:44:45 +0100 Subject: [PATCH 12/13] Allow request handlers to override metric name --- synapse/http/server.py | 52 +++++++++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 21 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index d2efd9f3e4..db2127cf8f 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -75,12 +75,12 @@ response_db_txn_duration = metrics.register_distribution( _next_request_id = 0 -def request_handler(report_metrics=True): +def request_handler(include_metrics=False): """Decorator for ``wrap_request_handler``""" - return lambda request_handler: wrap_request_handler(request_handler, report_metrics) + return lambda request_handler: wrap_request_handler(request_handler, include_metrics) -def wrap_request_handler(request_handler, report_metrics): +def wrap_request_handler(request_handler, include_metrics=False): """Wraps a method that acts as a request handler with the necessary logging and exception handling. @@ -105,15 +105,17 @@ def wrap_request_handler(request_handler, report_metrics): with LoggingContext(request_id) as request_context: with Measure(self.clock, "wrapped_request_handler"): - if report_metrics: - request_metrics = RequestMetrics() - request_metrics.start(self.clock) + request_metrics = RequestMetrics() + request_metrics.start(self.clock, name=self.__class__.__name__) request_context.request = request_id with request.processing(): try: with PreserveLoggingContext(request_context): - yield request_handler(self, request) + if include_metrics: + yield request_handler(self, request, request_metrics) + else: + yield request_handler(self, request) except CodeMessageException as e: code = e.code if isinstance(e, SynapseError): @@ -147,10 +149,9 @@ def wrap_request_handler(request_handler, report_metrics): ) finally: try: - if report_metrics: - request_metrics.stop( - self.clock, request, self.__class__.__name__ - ) + request_metrics.stop( + self.clock, request, self.__class__.__name__ + ) except: pass return wrapped_request_handler @@ -222,9 +223,9 @@ class JsonResource(HttpServer, resource.Resource): # It does its own metric reporting because _async_render dispatches to # a callback and it's the class name of that callback we want to report # against rather than the JsonResource itself. - @request_handler() + @request_handler(include_metrics=True) @defer.inlineCallbacks - def _async_render(self, request): + def _async_render(self, request, request_metrics): """ This gets called from render() every time someone sends us a request. This checks if anyone has registered a callback for that method and path. @@ -256,6 +257,14 @@ class JsonResource(HttpServer, resource.Resource): code, response = callback_return self._send_response(request, code, response) + servlet_instance = getattr(callback, "__self__", None) + if servlet_instance is not None: + servlet_classname = servlet_instance.__class__.__name__ + else: + servlet_classname = "%r" % callback + + request_metrics.name = servlet_classname + return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. 
@@ -286,11 +295,12 @@ class JsonResource(HttpServer, resource.Resource): class RequestMetrics(object): - def start(self, clock): + def start(self, clock, name): self.start = clock.time_msec() self.start_context = LoggingContext.current_context() + self.name = name - def stop(self, clock, request, servlet_classname): + def stop(self, clock, request): context = LoggingContext.current_context() tag = "" @@ -304,26 +314,26 @@ class RequestMetrics(object): ) return - incoming_requests_counter.inc(request.method, servlet_classname, tag) + incoming_requests_counter.inc(request.method, self.name, tag) response_timer.inc_by( clock.time_msec() - self.start, request.method, - servlet_classname, tag + self.name, tag ) ru_utime, ru_stime = context.get_resource_usage() response_ru_utime.inc_by( - ru_utime, request.method, servlet_classname, tag + ru_utime, request.method, self.name, tag ) response_ru_stime.inc_by( - ru_stime, request.method, servlet_classname, tag + ru_stime, request.method, self.name, tag ) response_db_txn_count.inc_by( - context.db_txn_count, request.method, servlet_classname, tag + context.db_txn_count, request.method, self.name, tag ) response_db_txn_duration.inc_by( - context.db_txn_duration, request.method, servlet_classname, tag + context.db_txn_duration, request.method, self.name, tag ) From 33f3624ff76dc71e73c997ffef0059b1d004a5e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 22 Aug 2016 10:49:31 +0100 Subject: [PATCH 13/13] Add exception logging. Fix typo --- synapse/http/server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index db2127cf8f..168e53ce0c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -150,10 +150,10 @@ def wrap_request_handler(request_handler, include_metrics=False): finally: try: request_metrics.stop( - self.clock, request, self.__class__.__name__ + self.clock, request ) - except: - pass + except Exception as e: + logger.warn("Failed to stop metrics: %r", e) return wrapped_request_handler
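
Taken together, the last two patches move metric naming fully into wrap_request_handler: RequestMetrics is always created there with the wrapper's class name as the default, handlers that opt in via include_metrics=True receive the object, and JsonResource._async_render overwrites request_metrics.name with the servlet class that actually handled the request. A condensed sketch of that flow follows; it is not the real synapse code, the classes are simplified stand-ins, and FakeRequest plus the servlet name are hypothetical.

    import time

    from twisted.internet import defer


    class Clock(object):
        def time_msec(self):
            return int(time.time() * 1000)


    class RequestMetrics(object):
        """Simplified stand-in: records a name and start time, and reports
        against whatever the name is by the time stop() runs."""

        def start(self, clock, name):
            self.start_ms = clock.time_msec()
            self.name = name

        def stop(self, clock, request):
            print("%s %s took %dms" % (
                request.method, self.name, clock.time_msec() - self.start_ms,
            ))


    def request_handler(include_metrics=False):
        """Condensed version of the patched decorator: the wrapper owns the
        RequestMetrics object and optionally hands it to the handler so the
        handler can rename the metric before it is reported."""
        def wrap(handler):
            @defer.inlineCallbacks
            def wrapped(self, request):
                request_metrics = RequestMetrics()
                request_metrics.start(self.clock, name=self.__class__.__name__)
                try:
                    if include_metrics:
                        yield handler(self, request, request_metrics)
                    else:
                        yield handler(self, request)
                finally:
                    try:
                        request_metrics.stop(self.clock, request)
                    except Exception as e:
                        print("Failed to stop metrics: %r" % (e,))
            return wrapped
        return wrap


    class FakeRequest(object):
        method = "GET"


    class JsonResourceLike(object):
        def __init__(self):
            self.clock = Clock()

        @request_handler(include_metrics=True)
        @defer.inlineCallbacks
        def _async_render(self, request, request_metrics):
            # Dispatch would happen here; afterwards the metric is reported
            # against the servlet that handled the request, not against this
            # resource (patch 12).
            yield defer.succeed(None)
            request_metrics.name = "HypotheticalServlet"

Calling JsonResourceLike()._async_render(FakeRequest()) prints a line naming HypotheticalServlet rather than JsonResourceLike, which is the behaviour the real _async_render now gets by setting request_metrics.name after dispatch.
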