Merge pull request #4075 from matrix-org/rav/fix_pusher_logcontexts

Clean up the way logcontexts and threads work in the pushers
commit e0b9d5f0af by Richard van der Hoff, 2018-10-24 09:53:57 +01:00 (committed by GitHub)
8 changed files with 142 additions and 141 deletions

changelog.d/4075.misc (new file)

@@ -0,0 +1 @@
+Clean up threading and logcontexts in pushers


@@ -161,11 +161,11 @@ class PusherReplicationHandler(ReplicationClientHandler):
                 else:
                     yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
             elif stream_name == "events":
-                self.pusher_pool.on_new_notifications(
+                yield self.pusher_pool.on_new_notifications(
                     token, token,
                 )
             elif stream_name == "receipts":
-                self.pusher_pool.on_new_receipts(
+                yield self.pusher_pool.on_new_receipts(
                     token, token, set(row.room_id for row in rows)
                 )
         except Exception:
@@ -183,7 +183,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
     def start_pusher(self, user_id, app_id, pushkey):
         key = "%s:%s" % (app_id, pushkey)
         logger.info("Starting pusher %r / %r", user_id, key)
-        return self.pusher_pool._refresh_pusher(app_id, pushkey, user_id)
+        return self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)


 def start(config_options):
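
Why the extra yields in this worker (and in the handlers below): after this change, PusherPool.on_new_notifications and on_new_receipts are inlineCallbacks methods, so they return Deferreds that the caller is expected to wait on rather than fire and forget. A minimal sketch of that calling convention, with illustrative class and method names and the body trimmed to the relevant branches:

    from twisted.internet import defer

    class ReplicationCallerSketch(object):
        """Illustrative only: how callers now drive the pusher pool."""

        def __init__(self, pusher_pool):
            self.pusher_pool = pusher_pool

        @defer.inlineCallbacks
        def poke_pushers(self, stream_name, token, rows):
            if stream_name == "events":
                # on_new_notifications returns a Deferred; yielding it keeps
                # the dispatch inside this caller's logcontext until it completes.
                yield self.pusher_pool.on_new_notifications(token, token)
            elif stream_name == "receipts":
                yield self.pusher_pool.on_new_receipts(
                    token, token, set(row.room_id for row in rows)
                )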


@@ -2520,7 +2520,7 @@ class FederationHandler(BaseHandler):
         if not backfilled:  # Never notify for backfilled events
             for event, _ in event_and_contexts:
-                self._notify_persisted_event(event, max_stream_id)
+                yield self._notify_persisted_event(event, max_stream_id)

     def _notify_persisted_event(self, event, max_stream_id):
         """Checks to see if notifier/pushers should be notified about the

@@ -2553,7 +2553,7 @@ class FederationHandler(BaseHandler):
             extra_users=extra_users
         )

-        self.pusher_pool.on_new_notifications(
+        return self.pusher_pool.on_new_notifications(
             event_stream_id, max_stream_id,
         )


@@ -779,7 +779,7 @@ class EventCreationHandler(object):
             event, context=context
         )

-        self.pusher_pool.on_new_notifications(
+        yield self.pusher_pool.on_new_notifications(
             event_stream_id, max_stream_id,
         )


@@ -119,7 +119,7 @@ class ReceiptsHandler(BaseHandler):
             "receipt_key", max_batch_id, rooms=affected_room_ids
         )

         # Note that the min here shouldn't be relied upon to be accurate.
-        self.hs.get_pusherpool().on_new_receipts(
+        yield self.hs.get_pusherpool().on_new_receipts(
             min_batch_id, max_batch_id, affected_room_ids,
         )


@@ -18,8 +18,7 @@ import logging
 from twisted.internet import defer
 from twisted.internet.error import AlreadyCalled, AlreadyCancelled

-from synapse.util.logcontext import LoggingContext
-from synapse.util.metrics import Measure
+from synapse.metrics.background_process_metrics import run_as_background_process

 logger = logging.getLogger(__name__)

@@ -71,18 +70,11 @@ class EmailPusher(object):
         # See httppusher
         self.max_stream_ordering = None

-        self.processing = False
+        self._is_processing = False

-    @defer.inlineCallbacks
     def on_started(self):
         if self.mailer is not None:
-            try:
-                self.throttle_params = yield self.store.get_throttle_params_by_room(
-                    self.pusher_id
-                )
-                yield self._process()
-            except Exception:
-                logger.exception("Error starting email pusher")
+            self._start_processing()

     def on_stop(self):
         if self.timed_call:
@@ -92,31 +84,40 @@ class EmailPusher(object):
                 pass
             self.timed_call = None

-    @defer.inlineCallbacks
     def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
         self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
-        yield self._process()
+        self._start_processing()

     def on_new_receipts(self, min_stream_id, max_stream_id):
         # We could wake up and cancel the timer but there tend to be quite a
         # lot of read receipts so it's probably less work to just let the
         # timer fire
-        return defer.succeed(None)
+        pass

-    @defer.inlineCallbacks
     def on_timer(self):
         self.timed_call = None
-        yield self._process()
+        self._start_processing()

+    def _start_processing(self):
+        if self._is_processing:
+            return
+
+        run_as_background_process("emailpush.process", self._process)

     @defer.inlineCallbacks
     def _process(self):
-        if self.processing:
-            return
+        # we should never get here if we are already processing
+        assert not self._is_processing

-        with LoggingContext("emailpush._process"):
-            with Measure(self.clock, "emailpush._process"):
         try:
-            self.processing = True
+            self._is_processing = True
+
+            if self.throttle_params is None:
+                # this is our first loop: load up the throttle params
+                self.throttle_params = yield self.store.get_throttle_params_by_room(
+                    self.pusher_id
+                )
+
             # if the max ordering changes while we're running _unsafe_process,
             # call it again, and so on until we've caught up.
             while True:

@@ -128,7 +129,7 @@ class EmailPusher(object):
                 if self.max_stream_ordering == starting_max_ordering:
                     break
         finally:
-            self.processing = False
+            self._is_processing = False

     @defer.inlineCallbacks
     def _unsafe_process(self):
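
The core pattern in both pushers after this change is a coalescing background loop: the notification callbacks synchronously record a new high-water mark and poke _start_processing, which spawns at most one _process loop via run_as_background_process (which supplies its own logcontext and metrics, replacing the manual LoggingContext/Measure pair). The loop re-reads the high-water mark before exiting, so events that arrive mid-run are not lost. A condensed, illustrative sketch of the pattern, with _unsafe_process stubbed out:

    from twisted.internet import defer

    from synapse.metrics.background_process_metrics import run_as_background_process


    class CoalescingPusherLoop(object):
        """Illustrative only: the single-loop processing pattern used above."""

        def __init__(self):
            self.max_stream_ordering = 0
            self._is_processing = False

        def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
            # Synchronous and cheap: record how far we may process, then poke.
            self.max_stream_ordering = max(
                max_stream_ordering, self.max_stream_ordering,
            )
            self._start_processing()

        def _start_processing(self):
            if self._is_processing:
                # The running loop will notice the new max before it exits.
                return
            run_as_background_process("pusher.process", self._process)

        @defer.inlineCallbacks
        def _process(self):
            # we should never get here if a loop is already running
            assert not self._is_processing
            try:
                self._is_processing = True
                while True:
                    starting_max_ordering = self.max_stream_ordering
                    yield self._unsafe_process()
                    if self.max_stream_ordering == starting_max_ordering:
                        break  # caught up: nothing arrived while we were working
            finally:
                self._is_processing = False

        def _unsafe_process(self):
            # Stand-in for the real work of sending out notifications.
            return defer.succeed(None)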


@@ -22,9 +22,8 @@ from prometheus_client import Counter
 from twisted.internet import defer
 from twisted.internet.error import AlreadyCalled, AlreadyCancelled

+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.push import PusherConfigException
-from synapse.util.logcontext import LoggingContext
-from synapse.util.metrics import Measure

 from . import push_rule_evaluator, push_tools

@@ -61,7 +60,7 @@ class HttpPusher(object):
         self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
         self.failing_since = pusherdict['failing_since']
         self.timed_call = None
-        self.processing = False
+        self._is_processing = False

         # This is the highest stream ordering we know it's safe to process.
         # When new events arrive, we'll be given a window of new events: we
@@ -92,34 +91,27 @@ class HttpPusher(object):
         self.data_minus_url.update(self.data)
         del self.data_minus_url['url']

-    @defer.inlineCallbacks
     def on_started(self):
-        try:
-            yield self._process()
-        except Exception:
-            logger.exception("Error starting http pusher")
+        self._start_processing()

-    @defer.inlineCallbacks
     def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
         self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering or 0)
-        yield self._process()
+        self._start_processing()

-    @defer.inlineCallbacks
     def on_new_receipts(self, min_stream_id, max_stream_id):
         # Note that the min here shouldn't be relied upon to be accurate.
         # We could check the receipts are actually m.read receipts here,
         # but currently that's the only type of receipt anyway...
-        with LoggingContext("push.on_new_receipts"):
-            with Measure(self.clock, "push.on_new_receipts"):
-                badge = yield push_tools.get_badge_count(
-                    self.hs.get_datastore(), self.user_id
-                )
-                yield self._send_badge(badge)
+        run_as_background_process("http_pusher.on_new_receipts", self._update_badge)

     @defer.inlineCallbacks
+    def _update_badge(self):
+        badge = yield push_tools.get_badge_count(self.hs.get_datastore(), self.user_id)
+        yield self._send_badge(badge)
+
     def on_timer(self):
-        yield self._process()
+        self._start_processing()

     def on_stop(self):
         if self.timed_call:
@@ -129,15 +121,19 @@ class HttpPusher(object):
                 pass
             self.timed_call = None

-    @defer.inlineCallbacks
-    def _process(self):
-        if self.processing:
+    def _start_processing(self):
+        if self._is_processing:
             return
-        with LoggingContext("push._process"):
-            with Measure(self.clock, "push._process"):
+
+        run_as_background_process("httppush.process", self._process)
+
+    @defer.inlineCallbacks
+    def _process(self):
+        # we should never get here if we are already processing
+        assert not self._is_processing
+
         try:
-            self.processing = True
+            self._is_processing = True
             # if the max ordering changes while we're running _unsafe_process,
             # call it again, and so on until we've caught up.
             while True:

@@ -149,7 +145,7 @@ class HttpPusher(object):
                 if self.max_stream_ordering == starting_max_ordering:
                     break
         finally:
-            self.processing = False
+            self._is_processing = False

     @defer.inlineCallbacks
     def _unsafe_process(self):
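
The receipt path in HttpPusher shows the other half of the cleanup: one-shot work that used to run under a hand-rolled LoggingContext/Measure pair now goes through run_as_background_process, which returns to the caller immediately while the badge update runs, and is tracked, in its own background context. A sketch of that fire-and-forget shape, with stubbed helpers standing in for push_tools.get_badge_count and the HTTP send:

    from twisted.internet import defer

    from synapse.metrics.background_process_metrics import run_as_background_process


    class BadgeUpdaterSketch(object):
        """Illustrative only: fire-and-forget work via run_as_background_process."""

        def on_new_receipts(self, min_stream_id, max_stream_id):
            # Returns immediately; errors and timing are handled by the
            # background-process machinery rather than by the caller.
            run_as_background_process("http_pusher.on_new_receipts", self._update_badge)

        @defer.inlineCallbacks
        def _update_badge(self):
            badge = yield self._get_badge_count()
            yield self._send_badge(badge)

        def _get_badge_count(self):
            # Stand-in for push_tools.get_badge_count(store, user_id).
            return defer.succeed(0)

        def _send_badge(self, badge):
            # Stand-in for the HTTP request to the push gateway.
            return defer.succeed(None)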


@@ -20,24 +20,39 @@ from twisted.internet import defer
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.push.pusher import PusherFactory
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background

 logger = logging.getLogger(__name__)


 class PusherPool:
+    """
+    The pusher pool. This is responsible for dispatching notifications of new events to
+    the http and email pushers.
+
+    It provides three methods which are designed to be called by the rest of the
+    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
+    delegates to each of the relevant pushers.
+
+    Note that it is expected that each pusher will have its own 'processing' loop which
+    will send out the notifications in the background, rather than blocking until the
+    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
+    Pusher.on_new_receipts are not expected to return deferreds.
+    """
     def __init__(self, _hs):
         self.hs = _hs
         self.pusher_factory = PusherFactory(_hs)
-        self.start_pushers = _hs.config.start_pushers
+        self._should_start_pushers = _hs.config.start_pushers
         self.store = self.hs.get_datastore()
         self.clock = self.hs.get_clock()
         self.pushers = {}

-    @defer.inlineCallbacks
     def start(self):
-        pushers = yield self.store.get_all_pushers()
-        self._start_pushers(pushers)
+        """Starts the pushers off in a background process.
+        """
+        if not self._should_start_pushers:
+            logger.info("Not starting pushers because they are disabled in the config")
+            return
+        run_as_background_process("start_pushers", self._start_pushers)

     @defer.inlineCallbacks
     def add_pusher(self, user_id, access_token, kind, app_id,
@@ -86,7 +101,7 @@ class PusherPool:
             last_stream_ordering=last_stream_ordering,
             profile_tag=profile_tag,
         )
-        yield self._refresh_pusher(app_id, pushkey, user_id)
+        yield self.start_pusher_by_id(app_id, pushkey, user_id)

     @defer.inlineCallbacks
     def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
@@ -123,45 +138,23 @@ class PusherPool:
                 p['app_id'], p['pushkey'], p['user_name'],
             )

-    def on_new_notifications(self, min_stream_id, max_stream_id):
-        run_as_background_process(
-            "on_new_notifications",
-            self._on_new_notifications, min_stream_id, max_stream_id,
-        )
-
     @defer.inlineCallbacks
-    def _on_new_notifications(self, min_stream_id, max_stream_id):
+    def on_new_notifications(self, min_stream_id, max_stream_id):
         try:
             users_affected = yield self.store.get_push_action_users_in_range(
                 min_stream_id, max_stream_id
             )

-            deferreds = []
-
             for u in users_affected:
                 if u in self.pushers:
                     for p in self.pushers[u].values():
-                        deferreds.append(
-                            run_in_background(
-                                p.on_new_notifications,
-                                min_stream_id, max_stream_id,
-                            )
-                        )
-
-            yield make_deferred_yieldable(
-                defer.gatherResults(deferreds, consumeErrors=True),
-            )
+                        p.on_new_notifications(min_stream_id, max_stream_id)

         except Exception:
             logger.exception("Exception in pusher on_new_notifications")

-    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
-        run_as_background_process(
-            "on_new_receipts",
-            self._on_new_receipts, min_stream_id, max_stream_id, affected_room_ids,
-        )
-
     @defer.inlineCallbacks
-    def _on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
+    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
         try:
             # Need to subtract 1 from the minimum because the lower bound here
             # is not inclusive
@@ -171,26 +164,20 @@ class PusherPool:
             # This returns a tuple, user_id is at index 3
             users_affected = set([r[3] for r in updated_receipts])

-            deferreds = []
-
             for u in users_affected:
                 if u in self.pushers:
                     for p in self.pushers[u].values():
-                        deferreds.append(
-                            run_in_background(
-                                p.on_new_receipts,
-                                min_stream_id, max_stream_id,
-                            )
-                        )
-
-            yield make_deferred_yieldable(
-                defer.gatherResults(deferreds, consumeErrors=True),
-            )
+                        p.on_new_receipts(min_stream_id, max_stream_id)

         except Exception:
             logger.exception("Exception in pusher on_new_receipts")

     @defer.inlineCallbacks
-    def _refresh_pusher(self, app_id, pushkey, user_id):
+    def start_pusher_by_id(self, app_id, pushkey, user_id):
+        """Look up the details for the given pusher, and start it"""
+        if not self._should_start_pushers:
+            return
+
         resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
             app_id, pushkey
         )
@@ -201,21 +188,39 @@ class PusherPool:
                 p = r

         if p:
-            self._start_pushers([p])
+            self._start_pusher(p)

-    def _start_pushers(self, pushers):
-        if not self.start_pushers:
-            logger.info("Not starting pushers because they are disabled in the config")
-            return
+    @defer.inlineCallbacks
+    def _start_pushers(self):
+        """Start all the pushers
+
+        Returns:
+            Deferred
+        """
+        pushers = yield self.store.get_all_pushers()
         logger.info("Starting %d pushers", len(pushers))
         for pusherdict in pushers:
+            self._start_pusher(pusherdict)
+        logger.info("Started pushers")
+
+    def _start_pusher(self, pusherdict):
+        """Start the given pusher
+
+        Args:
+            pusherdict (dict):
+
+        Returns:
+            None
+        """
         try:
             p = self.pusher_factory.create_pusher(pusherdict)
         except Exception:
             logger.exception("Couldn't start a pusher: caught Exception")
-            continue
-        if p:
+            return
+
+        if not p:
+            return
+
         appid_pushkey = "%s:%s" % (
             pusherdict['app_id'],
             pusherdict['pushkey'],

@@ -225,9 +230,7 @@ class PusherPool:
         if appid_pushkey in byuser:
             byuser[appid_pushkey].on_stop()
         byuser[appid_pushkey] = p
-        run_in_background(p.on_started)
-
-        logger.info("Started pushers")
+        p.on_started()

     @defer.inlineCallbacks
     def remove_pusher(self, app_id, pushkey, user_id):
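
Putting it together, the new PusherPool contract from a caller's point of view: start is synchronous and spawns its own background process for the initial pusher startup, while on_new_notifications and on_new_receipts are inlineCallbacks methods that handlers yield on; the pool then makes plain synchronous calls into each pusher, which does its real work on its own processing loop. A minimal usage sketch under those assumptions, with hs standing in for the Synapse HomeServer object:

    from twisted.internet import defer


    def start_pushers_at_startup(hs):
        # Non-blocking: pusher startup happens in a background process.
        hs.get_pusherpool().start()


    @defer.inlineCallbacks
    def notify_pushers_of_event(hs, event_stream_id, max_stream_id):
        # Handlers wait for dispatch to the per-user pushers to complete;
        # actually sending the pushes stays on each pusher's own loop.
        yield hs.get_pusherpool().on_new_notifications(
            event_stream_id, max_stream_id,
        )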