Update black to 19.10b0 (#6304)

* update version of black and also fix the mypy config being overridden
This commit is contained in:
Amber Brown 2019-11-01 02:43:24 +11:00 committed by GitHub
parent dfe0cd71b6
commit 020add5099
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
41 changed files with 191 additions and 166 deletions

1
changelog.d/6304.misc Normal file
View file

@ -0,0 +1 @@
Update the version of black used to 19.10b0.

View file

@ -78,7 +78,7 @@ class InputOutput(object):
m = re.match("^join (\S+)$", line) m = re.match("^join (\S+)$", line)
if m: if m:
# The `sender` wants to join a room. # The `sender` wants to join a room.
room_name, = m.groups() (room_name,) = m.groups()
self.print_line("%s joining %s" % (self.user, room_name)) self.print_line("%s joining %s" % (self.user, room_name))
self.server.join_room(room_name, self.user, self.user) self.server.join_room(room_name, self.user, self.user)
# self.print_line("OK.") # self.print_line("OK.")
@ -105,7 +105,7 @@ class InputOutput(object):
m = re.match("^backfill (\S+)$", line) m = re.match("^backfill (\S+)$", line)
if m: if m:
# we want to backfill a room # we want to backfill a room
room_name, = m.groups() (room_name,) = m.groups()
self.print_line("backfill %s" % room_name) self.print_line("backfill %s" % room_name)
self.server.backfill(room_name) self.server.backfill(room_name)
return return

View file

@ -1,8 +1,11 @@
[mypy] [mypy]
namespace_packages=True namespace_packages = True
plugins=mypy_zope:plugin plugins = mypy_zope:plugin
follow_imports=skip follow_imports = normal
mypy_path=stubs check_untyped_defs = True
show_error_codes = True
show_traceback = True
mypy_path = stubs
[mypy-zope] [mypy-zope]
ignore_missing_imports = True ignore_missing_imports = True

View file

@ -192,15 +192,16 @@ class PerDestinationQueue(object):
# We have to keep 2 free slots for presence and rr_edus # We have to keep 2 free slots for presence and rr_edus
limit = MAX_EDUS_PER_TRANSACTION - 2 limit = MAX_EDUS_PER_TRANSACTION - 2
device_update_edus, dev_list_id = ( device_update_edus, dev_list_id = yield self._get_device_update_edus(
yield self._get_device_update_edus(limit) limit
) )
limit -= len(device_update_edus) limit -= len(device_update_edus)
to_device_edus, device_stream_id = ( (
yield self._get_to_device_message_edus(limit) to_device_edus,
) device_stream_id,
) = yield self._get_to_device_message_edus(limit)
pending_edus = device_update_edus + to_device_edus pending_edus = device_update_edus + to_device_edus

View file

@ -38,9 +38,10 @@ class AccountDataEventSource(object):
{"type": "m.tag", "content": {"tags": room_tags}, "room_id": room_id} {"type": "m.tag", "content": {"tags": room_tags}, "room_id": room_id}
) )
account_data, room_account_data = ( (
yield self.store.get_updated_account_data_for_user(user_id, last_stream_id) account_data,
) room_account_data,
) = yield self.store.get_updated_account_data_for_user(user_id, last_stream_id)
for account_data_type, content in account_data.items(): for account_data_type, content in account_data.items():
results.append({"type": account_data_type, "content": content}) results.append({"type": account_data_type, "content": content})

View file

@ -73,7 +73,10 @@ class ApplicationServicesHandler(object):
try: try:
limit = 100 limit = 100
while True: while True:
upper_bound, events = yield self.store.get_new_events_for_appservice( (
upper_bound,
events,
) = yield self.store.get_new_events_for_appservice(
self.current_max, limit self.current_max, limit
) )

View file

@ -119,9 +119,10 @@ class E2eKeysHandler(object):
else: else:
query_list.append((user_id, None)) query_list.append((user_id, None))
user_ids_not_in_cache, remote_results = ( (
yield self.store.get_user_devices_from_cache(query_list) user_ids_not_in_cache,
) remote_results,
) = yield self.store.get_user_devices_from_cache(query_list)
for user_id, devices in iteritems(remote_results): for user_id, devices in iteritems(remote_results):
user_devices = results.setdefault(user_id, {}) user_devices = results.setdefault(user_id, {})
for device_id, device in iteritems(devices): for device_id, device in iteritems(devices):
@ -688,17 +689,21 @@ class E2eKeysHandler(object):
try: try:
# get our self-signing key to verify the signatures # get our self-signing key to verify the signatures
_, self_signing_key_id, self_signing_verify_key = yield self._get_e2e_cross_signing_verify_key( (
user_id, "self_signing" _,
) self_signing_key_id,
self_signing_verify_key,
) = yield self._get_e2e_cross_signing_verify_key(user_id, "self_signing")
# get our master key, since we may have received a signature of it. # get our master key, since we may have received a signature of it.
# We need to fetch it here so that we know what its key ID is, so # We need to fetch it here so that we know what its key ID is, so
# that we can check if a signature that was sent is a signature of # that we can check if a signature that was sent is a signature of
# the master key or of a device # the master key or of a device
master_key, _, master_verify_key = yield self._get_e2e_cross_signing_verify_key( (
user_id, "master" master_key,
) _,
master_verify_key,
) = yield self._get_e2e_cross_signing_verify_key(user_id, "master")
# fetch our stored devices. This is used to 1. verify # fetch our stored devices. This is used to 1. verify
# signatures on the master key, and 2. to compare with what # signatures on the master key, and 2. to compare with what
@ -838,9 +843,11 @@ class E2eKeysHandler(object):
try: try:
# get our user-signing key to verify the signatures # get our user-signing key to verify the signatures
user_signing_key, user_signing_key_id, user_signing_verify_key = yield self._get_e2e_cross_signing_verify_key( (
user_id, "user_signing" user_signing_key,
) user_signing_key_id,
user_signing_verify_key,
) = yield self._get_e2e_cross_signing_verify_key(user_id, "user_signing")
except SynapseError as e: except SynapseError as e:
failure = _exception_to_failure(e) failure = _exception_to_failure(e)
for user, devicemap in signatures.items(): for user, devicemap in signatures.items():
@ -859,7 +866,11 @@ class E2eKeysHandler(object):
try: try:
# get the target user's master key, to make sure it matches # get the target user's master key, to make sure it matches
# what was sent # what was sent
master_key, master_key_id, _ = yield self._get_e2e_cross_signing_verify_key( (
master_key,
master_key_id,
_,
) = yield self._get_e2e_cross_signing_verify_key(
target_user, "master", user_id target_user, "master", user_id
) )

View file

@ -352,11 +352,12 @@ class FederationHandler(BaseHandler):
# note that if any of the missing prevs share missing state or # note that if any of the missing prevs share missing state or
# auth events, the requests to fetch those events are deduped # auth events, the requests to fetch those events are deduped
# by the get_pdu_cache in federation_client. # by the get_pdu_cache in federation_client.
remote_state, got_auth_chain = ( (
yield self.federation_client.get_state_for_room( remote_state,
got_auth_chain,
) = yield self.federation_client.get_state_for_room(
origin, room_id, p origin, room_id, p
) )
)
# we want the state *after* p; get_state_for_room returns the # we want the state *after* p; get_state_for_room returns the
# state *before* p. # state *before* p.

View file

@ -128,8 +128,8 @@ class InitialSyncHandler(BaseHandler):
tags_by_room = yield self.store.get_tags_for_user(user_id) tags_by_room = yield self.store.get_tags_for_user(user_id)
account_data, account_data_by_room = ( account_data, account_data_by_room = yield self.store.get_account_data_for_user(
yield self.store.get_account_data_for_user(user_id) user_id
) )
public_room_ids = yield self.store.get_public_room_ids() public_room_ids = yield self.store.get_public_room_ids()

View file

@ -76,9 +76,10 @@ class MessageHandler(object):
Raises: Raises:
SynapseError if something went wrong. SynapseError if something went wrong.
""" """
membership, membership_event_id = yield self.auth.check_in_room_or_world_readable( (
room_id, user_id membership,
) membership_event_id,
) = yield self.auth.check_in_room_or_world_readable(room_id, user_id)
if membership == Membership.JOIN: if membership == Membership.JOIN:
data = yield self.state.get_current_state(room_id, event_type, state_key) data = yield self.state.get_current_state(room_id, event_type, state_key)
@ -153,9 +154,10 @@ class MessageHandler(object):
% (user_id, room_id, at_token), % (user_id, room_id, at_token),
) )
else: else:
membership, membership_event_id = ( (
yield self.auth.check_in_room_or_world_readable(room_id, user_id) membership,
) membership_event_id,
) = yield self.auth.check_in_room_or_world_readable(room_id, user_id)
if membership == Membership.JOIN: if membership == Membership.JOIN:
state_ids = yield self.store.get_filtered_current_state_ids( state_ids = yield self.store.get_filtered_current_state_ids(

View file

@ -212,9 +212,10 @@ class PaginationHandler(object):
source_config = pagin_config.get_source_config("room") source_config = pagin_config.get_source_config("room")
with (yield self.pagination_lock.read(room_id)): with (yield self.pagination_lock.read(room_id)):
membership, member_event_id = yield self.auth.check_in_room_or_world_readable( (
room_id, user_id membership,
) member_event_id,
) = yield self.auth.check_in_room_or_world_readable(room_id, user_id)
if source_config.direction == "b": if source_config.direction == "b":
# if we're going backwards, we might need to backfill. This # if we're going backwards, we might need to backfill. This
@ -297,10 +298,8 @@ class PaginationHandler(object):
} }
if state: if state:
chunk["state"] = ( chunk["state"] = yield self._event_serializer.serialize_events(
yield self._event_serializer.serialize_events(
state, time_now, as_client_event=as_client_event state, time_now, as_client_event=as_client_event
) )
)
return chunk return chunk

View file

@ -396,8 +396,8 @@ class RegistrationHandler(BaseHandler):
room_id = room_identifier room_id = room_identifier
elif RoomAlias.is_valid(room_identifier): elif RoomAlias.is_valid(room_identifier):
room_alias = RoomAlias.from_string(room_identifier) room_alias = RoomAlias.from_string(room_identifier)
room_id, remote_room_hosts = ( room_id, remote_room_hosts = yield room_member_handler.lookup_room_alias(
yield room_member_handler.lookup_room_alias(room_alias) room_alias
) )
room_id = room_id.to_string() room_id = room_id.to_string()
else: else:

View file

@ -147,8 +147,10 @@ class RoomCreationHandler(BaseHandler):
# we create and auth the tombstone event before properly creating the new # we create and auth the tombstone event before properly creating the new
# room, to check our user has perms in the old room. # room, to check our user has perms in the old room.
tombstone_event, tombstone_context = ( (
yield self.event_creation_handler.create_event( tombstone_event,
tombstone_context,
) = yield self.event_creation_handler.create_event(
requester, requester,
{ {
"type": EventTypes.Tombstone, "type": EventTypes.Tombstone,
@ -162,7 +164,6 @@ class RoomCreationHandler(BaseHandler):
}, },
token_id=requester.access_token_id, token_id=requester.access_token_id,
) )
)
old_room_version = yield self.store.get_room_version(old_room_id) old_room_version = yield self.store.get_room_version(old_room_id)
yield self.auth.check_from_context( yield self.auth.check_from_context(
old_room_version, tombstone_event, tombstone_context old_room_version, tombstone_event, tombstone_context

View file

@ -759,8 +759,12 @@ class RoomMemberHandler(object):
if room_avatar_event: if room_avatar_event:
room_avatar_url = room_avatar_event.content.get("url", "") room_avatar_url = room_avatar_event.content.get("url", "")
token, public_keys, fallback_public_key, display_name = ( (
yield self.identity_handler.ask_id_server_for_third_party_invite( token,
public_keys,
fallback_public_key,
display_name,
) = yield self.identity_handler.ask_id_server_for_third_party_invite(
requester=requester, requester=requester,
id_server=id_server, id_server=id_server,
medium=medium, medium=medium,
@ -775,7 +779,6 @@ class RoomMemberHandler(object):
inviter_avatar_url=inviter_avatar_url, inviter_avatar_url=inviter_avatar_url,
id_access_token=id_access_token, id_access_token=id_access_token,
) )
)
yield self.event_creation_handler.create_and_send_nonmember_event( yield self.event_creation_handler.create_and_send_nonmember_event(
requester, requester,

View file

@ -396,16 +396,12 @@ class SearchHandler(BaseHandler):
time_now = self.clock.time_msec() time_now = self.clock.time_msec()
for context in contexts.values(): for context in contexts.values():
context["events_before"] = ( context["events_before"] = yield self._event_serializer.serialize_events(
yield self._event_serializer.serialize_events(
context["events_before"], time_now context["events_before"], time_now
) )
) context["events_after"] = yield self._event_serializer.serialize_events(
context["events_after"] = (
yield self._event_serializer.serialize_events(
context["events_after"], time_now context["events_after"], time_now
) )
)
state_results = {} state_results = {}
if include_state: if include_state:

View file

@ -108,7 +108,10 @@ class StatsHandler(StateDeltasHandler):
user_deltas = {} user_deltas = {}
# Then count deltas for total_events and total_event_bytes. # Then count deltas for total_events and total_event_bytes.
room_count, user_count = yield self.store.get_changes_room_total_events_and_bytes( (
room_count,
user_count,
) = yield self.store.get_changes_room_total_events_and_bytes(
self.pos, max_pos self.pos, max_pos
) )

View file

@ -1206,11 +1206,12 @@ class SyncHandler(object):
since_token = sync_result_builder.since_token since_token = sync_result_builder.since_token
if since_token and not sync_result_builder.full_state: if since_token and not sync_result_builder.full_state:
account_data, account_data_by_room = ( (
yield self.store.get_updated_account_data_for_user( account_data,
account_data_by_room,
) = yield self.store.get_updated_account_data_for_user(
user_id, since_token.account_data_key user_id, since_token.account_data_key
) )
)
push_rules_changed = yield self.store.have_push_rules_changed_for_user( push_rules_changed = yield self.store.have_push_rules_changed_for_user(
user_id, int(since_token.push_rules_key) user_id, int(since_token.push_rules_key)
@ -1221,9 +1222,10 @@ class SyncHandler(object):
sync_config.user sync_config.user
) )
else: else:
account_data, account_data_by_room = ( (
yield self.store.get_account_data_for_user(sync_config.user.to_string()) account_data,
) account_data_by_room,
) = yield self.store.get_account_data_for_user(sync_config.user.to_string())
account_data["m.push_rules"] = yield self.push_rules_for_user( account_data["m.push_rules"] = yield self.push_rules_for_user(
sync_config.user sync_config.user

View file

@ -185,7 +185,7 @@ DEFAULT_LOGGERS = {"synapse": {"level": "INFO"}}
def parse_drain_configs( def parse_drain_configs(
drains: dict drains: dict,
) -> typing.Generator[DrainConfiguration, None, None]: ) -> typing.Generator[DrainConfiguration, None, None]:
""" """
Parse the drain configurations. Parse the drain configurations.

View file

@ -149,9 +149,10 @@ class BulkPushRuleEvaluator(object):
room_members = yield self.store.get_joined_users_from_context(event, context) room_members = yield self.store.get_joined_users_from_context(event, context)
(power_levels, sender_power_level) = ( (
yield self._get_power_levels_and_sender_level(event, context) power_levels,
) sender_power_level,
) = yield self._get_power_levels_and_sender_level(event, context)
evaluator = PushRuleEvaluatorForEvent( evaluator = PushRuleEvaluatorForEvent(
event, len(room_members), sender_power_level, power_levels event, len(room_members), sender_power_level, power_levels

View file

@ -234,15 +234,13 @@ class EmailPusher(object):
return return
self.last_stream_ordering = last_stream_ordering self.last_stream_ordering = last_stream_ordering
pusher_still_exists = ( pusher_still_exists = yield self.store.update_pusher_last_stream_ordering_and_success(
yield self.store.update_pusher_last_stream_ordering_and_success(
self.app_id, self.app_id,
self.email, self.email,
self.user_id, self.user_id,
last_stream_ordering, last_stream_ordering,
self.clock.time_msec(), self.clock.time_msec(),
) )
)
if not pusher_still_exists: if not pusher_still_exists:
# The pusher has been deleted while we were processing, so # The pusher has been deleted while we were processing, so
# lets just stop and return. # lets just stop and return.

View file

@ -211,15 +211,13 @@ class HttpPusher(object):
http_push_processed_counter.inc() http_push_processed_counter.inc()
self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
self.last_stream_ordering = push_action["stream_ordering"] self.last_stream_ordering = push_action["stream_ordering"]
pusher_still_exists = ( pusher_still_exists = yield self.store.update_pusher_last_stream_ordering_and_success(
yield self.store.update_pusher_last_stream_ordering_and_success(
self.app_id, self.app_id,
self.pushkey, self.pushkey,
self.user_id, self.user_id,
self.last_stream_ordering, self.last_stream_ordering,
self.clock.time_msec(), self.clock.time_msec(),
) )
)
if not pusher_still_exists: if not pusher_still_exists:
# The pusher has been deleted while we were processing, so # The pusher has been deleted while we were processing, so
# lets just stop and return. # lets just stop and return.

View file

@ -103,9 +103,7 @@ class PusherPool:
# create the pusher setting last_stream_ordering to the current maximum # create the pusher setting last_stream_ordering to the current maximum
# stream ordering in event_push_actions, so it will process # stream ordering in event_push_actions, so it will process
# pushes from this point onwards. # pushes from this point onwards.
last_stream_ordering = ( last_stream_ordering = yield self.store.get_latest_push_action_stream_ordering()
yield self.store.get_latest_push_action_stream_ordering()
)
yield self.store.add_pusher( yield self.store.add_pusher(
user_id=user_id, user_id=user_id,

View file

@ -203,11 +203,12 @@ class LoginRestServlet(RestServlet):
address = address.lower() address = address.lower()
# Check for login providers that support 3pid login types # Check for login providers that support 3pid login types
canonical_user_id, callback_3pid = ( (
yield self.auth_handler.check_password_provider_3pid( canonical_user_id,
callback_3pid,
) = yield self.auth_handler.check_password_provider_3pid(
medium, address, login_submission["password"] medium, address, login_submission["password"]
) )
)
if canonical_user_id: if canonical_user_id:
# Authentication through password provider and 3pid succeeded # Authentication through password provider and 3pid succeeded
result = yield self._register_device_with_callback( result = yield self._register_device_with_callback(
@ -280,8 +281,8 @@ class LoginRestServlet(RestServlet):
def do_token_login(self, login_submission): def do_token_login(self, login_submission):
token = login_submission["token"] token = login_submission["token"]
auth_handler = self.auth_handler auth_handler = self.auth_handler
user_id = ( user_id = yield auth_handler.validate_short_term_login_token_and_get_user_id(
yield auth_handler.validate_short_term_login_token_and_get_user_id(token) token
) )
result = yield self._register_device_with_callback(user_id, login_submission) result = yield self._register_device_with_callback(user_id, login_submission)

View file

@ -148,7 +148,7 @@ class PasswordResetSubmitTokenServlet(RestServlet):
self.clock = hs.get_clock() self.clock = hs.get_clock()
self.store = hs.get_datastore() self.store = hs.get_datastore()
if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL: if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.failure_email_template, = load_jinja2_templates( (self.failure_email_template,) = load_jinja2_templates(
self.config.email_template_dir, self.config.email_template_dir,
[self.config.email_password_reset_template_failure_html], [self.config.email_password_reset_template_failure_html],
) )
@ -479,7 +479,7 @@ class AddThreepidEmailSubmitTokenServlet(RestServlet):
self.clock = hs.get_clock() self.clock = hs.get_clock()
self.store = hs.get_datastore() self.store = hs.get_datastore()
if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL: if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.failure_email_template, = load_jinja2_templates( (self.failure_email_template,) = load_jinja2_templates(
self.config.email_template_dir, self.config.email_template_dir,
[self.config.email_add_threepid_template_failure_html], [self.config.email_add_threepid_template_failure_html],
) )

View file

@ -247,13 +247,13 @@ class RegistrationSubmitTokenServlet(RestServlet):
self.store = hs.get_datastore() self.store = hs.get_datastore()
if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL: if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.failure_email_template, = load_jinja2_templates( (self.failure_email_template,) = load_jinja2_templates(
self.config.email_template_dir, self.config.email_template_dir,
[self.config.email_registration_template_failure_html], [self.config.email_registration_template_failure_html],
) )
if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL: if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
self.failure_email_template, = load_jinja2_templates( (self.failure_email_template,) = load_jinja2_templates(
self.config.email_template_dir, self.config.email_template_dir,
[self.config.email_registration_template_failure_html], [self.config.email_registration_template_failure_html],
) )

View file

@ -102,7 +102,7 @@ class RemoteKey(DirectServeResource):
@wrap_json_request_handler @wrap_json_request_handler
async def _async_render_GET(self, request): async def _async_render_GET(self, request):
if len(request.postpath) == 1: if len(request.postpath) == 1:
server, = request.postpath (server,) = request.postpath
query = {server.decode("ascii"): {}} query = {server.decode("ascii"): {}}
elif len(request.postpath) == 2: elif len(request.postpath) == 2:
server, key_id = request.postpath server, key_id = request.postpath

View file

@ -39,7 +39,7 @@ class HomeServer(object):
def get_state_resolution_handler(self) -> synapse.state.StateResolutionHandler: def get_state_resolution_handler(self) -> synapse.state.StateResolutionHandler:
pass pass
def get_deactivate_account_handler( def get_deactivate_account_handler(
self self,
) -> synapse.handlers.deactivate_account.DeactivateAccountHandler: ) -> synapse.handlers.deactivate_account.DeactivateAccountHandler:
pass pass
def get_room_creation_handler(self) -> synapse.handlers.room.RoomCreationHandler: def get_room_creation_handler(self) -> synapse.handlers.room.RoomCreationHandler:
@ -47,32 +47,32 @@ class HomeServer(object):
def get_room_member_handler(self) -> synapse.handlers.room_member.RoomMemberHandler: def get_room_member_handler(self) -> synapse.handlers.room_member.RoomMemberHandler:
pass pass
def get_event_creation_handler( def get_event_creation_handler(
self self,
) -> synapse.handlers.message.EventCreationHandler: ) -> synapse.handlers.message.EventCreationHandler:
pass pass
def get_set_password_handler( def get_set_password_handler(
self self,
) -> synapse.handlers.set_password.SetPasswordHandler: ) -> synapse.handlers.set_password.SetPasswordHandler:
pass pass
def get_federation_sender(self) -> synapse.federation.sender.FederationSender: def get_federation_sender(self) -> synapse.federation.sender.FederationSender:
pass pass
def get_federation_transport_client( def get_federation_transport_client(
self self,
) -> synapse.federation.transport.client.TransportLayerClient: ) -> synapse.federation.transport.client.TransportLayerClient:
pass pass
def get_media_repository_resource( def get_media_repository_resource(
self self,
) -> synapse.rest.media.v1.media_repository.MediaRepositoryResource: ) -> synapse.rest.media.v1.media_repository.MediaRepositoryResource:
pass pass
def get_media_repository( def get_media_repository(
self self,
) -> synapse.rest.media.v1.media_repository.MediaRepository: ) -> synapse.rest.media.v1.media_repository.MediaRepository:
pass pass
def get_server_notices_manager( def get_server_notices_manager(
self self,
) -> synapse.server_notices.server_notices_manager.ServerNoticesManager: ) -> synapse.server_notices.server_notices_manager.ServerNoticesManager:
pass pass
def get_server_notices_sender( def get_server_notices_sender(
self self,
) -> synapse.server_notices.server_notices_sender.ServerNoticesSender: ) -> synapse.server_notices.server_notices_sender.ServerNoticesSender:
pass pass

View file

@ -317,7 +317,7 @@ class DataStore(
) u ) u
""" """
txn.execute(sql, (time_from,)) txn.execute(sql, (time_from,))
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
def count_r30_users(self): def count_r30_users(self):
@ -396,7 +396,7 @@ class DataStore(
txn.execute(sql, (thirty_days_ago_in_secs, thirty_days_ago_in_secs)) txn.execute(sql, (thirty_days_ago_in_secs, thirty_days_ago_in_secs))
count, = txn.fetchone() (count,) = txn.fetchone()
results["all"] = count results["all"] = count
return results return results

View file

@ -863,7 +863,7 @@ class EventPushActionsStore(EventPushActionsWorkerStore):
) )
stream_row = txn.fetchone() stream_row = txn.fetchone()
if stream_row: if stream_row:
offset_stream_ordering, = stream_row (offset_stream_ordering,) = stream_row
rotate_to_stream_ordering = min( rotate_to_stream_ordering = min(
self.stream_ordering_day_ago, offset_stream_ordering self.stream_ordering_day_ago, offset_stream_ordering
) )

View file

@ -1125,7 +1125,7 @@ class EventsStore(
AND stream_ordering > ? AND stream_ordering > ?
""" """
txn.execute(sql, (self.stream_ordering_day_ago,)) txn.execute(sql, (self.stream_ordering_day_ago,))
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
ret = yield self.runInteraction("count_messages", _count_messages) ret = yield self.runInteraction("count_messages", _count_messages)
@ -1146,7 +1146,7 @@ class EventsStore(
""" """
txn.execute(sql, (like_clause, self.stream_ordering_day_ago)) txn.execute(sql, (like_clause, self.stream_ordering_day_ago))
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
ret = yield self.runInteraction("count_daily_sent_messages", _count_messages) ret = yield self.runInteraction("count_daily_sent_messages", _count_messages)
@ -1161,7 +1161,7 @@ class EventsStore(
AND stream_ordering > ? AND stream_ordering > ?
""" """
txn.execute(sql, (self.stream_ordering_day_ago,)) txn.execute(sql, (self.stream_ordering_day_ago,))
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
ret = yield self.runInteraction("count_daily_active_rooms", _count) ret = yield self.runInteraction("count_daily_active_rooms", _count)
@ -1646,7 +1646,7 @@ class EventsStore(
""", """,
(room_id,), (room_id,),
) )
min_depth, = txn.fetchone() (min_depth,) = txn.fetchone()
logger.info("[purge] updating room_depth to %d", min_depth) logger.info("[purge] updating room_depth to %d", min_depth)

View file

@ -438,7 +438,7 @@ class EventsBackgroundUpdatesStore(BackgroundUpdateStore):
if not rows: if not rows:
return 0 return 0
upper_event_id, = rows[-1] (upper_event_id,) = rows[-1]
# Update the redactions with the received_ts. # Update the redactions with the received_ts.
# #

View file

@ -249,7 +249,7 @@ class GroupServerStore(SQLBaseStore):
WHERE group_id = ? AND category_id = ? WHERE group_id = ? AND category_id = ?
""" """
txn.execute(sql, (group_id, category_id)) txn.execute(sql, (group_id, category_id))
order, = txn.fetchone() (order,) = txn.fetchone()
if existing: if existing:
to_update = {} to_update = {}
@ -509,7 +509,7 @@ class GroupServerStore(SQLBaseStore):
WHERE group_id = ? AND role_id = ? WHERE group_id = ? AND role_id = ?
""" """
txn.execute(sql, (group_id, role_id)) txn.execute(sql, (group_id, role_id))
order, = txn.fetchone() (order,) = txn.fetchone()
if existing: if existing:
to_update = {} to_update = {}

View file

@ -171,7 +171,7 @@ class MonthlyActiveUsersStore(SQLBaseStore):
sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users"
txn.execute(sql) txn.execute(sql)
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
return self.runInteraction("count_users", _count_users) return self.runInteraction("count_users", _count_users)

View file

@ -143,7 +143,7 @@ class PushRulesWorkerStore(
" WHERE user_id = ? AND ? < stream_id" " WHERE user_id = ? AND ? < stream_id"
) )
txn.execute(sql, (user_id, last_id)) txn.execute(sql, (user_id, last_id))
count, = txn.fetchone() (count,) = txn.fetchone()
return bool(count) return bool(count)
return self.runInteraction( return self.runInteraction(

View file

@ -459,7 +459,7 @@ class RegistrationWorkerStore(SQLBaseStore):
WHERE appservice_id IS NULL WHERE appservice_id IS NULL
""" """
) )
count, = txn.fetchone() (count,) = txn.fetchone()
return count return count
ret = yield self.runInteraction("count_users", _count_users) ret = yield self.runInteraction("count_users", _count_users)

View file

@ -927,7 +927,7 @@ class RoomMemberBackgroundUpdateStore(BackgroundUpdateStore):
if not row or not row[0]: if not row or not row[0]:
return processed, True return processed, True
next_room, = row (next_room,) = row
sql = """ sql = """
UPDATE current_state_events UPDATE current_state_events

View file

@ -672,7 +672,7 @@ class SearchStore(SearchBackgroundUpdateStore):
) )
) )
txn.execute(query, (value, search_query)) txn.execute(query, (value, search_query))
headline, = txn.fetchall()[0] (headline,) = txn.fetchall()[0]
# Now we need to pick the possible highlights out of the headline # Now we need to pick the possible highlights out of the headline
# result. # result.

View file

@ -725,17 +725,19 @@ class StateGroupWorkerStore(
member_filter, non_member_filter = state_filter.get_member_split() member_filter, non_member_filter = state_filter.get_member_split()
# Now we look them up in the member and non-member caches # Now we look them up in the member and non-member caches
non_member_state, incomplete_groups_nm, = ( (
yield self._get_state_for_groups_using_cache( non_member_state,
incomplete_groups_nm,
) = yield self._get_state_for_groups_using_cache(
groups, self._state_group_cache, state_filter=non_member_filter groups, self._state_group_cache, state_filter=non_member_filter
) )
)
member_state, incomplete_groups_m, = ( (
yield self._get_state_for_groups_using_cache( member_state,
incomplete_groups_m,
) = yield self._get_state_for_groups_using_cache(
groups, self._state_group_members_cache, state_filter=member_filter groups, self._state_group_members_cache, state_filter=member_filter
) )
)
state = dict(non_member_state) state = dict(non_member_state)
for group in groups: for group in groups:
@ -1076,7 +1078,7 @@ class StateBackgroundUpdateStore(
" WHERE id < ? AND room_id = ?", " WHERE id < ? AND room_id = ?",
(state_group, room_id), (state_group, room_id),
) )
prev_group, = txn.fetchone() (prev_group,) = txn.fetchone()
new_last_state_group = state_group new_last_state_group = state_group
if prev_group: if prev_group:

View file

@ -773,7 +773,7 @@ class StatsStore(StateDeltasStore):
(room_id,), (room_id,),
) )
current_state_events_count, = txn.fetchone() (current_state_events_count,) = txn.fetchone()
users_in_room = self.get_users_in_room_txn(txn, room_id) users_in_room = self.get_users_in_room_txn(txn, room_id)
@ -863,7 +863,7 @@ class StatsStore(StateDeltasStore):
""", """,
(user_id,), (user_id,),
) )
count, = txn.fetchone() (count,) = txn.fetchone()
return count, pos return count, pos
joined_rooms, pos = yield self.runInteraction( joined_rooms, pos = yield self.runInteraction(

View file

@ -46,7 +46,7 @@ def _load_current_id(db_conn, table, column, step=1):
cur.execute("SELECT MAX(%s) FROM %s" % (column, table)) cur.execute("SELECT MAX(%s) FROM %s" % (column, table))
else: else:
cur.execute("SELECT MIN(%s) FROM %s" % (column, table)) cur.execute("SELECT MIN(%s) FROM %s" % (column, table))
val, = cur.fetchone() (val,) = cur.fetchone()
cur.close() cur.close()
current_id = int(val) if val else step current_id = int(val) if val else step
return (max if step > 0 else min)(current_id, step) return (max if step > 0 else min)(current_id, step)

View file

@ -114,7 +114,7 @@ skip_install = True
basepython = python3.6 basepython = python3.6
deps = deps =
flake8 flake8
black==19.3b0 # We pin so that our tests don't start failing on new releases of black. black==19.10b0 # We pin so that our tests don't start failing on new releases of black.
commands = commands =
python -m black --check --diff . python -m black --check --diff .
/bin/sh -c "flake8 synapse tests scripts scripts-dev synctl {env:PEP8SUFFIX:}" /bin/sh -c "flake8 synapse tests scripts scripts-dev synctl {env:PEP8SUFFIX:}"
@ -167,6 +167,6 @@ deps =
env = env =
MYPYPATH = stubs/ MYPYPATH = stubs/
extras = all extras = all
commands = mypy --show-traceback --check-untyped-defs --show-error-codes --follow-imports=normal \ commands = mypy \
synapse/logging/ \ synapse/logging/ \
synapse/config/ synapse/config/