From 646324437543c096e737777c81b4fe4b45c3e1a7 Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Wed, 27 Apr 2022 14:03:44 +0100
Subject: [PATCH] Remove unused `# type: ignore`s (#12531)

Over time we've begun to use newer versions of mypy, typeshed, and stub
packages, and of course we've improved our own annotations. This makes some
type ignore comments no longer necessary, so I have removed them.

There was one exception: a module that imports `select.epoll`. The ignore is
redundant on Linux, but I've kept it for those of us who work on the source
tree on something other than Linux. (#11771)

I'm more interested in the config line which enforces this: I want unused
ignores to be reported, because I think it's useful feedback when annotating
to know that you've fixed a problem you previously had to ignore.

* Installing extras before typechecking

Lacking an easy way to install all extras generically, let's bite the bullet
and install the hand-maintained `all` extra before typechecking. This is
possible now that https://github.com/matrix-org/backend-meta/pull/6 has been
merged to the release/v1 branch.
---
 .github/workflows/tests.yml                     |  8 ++------
 changelog.d/12531.misc                          |  1 +
 mypy.ini                                        |  6 ++++++
 stubs/sortedcontainers/sorteddict.pyi           |  4 +---
 synapse/app/_base.py                            |  4 ++--
 synapse/config/server.py                        |  6 ++----
 synapse/federation/federation_server.py         |  4 ++--
 synapse/federation/transport/client.py          | 10 +++++-----
 synapse/handlers/auth.py                        |  2 +-
 synapse/handlers/oidc.py                        |  2 +-
 synapse/handlers/search.py                      |  6 +++---
 synapse/http/server.py                          |  4 ++--
 synapse/module_api/__init__.py                  |  4 +++-
 .../databases/main/monthly_active_users.py      |  6 +++---
 synapse/storage/prepare_database.py             |  4 ++--
 synapse/util/caches/ttlcache.py                 |  2 +-
 tests/handlers/test_register.py                 |  9 +++------
 tests/module_api/test_account_data_manager.py   | 17 +++++++++++------
 .../replication/test_federation_sender_shard.py |  8 ++++----
 tests/test_utils/__init__.py                    |  8 ++++----
 tests/test_utils/logging_setup.py               |  2 +-
 21 files changed, 60 insertions(+), 57 deletions(-)
 create mode 100644 changelog.d/12531.misc

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index cad4cb6d77..efa35b71df 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -20,13 +20,9 @@ jobs:
       - run: scripts-dev/config-lint.sh
 
   lint:
-    # This does a vanilla `poetry install` - no extras. I'm slightly anxious
-    # that we might skip some typechecks on code that uses extras. However,
-    # I think the right way to fix this is to mark any extras needed for
-    # typechecking as development dependencies. To detect this, we ought to
-    # turn up mypy's strictness: disallow unknown imports and be accept fewer
-    # uses of `Any`.
     uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    with:
+      typechecking-extras: "all"
 
   lint-crlf:
     runs-on: ubuntu-latest
diff --git a/changelog.d/12531.misc b/changelog.d/12531.misc
new file mode 100644
index 0000000000..412fc9b6dc
--- /dev/null
+++ b/changelog.d/12531.misc
@@ -0,0 +1 @@
+Remove unused `# type: ignore`s.
diff --git a/mypy.ini b/mypy.ini
index a663bf6975..280f1e898e 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -7,6 +7,7 @@ show_error_codes = True
 show_traceback = True
 mypy_path = stubs
 warn_unreachable = True
+warn_unused_ignores = True
 local_partial_types = True
 no_implicit_optional = True
 
@@ -134,6 +135,11 @@ disallow_untyped_defs = True
 
 [mypy-synapse.metrics.*]
 disallow_untyped_defs = True
 
+[mypy-synapse.metrics._reactor_metrics]
+# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
+# See https://github.com/matrix-org/synapse/pull/11771.
+warn_unused_ignores = False
+
 [mypy-synapse.module_api.*]
 disallow_untyped_defs = True
diff --git a/stubs/sortedcontainers/sorteddict.pyi b/stubs/sortedcontainers/sorteddict.pyi
index e18d617281..3a4f9c3076 100644
--- a/stubs/sortedcontainers/sorteddict.pyi
+++ b/stubs/sortedcontainers/sorteddict.pyi
@@ -115,9 +115,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     def __getitem__(self, index: slice) -> List[_KT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...
 
-class SortedItemsView(  # type: ignore
-    ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]
-):
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
     def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
     @overload
     def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 37321f9133..d28b87a3f4 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool
 
-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)
 
     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined] # noqa
+    init_tracer(hs)  # noqa
 
     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.
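An illustrative aside, not part of the patch: `warn_unused_ignores = True` (added to mypy.ini above) makes mypy flag any `# type: ignore` that no longer suppresses an error, which is the feedback loop the commit message describes. A minimal sketch of that behaviour, with made-up function names:

```python
# unused_ignore_demo.py -- hypothetical example, not from the Synapse tree.
from typing import Optional


def to_port(raw: Optional[str]) -> int:
    if raw is None:
        return 8080
    # `raw` is narrowed to `str` here, so the call already typechecks and,
    # with `warn_unused_ignores = True`, mypy reports:
    #     error: Unused "type: ignore" comment
    return int(raw)  # type: ignore


def require_token(token: Optional[str]) -> str:
    # This ignore is still doing work (mypy would report a [return-value]
    # error without it), so `warn_unused_ignores` stays quiet about it.
    return token  # type: ignore[return-value]
```

The `[mypy-synapse.metrics._reactor_metrics]` section above then opts that single module back out of the check, because its `select.epoll` import only resolves on Linux.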
diff --git a/synapse/config/server.py b/synapse/config/server.py
index d771045b52..b6cd326416 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type] # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index beab1227b8..884b5d60b4 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )
 
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 1421050b9a..9ce06dfa28 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )
 
-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
             raise RuntimeError("Transport layer cannot send to itself!")
 
         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()
 
-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)
 
         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 86991d26ce..22678d486d 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -481,7 +481,7 @@ class AuthHandler:
         sid = authdict["session"]
 
         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")
 
         # If there's no session ID, create a new session.
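Another illustrative aside, not part of the patch: the `synapse/config/server.py` hunk above drops the ignores around `attr.validators.instance_of((float, int))`, presumably because newer attrs annotations accept a tuple of types there, mirroring `isinstance`. A rough sketch of the pattern with an invented config class (assumes the `attrs` package is installed):

```python
# attrs_validator_demo.py -- hypothetical sketch, not from the Synapse tree.
from typing import Union

import attr


@attr.s(auto_attribs=True)
class ComplexityConfig:
    # `instance_of` takes a single type or a tuple of types, like `isinstance`.
    complexity: Union[float, int] = attr.ib(
        validator=attr.validators.instance_of((float, int)),
        default=1.0,
    )


print(ComplexityConfig(complexity=2))  # ComplexityConfig(complexity=2)
# ComplexityConfig(complexity="lots") would raise TypeError at runtime.
```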
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 724b9cfcb4..f6ffb7d18d 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -966,7 +966,7 @@ class OidcProvider:
                     "Mapping provider does not support de-duplicating Matrix IDs"
                 )
 
-            attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+            attributes = await self._user_mapping_provider.map_user_attributes(
                 userinfo, token
             )
 
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 102dd4b57d..5619f8f50e 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@
 
         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )
 
         results = [
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 31ca841889..b8a7a0f5df 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -295,7 +295,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
             if isawaitable(raw_callback_return):
                 callback_return = await raw_callback_return
             else:
-                callback_return = raw_callback_return  # type: ignore
+                callback_return = raw_callback_return
 
             return callback_return
 
@@ -469,7 +469,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return
 
         return callback_return
 
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index 8f9e629274..7e6e23db73 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -109,6 +109,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +152,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -1419,7 +1421,7 @@ class AccountDataManager:
             f"{user_id} is not local to this homeserver; can't access account data for remote users."
         )
 
-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
 
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index 4f1c22c71b..5beb8f1d4b 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
             # is racy.
             # Have resolved to invalidate the whole cache for now and do
             # something about it if and when the perf becomes significant
-            self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+            self._invalidate_all_cache_and_stream(
                 txn, self.user_last_seen_monthly_active
             )
-            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())
 
         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
 
         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index e3153d1a4a..546d6bae6e 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -501,11 +501,11 @@ def _upgrade_existing_database(
 
                 if hasattr(module, "run_create"):
                     logger.info("Running %s:run_create", relative_path)
-                    module.run_create(cur, database_engine)  # type: ignore
+                    module.run_create(cur, database_engine)
 
                 if not is_empty and hasattr(module, "run_upgrade"):
                     logger.info("Running %s:run_upgrade", relative_path)
-                    module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                    module.run_upgrade(cur, database_engine, config=config)
             elif ext == ".pyc" or file_name == "__pycache__":
                 # Sometimes .pyc files turn up anyway even though we've
                 # disabled their generation; e.g. from distribution package
diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py
index 0b9ac26b69..f6b3ee31e4 100644
--- a/synapse/util/caches/ttlcache.py
+++ b/synapse/util/caches/ttlcache.py
@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
         self._metrics.inc_hits()
         return e.value, e.expiry_time, e.ttl
 
-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache
 
         If key is in the cache, remove it and return its value, else return default.
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 45fd30cf43..b6ba19c739 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -193,8 +193,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
 
     @override_config({"limit_usage_by_mau": True})
     def test_get_or_create_user_mau_not_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.count_monthly_users = Mock(  # type: ignore[assignment]
+        self.store.count_monthly_users = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value - 1)
         )
         # Ensure does not throw exception
@@ -202,8 +201,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
 
     @override_config({"limit_usage_by_mau": True})
    def test_get_or_create_user_mau_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.lots_of_users)
         )
         self.get_failure(
@@ -211,8 +209,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             ResourceLimitError,
         )
 
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         self.get_failure(
diff --git a/tests/module_api/test_account_data_manager.py b/tests/module_api/test_account_data_manager.py
index bec018d9e7..89009bea8c 100644
--- a/tests/module_api/test_account_data_manager.py
+++ b/tests/module_api/test_account_data_manager.py
@@ -11,8 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from twisted.test.proto_helpers import MemoryReactor
+
 from synapse.api.errors import SynapseError
 from synapse.rest import admin
+from synapse.server import HomeServer
+from synapse.util import Clock
 
 from tests.unittest import HomeserverTestCase
 
@@ -22,7 +26,9 @@ class ModuleApiTestCase(HomeserverTestCase):
         admin.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver) -> None:
+    def prepare(
+        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
+    ) -> None:
         self._store = homeserver.get_datastores().main
         self._module_api = homeserver.get_module_api()
         self._account_data_mgr = self._module_api.account_data_manager
@@ -91,7 +97,7 @@ class ModuleApiTestCase(HomeserverTestCase):
         )
         with self.assertRaises(TypeError):
             # This throws an exception because it's a frozen dict.
-            the_data["wombat"] = False
+            the_data["wombat"] = False  # type: ignore[index]
 
     def test_put_global(self) -> None:
         """
@@ -143,15 +149,14 @@ class ModuleApiTestCase(HomeserverTestCase):
         with self.assertRaises(TypeError):
             # The account data type must be a string.
             self.get_success_or_raise(
-                self._module_api.account_data_manager.put_global(
-                    self.user_id, 42, {}  # type: ignore
-                )
+                self._module_api.account_data_manager.put_global(self.user_id, 42, {})  # type: ignore[arg-type]
             )
 
         with self.assertRaises(TypeError):
             # The account data dict must be a dict.
+            # noinspection PyTypeChecker
             self.get_success_or_raise(
                 self._module_api.account_data_manager.put_global(
-                    self.user_id, "test.data", 42  # type: ignore
+                    self.user_id, "test.data", 42  # type: ignore[arg-type]
                 )
             )
diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py
index ba1a63c0d6..6104a55aa1 100644
--- a/tests/replication/test_federation_sender_shard.py
+++ b/tests/replication/test_federation_sender_shard.py
@@ -102,8 +102,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()
 
             self.create_and_send_event(room, UserID.from_string(user))
             self.replicate()
@@ -167,8 +167,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()
 
             self.get_success(
                 typing_handler.started_typing(
diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py
index f05a373aa0..0d0d6faf0d 100644
--- a/tests/test_utils/__init__.py
+++ b/tests/test_utils/__init__.py
@@ -52,7 +52,7 @@ def make_awaitable(result: TV) -> Awaitable[TV]:
     This uses Futures as they can be awaited multiple times so can be returned
     to multiple callers.
     """
-    future = Future()  # type: ignore
+    future: Future[TV] = Future()
     future.set_result(result)
     return future
 
@@ -69,7 +69,7 @@ def setup_awaitable_errors() -> Callable[[], None]:
     # State shared between unraisablehook and check_for_unraisable_exceptions.
     unraisable_exceptions = []
 
-    orig_unraisablehook = sys.unraisablehook  # type: ignore
+    orig_unraisablehook = sys.unraisablehook
 
     def unraisablehook(unraisable):
         unraisable_exceptions.append(unraisable.exc_value)
@@ -78,11 +78,11 @@ def setup_awaitable_errors() -> Callable[[], None]:
         """
         A method to be used as a clean-up that fails a test-case if there are any new unraisable exceptions.
         """
-        sys.unraisablehook = orig_unraisablehook  # type: ignore
+        sys.unraisablehook = orig_unraisablehook
 
         if unraisable_exceptions:
             raise unraisable_exceptions.pop()
 
-    sys.unraisablehook = unraisablehook  # type: ignore
+    sys.unraisablehook = unraisablehook
 
     return cleanup
diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py
index 51a197a8c6..9228454c9e 100644
--- a/tests/test_utils/logging_setup.py
+++ b/tests/test_utils/logging_setup.py
@@ -27,7 +27,7 @@ class ToTwistedHandler(logging.Handler):
     def emit(self, record):
         log_entry = self.format(record)
         log_level = record.levelname.lower().replace("warning", "warn")
-        self.tx_log.emit(  # type: ignore
+        self.tx_log.emit(
             twisted.logger.LogLevel.levelWithName(log_level), "{entry}", entry=log_entry
         )
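A final illustrative aside, not part of the patch: several test hunks above stub async store methods with `Mock(return_value=make_awaitable(...))`, and the `tests/test_utils/__init__.py` hunk shows the helper's newly annotated body. A self-contained sketch of that pattern (the real helper may differ in detail; the store object and the value 50 are invented for the example):

```python
# make_awaitable_sketch.py -- sketch of the pattern used in the tests above.
import asyncio
from typing import Awaitable, TypeVar
from unittest.mock import Mock

TV = TypeVar("TV")


def make_awaitable(result: TV) -> Awaitable[TV]:
    # A completed Future can be awaited any number of times, so the same
    # object can safely be handed out to several callers.
    future: "asyncio.Future[TV]" = asyncio.Future()
    future.set_result(result)
    return future


async def main() -> None:
    store = Mock()
    store.get_monthly_active_count = Mock(return_value=make_awaitable(50))
    print(await store.get_monthly_active_count())  # 50
    print(await store.get_monthly_active_count())  # still 50, same Future


asyncio.run(main())
```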