diff --git a/changelog.d/18604.misc b/changelog.d/18604.misc new file mode 100644 index 00000000000..c06fb23af5c --- /dev/null +++ b/changelog.d/18604.misc @@ -0,0 +1 @@ +Refactor cache metrics to be homeserver-scoped. diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py index 3126bc9f27a..f97a71caf7c 100644 --- a/synapse/api/auth/base.py +++ b/synapse/api/auth/base.py @@ -172,7 +172,7 @@ async def validate_appservice_can_control_user_id( """ # It's ok if the app service is trying to use the sender from their registration - if app_service.sender == user_id: + if app_service.sender.to_string() == user_id: pass # Check to make sure the app service is allowed to control the user elif not app_service.is_interested_in_user(user_id): diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 0cfdf15d60a..ad8f4e04f6a 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -176,6 +176,7 @@ def __init__(self, hs: "HomeServer"): assert self._config.client_id, "No client_id provided" assert auth_method is not None, "Invalid client_auth_method provided" + self.server_name = hs.hostname self._clock = hs.get_clock() self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname @@ -206,8 +207,9 @@ def __init__(self, hs: "HomeServer"): # In this case, the device still exists and it's not the end of the world for # the old access token to continue working for a short time. 
self._introspection_cache: ResponseCache[str] = ResponseCache( - self._clock, - "token_introspection", + clock=self._clock, + name="token_introspection", + server_name=self.server_name, timeout_ms=120_000, # don't log because the keys are access tokens enable_logging=False, diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 6ee5240c4ee..2d8d382e68c 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -78,7 +78,7 @@ def __init__( self, token: str, id: str, - sender: str, + sender: UserID, url: Optional[str] = None, namespaces: Optional[JsonDict] = None, hs_token: Optional[str] = None, @@ -96,6 +96,8 @@ def __init__( self.hs_token = hs_token # The full Matrix ID for this application service's sender. self.sender = sender + # The application service user should be part of the server's domain. + self.server_name = sender.domain # nb must be called this for @cached self.namespaces = self._check_namespaces(namespaces) self.id = id self.ip_range_whitelist = ip_range_whitelist @@ -223,7 +225,7 @@ def is_interested_in_user( """ return ( # User is the appservice's configured sender_localpart user - user_id == self.sender + user_id == self.sender.to_string() # User is in the appservice's user namespace or self.is_user_in_namespace(user_id) ) @@ -347,7 +349,7 @@ def is_room_id_in_namespace(self, room_id: str) -> bool: def is_exclusive_user(self, user_id: str) -> bool: return ( self._is_exclusive(ApplicationService.NS_USERS, user_id) - or user_id == self.sender + or user_id == self.sender.to_string() ) def is_interested_in_protocol(self, protocol: str) -> bool: diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 45371d6f3bc..8c21e0951ac 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -126,11 +126,15 @@ class ApplicationServiceApi(SimpleHttpClient): def __init__(self, hs: "HomeServer"): super().__init__(hs) + self.server_name = hs.hostname self.clock = hs.get_clock() 
self.config = hs.config.appservice self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS + clock=hs.get_clock(), + name="as_protocol_meta", + server_name=self.server_name, + timeout_ms=HOUR_IN_MS, ) def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]: diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index ab3f4e15fe9..9d7fc0995a9 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -319,7 +319,7 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( users: Set[str] = set() # The sender is always included - users.add(service.sender) + users.add(service.sender.to_string()) # All AS users that would receive the PDUs or EDUs sent to these rooms # are classed as 'interesting'. diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index dda6bcd1b79..81dbd330cc4 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -122,8 +122,7 @@ def _load_appservice( localpart = as_info["sender_localpart"] if urlparse.quote(localpart) != localpart: raise ValueError("sender_localpart needs characters which are not URL encoded.") - user = UserID(localpart, hostname) - user_id = user.to_string() + user_id = UserID(localpart, hostname) # Rate limiting for users of this AS is on by default (excludes sender) rate_limited = as_info.get("rate_limited") diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 2d1da70793c..35c5ac63119 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -137,13 +137,14 @@ def __init__(self, hs: "HomeServer"): self.state = hs.get_state_handler() self.transport_layer = hs.get_federation_transport_client() - self.hostname = hs.hostname + self.server_name = hs.hostname self.signing_key = hs.signing_key # Cache mapping `event_id` to a tuple of the event 
itself and the `pull_origin` # (which server we pulled the event from) self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache( cache_name="get_pdu_cache", + server_name=self.server_name, clock=self._clock, max_len=1000, expiry_ms=120 * 1000, @@ -162,6 +163,7 @@ def __init__(self, hs: "HomeServer"): Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], ] = ExpiringCache( cache_name="get_room_hierarchy_cache", + server_name=self.server_name, clock=self._clock, max_len=1000, expiry_ms=5 * 60 * 1000, @@ -1068,7 +1070,7 @@ async def send_request(destination: str) -> Tuple[str, EventBase, RoomVersion]: # there's some we never care about ev = builder.create_local_event_from_event_dict( self._clock, - self.hostname, + self.server_name, self.signing_key, room_version=room_version, event_dict=pdu_dict, diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2a7f5b2c4d5..3e6b8b84936 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -159,7 +159,10 @@ def __init__(self, hs: "HomeServer"): # We cache results for transaction with the same ID self._transaction_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "fed_txn_handler", timeout_ms=30000 + clock=hs.get_clock(), + name="fed_txn_handler", + server_name=self.server_name, + timeout_ms=30000, ) self.transaction_actions = TransactionActions(self.store) @@ -169,10 +172,18 @@ def __init__(self, hs: "HomeServer"): # We cache responses to state queries, as they take a while and often # come in waves. 
self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( - ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + ResponseCache( + clock=hs.get_clock(), + name="state_resp", + server_name=self.server_name, + timeout_ms=30000, + ) ) self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "state_ids_resp", timeout_ms=30000 + clock=hs.get_clock(), + name="state_ids_resp", + server_name=self.server_name, + timeout_ms=30000, ) self._federation_metrics_domains = ( diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 5aefc73abab..8c5308b522c 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -846,7 +846,7 @@ async def _is_unknown_user(self, user_id: str) -> bool: # user not found; could be the AS though, so check. services = self.store.get_app_services() - service_list = [s for s in services if s.sender == user_id] + service_list = [s for s in services if s.sender.to_string() == user_id] return len(service_list) == 0 async def _check_user_exists(self, user_id: str) -> bool: diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index e825626558a..65fbb48768e 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -1215,6 +1215,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater): "Handles incoming device list updates from federation and updates the DB" def __init__(self, hs: "HomeServer", device_handler: DeviceHandler): + self.server_name = hs.hostname self.store = hs.get_datastores().main self.federation = hs.get_federation_client() self.server_name = hs.hostname # nb must be called this for @measure_func @@ -1235,6 +1236,7 @@ def __init__(self, hs: "HomeServer", device_handler: DeviceHandler): # resyncs. 
self._seen_updates: ExpiringCache[str, Set[str]] = ExpiringCache( cache_name="device_update_edu", + server_name=self.server_name, clock=self.clock, max_len=10000, expiry_ms=30 * 60 * 1000, diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 74c697960fe..11284ccd0bc 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -406,7 +406,7 @@ def can_modify_alias(self, alias: RoomAlias, user_id: Optional[str] = None) -> b ] for service in interested_services: - if user_id == service.sender: + if user_id == service.sender.to_string(): # this user IS the app service so they can do whatever they like return True elif service.is_exclusive_alias(alias.to_string()): diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index bd3c87f5f4e..75d64d2d50b 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -60,6 +60,7 @@ class InitialSyncHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self.auth = hs.get_auth() self.state_handler = hs.get_state_handler() @@ -77,7 +78,11 @@ def __init__(self, hs: "HomeServer"): bool, bool, ] - ] = ResponseCache(hs.get_clock(), "initial_sync_cache") + ] = ResponseCache( + clock=hs.get_clock(), + name="initial_sync_cache", + server_name=self.server_name, + ) self._event_serializer = hs.get_event_client_serializer() self._storage_controllers = hs.get_storage_controllers() self._state_storage_controller = self._storage_controllers.state diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 7c76c187bc6..aa295fb6c8a 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -558,8 +558,9 @@ def __init__(self, hs: "HomeServer"): self._external_cache_joined_hosts_updates: Optional[ExpiringCache] = None if self._external_cache.is_enabled(): self._external_cache_joined_hosts_updates = ExpiringCache( - 
"_external_cache_joined_hosts_updates", - self.clock, + cache_name="_external_cache_joined_hosts_updates", + server_name=self.server_name, + clock=self.clock, expiry_ms=30 * 60 * 1000, ) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 76aa90e11ba..4958ab5e750 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -55,6 +55,7 @@ class ProfileHandler: """ def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname # nb must be called this for @cached self.store = hs.get_datastores().main self.clock = hs.get_clock() self.hs = hs diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index b063e301e69..d8c4d0c20e7 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -119,6 +119,7 @@ class EventContext: class RoomCreationHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.auth = hs.get_auth() @@ -175,7 +176,10 @@ def __init__(self, hs: "HomeServer"): # succession, only process the first attempt and return its result to # subsequent requests self._upgrade_response_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS + clock=hs.get_clock(), + name="room_upgrade", + server_name=self.server_name, + timeout_ms=FIVE_MINUTES_IN_MS, ) self._server_notices_mxid = hs.config.servernotices.server_notices_mxid diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 07eac71e2af..9d4307fb078 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -61,16 +61,26 @@ class RoomListHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname # nb must be called this for @cached self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.hs = hs self.enable_room_list_search = 
hs.config.roomdirectory.enable_room_list_search self.response_cache: ResponseCache[ Tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] - ] = ResponseCache(hs.get_clock(), "room_list") + ] = ResponseCache( + clock=hs.get_clock(), + name="room_list", + server_name=self.server_name, + ) self.remote_response_cache: ResponseCache[ Tuple[str, Optional[int], Optional[str], bool, Optional[str]] - ] = ResponseCache(hs.get_clock(), "remote_room_list", timeout_ms=30 * 1000) + ] = ResponseCache( + clock=hs.get_clock(), + name="remote_room_list", + server_name=self.server_name, + timeout_ms=30 * 1000, + ) async def get_local_public_room_list( self, diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index 1f322ac263b..838fee6a303 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -96,6 +96,7 @@ class RoomSummaryHandler: _PAGINATION_SESSION_VALIDITY_PERIOD_MS = 5 * 60 * 1000 def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self._event_auth_handler = hs.get_event_auth_handler() self._store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() @@ -121,8 +122,9 @@ def __init__(self, hs: "HomeServer"): Optional[Tuple[str, ...]], ] ] = ResponseCache( - hs.get_clock(), - "get_room_hierarchy", + clock=hs.get_clock(), + name="get_room_hierarchy", + server_name=self.server_name, ) self._msc3266_enabled = hs.config.experimental.msc3266_enabled diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7b99defac11..69064e751aa 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -353,8 +353,9 @@ def __init__(self, hs: "HomeServer"): # cached result any more, and we could flush the entry from the cache to save # memory. 
self.response_cache: ResponseCache[SyncRequestKey] = ResponseCache( - hs.get_clock(), - "sync", + clock=hs.get_clock(), + name="sync", + server_name=self.server_name, timeout_ms=hs.config.caches.sync_response_cache_duration, ) @@ -362,8 +363,9 @@ def __init__(self, hs: "HomeServer"): self.lazy_loaded_members_cache: ExpiringCache[ Tuple[str, Optional[str]], LruCache[str, str] ] = ExpiringCache( - "lazy_loaded_members_cache", - self.clock, + cache_name="lazy_loaded_members_cache", + server_name=self.server_name, + clock=self.clock, max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE, ) @@ -1134,7 +1136,7 @@ def get_lazy_loaded_members_cache( ) if cache is None: logger.debug("creating LruCache for %r", cache_key) - cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) + cache = LruCache(max_size=LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) self.lazy_loaded_members_cache[cache_key] = cache else: logger.debug("found LruCache for %r", cache_key) diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index bbef3a59a59..3c49655598e 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -263,6 +263,7 @@ def __init__(self, hs: "HomeServer"): assert hs.get_instance_name() in hs.config.worker.writers.typing + self.server_name = hs.hostname self.auth = hs.get_auth() self.notifier = hs.get_notifier() self.event_auth_handler = hs.get_event_auth_handler() @@ -280,7 +281,9 @@ def __init__(self, hs: "HomeServer"): # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( - "TypingStreamChangeCache", self._latest_room_serial + name="TypingStreamChangeCache", + server_name=self.server_name, + current_stream_pos=self._latest_room_serial, ) def _handle_timeout_for_member(self, now: int, member: RoomMember) -> None: diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 4a47665abd3..15609a799ff 100644 --- 
a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -104,6 +104,13 @@ def __init__( """ Args: server_name: Our homeserver name (used to label metrics) (`hs.hostname`). + reactor + tls_client_options_factory + user_agent + ip_allowlist + ip_blocklist + _srv_resolver + _well_known_resolver """ # proxy_reactor is not blocklisting reactor @@ -133,8 +140,8 @@ def __init__( if _well_known_resolver is None: _well_known_resolver = WellKnownResolver( - server_name, - reactor, + server_name=server_name, + reactor=reactor, agent=BlocklistingAgentWrapper( ProxyAgent( reactor, diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 911cbac7eae..70242ad0aef 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -77,10 +77,6 @@ logger = logging.getLogger(__name__) -_well_known_cache: TTLCache[bytes, Optional[bytes]] = TTLCache("well-known") -_had_valid_well_known_cache: TTLCache[bytes, bool] = TTLCache("had-valid-well-known") - - @attr.s(slots=True, frozen=True, auto_attribs=True) class WellKnownLookupResult: delegated_server: Optional[bytes] @@ -101,6 +97,11 @@ def __init__( """ Args: server_name: Our homeserver name (used to label metrics) (`hs.hostname`). 
+ reactor + agent + user_agent + well_known_cache + had_well_known_cache """ self.server_name = server_name @@ -108,10 +109,14 @@ def __init__( self._clock = Clock(reactor) if well_known_cache is None: - well_known_cache = _well_known_cache + well_known_cache = TTLCache( + cache_name="well-known", server_name=server_name + ) if had_well_known_cache is None: - had_well_known_cache = _had_valid_well_known_cache + had_well_known_cache = TTLCache( + cache_name="had-valid-well-known", server_name=server_name + ) self._well_known_cache = well_known_cache self._had_valid_well_known_cache = had_well_known_cache diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 67ea9cdb811..0013b97723e 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -417,12 +417,12 @@ def __init__( if hs.get_instance_name() in outbound_federation_restricted_to: # Talk to federation directly federation_agent: IAgent = MatrixFederationAgent( - self.server_name, - self.reactor, - tls_client_options_factory, - user_agent.encode("ascii"), - hs.config.server.federation_ip_range_allowlist, - hs.config.server.federation_ip_range_blocklist, + server_name=self.server_name, + reactor=self.reactor, + tls_client_options_factory=tls_client_options_factory, + user_agent=user_agent.encode("ascii"), + ip_allowlist=hs.config.server.federation_ip_range_allowlist, + ip_blocklist=hs.config.server.federation_ip_range_blocklist, ) else: proxy_authorization_secret = hs.config.worker.worker_replication_secret diff --git a/synapse/media/url_previewer.py b/synapse/media/url_previewer.py index 0c665e1942b..eb0104e5432 100644 --- a/synapse/media/url_previewer.py +++ b/synapse/media/url_previewer.py @@ -200,6 +200,7 @@ def __init__( # JSON-encoded OG metadata self._cache: ExpiringCache[str, ObservableDeferred] = ExpiringCache( cache_name="url_previews", + server_name=self.server_name, clock=self.clock, # don't spider URLs more often than 
once an hour expiry_ms=ONE_HOUR, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index f20b98f73f7..fed99319300 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -128,6 +128,7 @@ class BulkPushRuleEvaluator: def __init__(self, hs: "HomeServer"): self.hs = hs + self.server_name = hs.hostname self.store = hs.get_datastores().main self.server_name = hs.hostname # nb must be called this for @measure_func self.clock = hs.get_clock() # nb must be called this for @measure_func @@ -137,10 +138,11 @@ def __init__(self, hs: "HomeServer"): self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled self.room_push_rule_cache_metrics = register_cache( - "cache", - "room_push_rule_cache", + cache_type="cache", + cache_name="room_push_rule_cache", cache=[], # Meaningless size, as this isn't a cache that stores values, resizable=False, + server_name=self.server_name, ) async def _get_rules_for_event( diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 00025386801..31204a83849 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -121,9 +121,14 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): WAIT_FOR_STREAMS: ClassVar[bool] = True def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname + if self.CACHE: self.response_cache: ResponseCache[str] = ResponseCache( - hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000 + clock=hs.get_clock(), + name="repl." 
+ self.NAME, + server_name=self.server_name, + timeout_ms=30 * 60 * 1000, ) # We reserve `instance_name` as a parameter to sending requests, so we diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 8a781f759c6..aa0aa36cd94 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -314,7 +314,9 @@ async def _do_appservice_login( should_issue_refresh_token=should_issue_refresh_token, # The user represented by an appservice's configured sender_localpart # is not actually created in Synapse. - should_check_deactivated_or_locked=qualified_user_id != appservice.sender, + should_check_deactivated_or_locked=( + qualified_user_id != appservice.sender.to_string() + ), request_info=request_info, ) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index bac02122d04..c9fb9dc4d3a 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -111,6 +111,7 @@ class SyncRestServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs + self.server_name = hs.hostname self.auth = hs.get_auth() self.store = hs.get_datastores().main self.sync_handler = hs.get_sync_handler() @@ -125,6 +126,7 @@ def __init__(self, hs: "HomeServer"): self._json_filter_cache: LruCache[str, bool] = LruCache( max_size=1000, cache_name="sync_valid_filter", + server_name=self.server_name, ) # Ratelimiter for presence updates, keyed by requester. 
diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index 001a290e87e..19f86b5a563 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -35,6 +35,7 @@ class ServerNoticesManager: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname # nb must be called this for @cached self._store = hs.get_datastores().main self._config = hs.config self._account_data_handler = hs.get_account_data_handler() @@ -44,7 +45,6 @@ def __init__(self, hs: "HomeServer"): self._message_handler = hs.get_message_handler() self._storage_controllers = hs.get_storage_controllers() self._is_mine_id = hs.is_mine_id - self._server_name = hs.hostname self._notifier = hs.get_notifier() self.server_notices_mxid = self._config.servernotices.server_notices_mxid @@ -77,7 +77,7 @@ async def send_notice( assert self.server_notices_mxid is not None requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) logger.info("Sending server notice to %s", user_id) @@ -151,7 +151,7 @@ async def get_or_create_notice_room_for_user(self, user_id: str) -> str: assert self._is_mine_id(user_id), "Cannot send server notices to remote users" requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) room_id = await self.maybe_get_notice_room_for_user(user_id) @@ -256,7 +256,7 @@ async def maybe_invite_user_to_room(self, user_id: str, room_id: str) -> None: """ assert self.server_notices_mxid is not None requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) # Check whether the user has already joined or been invited to this room. 
If @@ -279,7 +279,7 @@ async def maybe_invite_user_to_room(self, user_id: str, room_id: str) -> None: if self._config.servernotices.server_notices_auto_join: user_requester = create_requester( - user_id, authenticated_entity=self._server_name + user_id, authenticated_entity=self.server_name ) await self._room_member_handler.update_membership( requester=user_requester, diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 9c24525845f..976a98a58b4 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -641,6 +641,7 @@ def __init__(self, hs: "HomeServer"): self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( ExpiringCache( cache_name="state_cache", + server_name=self.server_name, clock=self.clock, max_len=100000, expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 548e7df9305..d55c9e18ed6 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -55,7 +55,7 @@ def __init__( hs: "HomeServer", ): self.hs = hs - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self._clock = hs.get_clock() self.database_engine = database.engine self.db_pool = database diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index d79791fed4f..8997f4526fc 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -68,7 +68,7 @@ class StateStorageController: """ def __init__(self, hs: "HomeServer", stores: "Databases"): - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self._is_mine_id = hs.is_mine_id self._clock = hs.get_clock() self.stores = stores diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index be5c494ed61..883ab93f7c0 100644 --- a/synapse/storage/databases/main/account_data.py +++ 
b/synapse/storage/databases/main/account_data.py @@ -90,7 +90,9 @@ def __init__( account_max = self.get_max_account_data_stream_id() self._account_data_stream_cache = StreamChangeCache( - "AccountDataAndTagsChangeCache", account_max + name="AccountDataAndTagsChangeCache", + server_name=self.server_name, + current_stream_pos=account_max, ) self.db_pool.updates.register_background_index_update( diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 766c94fc14e..9862e574fd1 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -126,7 +126,7 @@ def get_app_service_by_user_id(self, user_id: str) -> Optional[ApplicationServic The application service or None. """ for service in self.services_cache: - if service.sender == user_id: + if service.sender.to_string() == user_id: return service return None diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index 69008804bda..cf7bc4ac693 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -421,6 +421,7 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname if hs.config.redis.redis_enabled: # If we're using Redis, we can shift this update process off to @@ -434,7 +435,9 @@ def __init__( # (user_id, access_token, ip,) -> last_seen self.client_ip_last_seen = LruCache[Tuple[str, str, str], int]( - cache_name="client_ip_last_seen", max_size=50000 + cache_name="client_ip_last_seen", + server_name=self.server_name, + max_size=50000, ) if hs.config.worker.run_background_tasks and self.user_ips_max_age: diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index a22eab24742..da10afbebec 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -94,6 
+94,7 @@ def __init__( Tuple[str, Optional[str]], int ] = ExpiringCache( cache_name="last_device_delete_cache", + server_name=self.server_name, clock=self._clock, max_len=10000, expiry_ms=30 * 60 * 1000, @@ -127,8 +128,9 @@ def __init__( limit=1000, ) self._device_inbox_stream_cache = StreamChangeCache( - "DeviceInboxStreamChangeCache", - min_device_inbox_id, + name="DeviceInboxStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_inbox_id, prefilled_cache=device_inbox_prefill, ) @@ -143,8 +145,9 @@ def __init__( limit=1000, ) self._device_federation_outbox_stream_cache = StreamChangeCache( - "DeviceFederationOutboxStreamChangeCache", - min_device_outbox_id, + name="DeviceFederationOutboxStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_outbox_id, prefilled_cache=device_outbox_prefill, ) diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 941d278e6c7..f054c661025 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -128,8 +128,9 @@ def __init__( limit=10000, ) self._device_list_stream_cache = StreamChangeCache( - "DeviceListStreamChangeCache", - min_device_list_id, + name="DeviceListStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_list_id, prefilled_cache=device_list_prefill, ) @@ -142,8 +143,9 @@ def __init__( limit=10000, ) self._device_list_room_stream_cache = StreamChangeCache( - "DeviceListRoomStreamChangeCache", - min_device_list_room_id, + name="DeviceListRoomStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_list_room_id, prefilled_cache=device_list_room_prefill, ) @@ -159,8 +161,9 @@ def __init__( limit=1000, ) self._user_signature_stream_cache = StreamChangeCache( - "UserSignatureStreamChangeCache", - user_signature_stream_list_id, + name="UserSignatureStreamChangeCache", + server_name=self.server_name, + 
current_stream_pos=user_signature_stream_list_id, prefilled_cache=user_signature_stream_prefill, ) @@ -178,8 +181,9 @@ def __init__( limit=10000, ) self._device_list_federation_stream_cache = StreamChangeCache( - "DeviceListFederationStreamChangeCache", - device_list_federation_list_id, + name="DeviceListFederationStreamChangeCache", + server_name=self.server_name, + current_stream_pos=device_list_federation_list_id, prefilled_cache=device_list_federation_prefill, ) @@ -1769,11 +1773,16 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname # Map of (user_id, device_id) -> bool. If there is an entry that implies # the device exists. self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = ( - LruCache(cache_name="device_id_exists", max_size=10000) + LruCache( + cache_name="device_id_exists", + server_name=self.server_name, + max_size=10000, + ) ) async def store_device( diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index dfc25d8935b..8e623bf0617 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -148,7 +148,10 @@ def __init__( # Cache of event ID to list of auth event IDs and their depths. 
self._event_auth_cache: LruCache[str, List[Tuple[str, int]]] = LruCache( - 500000, "_event_auth_cache", size_callback=len + max_size=500000, + server_name=self.server_name, + cache_name="_event_auth_cache", + size_callback=len, ) # Flag used by unit tests to disable fallback when there is no chain cover diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 2929b1d57a1..d9ef93f8265 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -269,8 +269,9 @@ def __init__( limit=1000, ) self._curr_state_delta_stream_cache: StreamChangeCache = StreamChangeCache( - "_curr_state_delta_stream_cache", - min_curr_state_delta_id, + name="_curr_state_delta_stream_cache", + server_name=self.server_name, + current_stream_pos=min_curr_state_delta_id, prefilled_cache=curr_state_delta_prefill, ) @@ -283,6 +284,7 @@ def __init__( self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( AsyncLruCache( + server_name=self.server_name, cache_name="*getEvent*", max_size=hs.config.caches.event_cache_size, # `extra_index_cb` Returns a tuple as that is the key type diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 065c8856036..12cff1d3522 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -108,8 +108,9 @@ def __init__( max_value=self._presence_id_gen.get_current_token(), ) self.presence_stream_cache = StreamChangeCache( - "PresenceStreamChangeCache", - min_presence_val, + name="PresenceStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_presence_val, prefilled_cache=presence_cache_prefill, ) diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 86c87f78bf1..3bc977d4971 100644 --- a/synapse/storage/databases/main/push_rule.py +++ 
b/synapse/storage/databases/main/push_rule.py @@ -163,8 +163,9 @@ def __init__( ) self.push_rules_stream_cache = StreamChangeCache( - "PushRulesStreamChangeCache", - push_rules_id, + name="PushRulesStreamChangeCache", + server_name=self.server_name, + current_stream_pos=push_rules_id, prefilled_cache=push_rules_prefill, ) diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 99643315107..81f50467e7d 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -158,8 +158,9 @@ def __init__( limit=10000, ) self._receipts_stream_cache = StreamChangeCache( - "ReceiptsRoomChangeCache", - min_receipts_stream_id, + name="ReceiptsRoomChangeCache", + server_name=self.server_name, + current_stream_pos=min_receipts_stream_id, prefilled_cache=receipts_stream_prefill, ) diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 3fda49f31f1..b6c6b69b22c 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -617,12 +617,15 @@ def __init__( max_value=events_max, ) self._events_stream_cache = StreamChangeCache( - "EventsRoomStreamChangeCache", - min_event_val, + name="EventsRoomStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_event_val, prefilled_cache=event_cache_prefill, ) self._membership_stream_cache = StreamChangeCache( - "MembershipStreamChangeCache", events_max + name="MembershipStreamChangeCache", + server_name=self.server_name, + current_stream_pos=events_max, ) self._stream_order_on_start = self.get_room_max_stream_ordering() diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index c1a66dcba02..9b3b7e086f9 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -92,6 +92,7 @@ def __init__( ): super().__init__(database, db_conn, hs) self._state_deletion_store = 
state_deletion_store + self.server_name = hs.hostname # Originally the state store used a single DictionaryCache to cache the # event IDs for the state types in a given state group to avoid hammering @@ -123,14 +124,16 @@ def __init__( # vast majority of state in Matrix (today) is member events. self._state_group_cache: DictionaryCache[int, StateKey, str] = DictionaryCache( - "*stateGroupCache*", + name="*stateGroupCache*", + server_name=self.server_name, # TODO: this hasn't been tuned yet - 50000, + max_entries=50000, ) self._state_group_members_cache: DictionaryCache[int, StateKey, str] = ( DictionaryCache( - "*stateGroupMembersCache*", - 500000, + name="*stateGroupMembersCache*", + server_name=self.server_name, + max_entries=500000, ) ) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 76e6e139c6f..3087ad6adc2 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -31,6 +31,7 @@ from prometheus_client.core import Gauge from synapse.config.cache import add_resizable_cache +from synapse.metrics import SERVER_NAME_LABEL from synapse.util.metrics import DynamicCollectorRegistry logger = logging.getLogger(__name__) @@ -46,50 +47,65 @@ caches_by_name: Dict[str, Sized] = {} cache_size = Gauge( - "synapse_util_caches_cache_size", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_size", + "", + labelnames=["name", SERVER_NAME_LABEL], + registry=CACHE_METRIC_REGISTRY, ) cache_hits = Gauge( - "synapse_util_caches_cache_hits", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_hits", + "", + labelnames=["name", SERVER_NAME_LABEL], + registry=CACHE_METRIC_REGISTRY, ) cache_evicted = Gauge( "synapse_util_caches_cache_evicted_size", "", - ["name", "reason"], + labelnames=["name", "reason", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) cache_total = Gauge( - "synapse_util_caches_cache", "", ["name"], registry=CACHE_METRIC_REGISTRY + 
"synapse_util_caches_cache", + "", + labelnames=["name", SERVER_NAME_LABEL], + registry=CACHE_METRIC_REGISTRY, ) cache_max_size = Gauge( - "synapse_util_caches_cache_max_size", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_max_size", + "", + labelnames=["name", SERVER_NAME_LABEL], + registry=CACHE_METRIC_REGISTRY, ) cache_memory_usage = Gauge( "synapse_util_caches_cache_size_bytes", "Estimated memory usage of the caches", - ["name"], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_size = Gauge( "synapse_util_caches_response_cache_size", "", - ["name"], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_hits = Gauge( "synapse_util_caches_response_cache_hits", "", - ["name"], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_evicted = Gauge( "synapse_util_caches_response_cache_evicted_size", "", - ["name", "reason"], + labelnames=["name", "reason", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_total = Gauge( - "synapse_util_caches_response_cache", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_response_cache", + "", + labelnames=["name", SERVER_NAME_LABEL], + registry=CACHE_METRIC_REGISTRY, ) @@ -103,12 +119,17 @@ class EvictionReason(Enum): invalidation = auto() -@attr.s(slots=True, auto_attribs=True) +@attr.s(slots=True, auto_attribs=True, kw_only=True) class CacheMetric: + """ + Used to track cache metrics + """ + _cache: Sized _cache_type: str _cache_name: str _collect_callback: Optional[Callable] + _server_name: str hits: int = 0 misses: int = 0 @@ -145,34 +166,34 @@ def describe(self) -> List[str]: def collect(self) -> None: try: + labels_base = { + "name": self._cache_name, + SERVER_NAME_LABEL: self._server_name, + } if self._cache_type == "response_cache": - response_cache_size.labels(self._cache_name).set(len(self._cache)) - 
response_cache_hits.labels(self._cache_name).set(self.hits) + response_cache_size.labels(**labels_base).set(len(self._cache)) + response_cache_hits.labels(**labels_base).set(self.hits) for reason in EvictionReason: - response_cache_evicted.labels(self._cache_name, reason.name).set( - self.eviction_size_by_reason[reason] - ) - response_cache_total.labels(self._cache_name).set( - self.hits + self.misses - ) + response_cache_evicted.labels( + **{**labels_base, "reason": reason.name} + ).set(self.eviction_size_by_reason[reason]) + response_cache_total.labels(**labels_base).set(self.hits + self.misses) else: - cache_size.labels(self._cache_name).set(len(self._cache)) - cache_hits.labels(self._cache_name).set(self.hits) + cache_size.labels(**labels_base).set(len(self._cache)) + cache_hits.labels(**labels_base).set(self.hits) for reason in EvictionReason: - cache_evicted.labels(self._cache_name, reason.name).set( + cache_evicted.labels(**{**labels_base, "reason": reason.name}).set( self.eviction_size_by_reason[reason] ) - cache_total.labels(self._cache_name).set(self.hits + self.misses) + cache_total.labels(**labels_base).set(self.hits + self.misses) max_size = getattr(self._cache, "max_size", None) if max_size: - cache_max_size.labels(self._cache_name).set(max_size) + cache_max_size.labels(**labels_base).set(max_size) if TRACK_MEMORY_USAGE: # self.memory_usage can be None if nothing has been inserted # into the cache yet. 
- cache_memory_usage.labels(self._cache_name).set( - self.memory_usage or 0 - ) + cache_memory_usage.labels(**labels_base).set(self.memory_usage or 0) if self._collect_callback: self._collect_callback() except Exception as e: @@ -181,9 +202,11 @@ def collect(self) -> None: def register_cache( + *, cache_type: str, cache_name: str, cache: Sized, + server_name: str, collect_callback: Optional[Callable] = None, resizable: bool = True, resize_callback: Optional[Callable] = None, @@ -196,6 +219,8 @@ def register_cache( cache_name: name of the cache cache: cache itself, which must implement __len__(), and may optionally implement a max_size property + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). collect_callback: If given, a function which is called during metric collection to update additional metrics. resizable: Whether this cache supports being resized, in which case either @@ -210,7 +235,13 @@ def register_cache( resize_callback = cache.set_cache_factor # type: ignore add_resizable_cache(cache_name, resize_callback) - metric = CacheMetric(cache, cache_type, cache_name, collect_callback) + metric = CacheMetric( + cache=cache, + cache_type=cache_type, + cache_name=cache_name, + server_name=server_name, + collect_callback=collect_callback, + ) metric_name = "cache_%s_%s" % (cache_type, cache_name) caches_by_name[cache_name] = cache CACHE_METRIC_REGISTRY.register_hook(metric_name, metric.collect) diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index 14868fa4d39..0c6c912918a 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -79,7 +79,9 @@ class DeferredCache(Generic[KT, VT]): def __init__( self, + *, name: str, + server_name: str, max_entries: int = 1000, tree: bool = False, iterable: bool = False, @@ -89,6 +91,8 @@ def __init__( """ Args: name: The name of the cache + server_name: The homeserver name
that this cache is associated with + (used to label the metric) (`hs.hostname`). max_entries: Maximum amount of entries that the cache will hold tree: Use a TreeCache instead of a dict as the underlying cache type iterable: If True, count each item in the cached object as an entry, @@ -113,6 +117,7 @@ def metrics_cb() -> None: # a Deferred. self.cache: LruCache[KT, VT] = LruCache( max_size=max_entries, + server_name=server_name, cache_name=name, cache_type=cache_type, size_callback=( diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 29a95867104..9630cd6d26b 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -33,6 +33,7 @@ List, Mapping, Optional, + Protocol, Sequence, Tuple, Type, @@ -153,6 +154,14 @@ def __init__( ) +class HasServerName(Protocol): + server_name: str + """ + The homeserver name that this cache is associated with (used to label the metric) + (`hs.hostname`). + """ + + class DeferredCacheDescriptor(_CacheDescriptorBase): """A method decorator that applies a memoizing cache around the function. @@ -200,6 +209,7 @@ def foo(self, key, cache_context): def __init__( self, + *, orig: Callable[..., Any], max_entries: int = 1000, num_args: Optional[int] = None, @@ -229,10 +239,20 @@ def __init__( self.prune_unread_entries = prune_unread_entries def __get__( - self, obj: Optional[Any], owner: Optional[Type] + self, obj: Optional[HasServerName], owner: Optional[Type] ) -> Callable[..., "defer.Deferred[Any]"]: + # We need access to instance-level `obj.server_name` attribute + assert obj is not None, ( + "Cannot call cached method from class (❌ `MyClass.cached_method()`) " + "and must be called from an instance (✅ `MyClass().cached_method()`). " + ) + assert obj.server_name is not None, ( + "The `server_name` attribute must be set on the object where `@cached` decorator is used." 
+ ) + cache: DeferredCache[CacheKey, Any] = DeferredCache( name=self.name, + server_name=obj.server_name, max_entries=self.max_entries, tree=self.tree, iterable=self.iterable, @@ -490,7 +510,7 @@ class _CachedFunctionDescriptor: def __call__(self, orig: F) -> CachedFunction[F]: d = DeferredCacheDescriptor( - orig, + orig=orig, max_entries=self.max_entries, num_args=self.num_args, uncached_args=self.uncached_args, diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index 14bd3ba3b04..168ddc51cd5 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -127,7 +127,15 @@ class DictionaryCache(Generic[KT, DKT, DV]): for the '2' dict key. """ - def __init__(self, name: str, max_entries: int = 1000): + def __init__(self, *, name: str, server_name: str, max_entries: int = 1000): + """ + Args: + name + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). + max_entries + """ + # We use a single LruCache to store two different types of entries: # 1. Map from (key, dict_key) -> dict value (or sentinel, indicating # the key doesn't exist in the dict); and @@ -152,6 +160,7 @@ def __init__(self, name: str, max_entries: int = 1000): Union[_PerKeyValue, Dict[DKT, DV]], ] = LruCache( max_size=max_entries, + server_name=server_name, cache_name=name, cache_type=TreeCache, size_callback=len, diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 3198fdd2eda..4be4c6f01b1 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -46,7 +46,9 @@ class ExpiringCache(Generic[KT, VT]): def __init__( self, + *, cache_name: str, + server_name: str, clock: Clock, max_len: int = 0, expiry_ms: int = 0, @@ -56,6 +58,8 @@ def __init__( """ Args: cache_name: Name of this cache, used for logging. 
+ server_name: The homeserver name that this cache is associated + with (used to label the metric) (`hs.hostname`). clock max_len: Max size of dict. If the dict grows larger than this then the oldest items get automatically evicted. Default is 0, @@ -83,7 +87,12 @@ def __init__( self.iterable = iterable - self.metrics = register_cache("expiring", cache_name, self) + self.metrics = register_cache( + cache_type="expiring", + cache_name=cache_name, + cache=self, + server_name=server_name, + ) if not self._expiry_ms: # Don't bother starting the loop if things never expire diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 2e5efa3a521..466362e79ce 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -376,9 +376,43 @@ class LruCache(Generic[KT, VT]): If cache_type=TreeCache, all keys must be tuples. """ + @overload def __init__( self, + *, max_size: int, + server_name: str, + cache_name: str, + cache_type: Type[Union[dict, TreeCache]] = dict, + size_callback: Optional[Callable[[VT], int]] = None, + metrics_collection_callback: Optional[Callable[[], None]] = None, + apply_cache_factor_from_config: bool = True, + clock: Optional[Clock] = None, + prune_unread_entries: bool = True, + extra_index_cb: Optional[Callable[[KT, VT], KT]] = None, + ): ... + + @overload + def __init__( + self, + *, + max_size: int, + server_name: Literal[None] = None, + cache_name: Literal[None] = None, + cache_type: Type[Union[dict, TreeCache]] = dict, + size_callback: Optional[Callable[[VT], int]] = None, + metrics_collection_callback: Optional[Callable[[], None]] = None, + apply_cache_factor_from_config: bool = True, + clock: Optional[Clock] = None, + prune_unread_entries: bool = True, + extra_index_cb: Optional[Callable[[KT, VT], KT]] = None, + ): ... 
+ + def __init__( + self, + *, + max_size: int, + server_name: Optional[str] = None, cache_name: Optional[str] = None, cache_type: Type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, @@ -392,8 +426,13 @@ def __init__( Args: max_size: The maximum amount of entries the cache can hold - cache_name: The name of this cache, for the prometheus metrics. If unset, - no metrics will be reported on this cache. + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). Must be set if `cache_name` is + set. If unset, no metrics will be reported on this cache. + + cache_name: The name of this cache, for the prometheus metrics. Must be set + if `server_name` is set. If unset, no metrics will be reported on this + cache. cache_type: type of underlying cache to be used. Typically one of dict @@ -457,11 +496,12 @@ def __init__( # do yet when we get resized. self._on_resize: Optional[Callable[[], None]] = None - if cache_name is not None: + if cache_name is not None and server_name is not None: metrics: Optional[CacheMetric] = register_cache( - "lru_cache", - cache_name, - self, + cache_type="lru_cache", + cache_name=cache_name, + cache=self, + server_name=server_name, collect_callback=metrics_collection_callback, ) else: diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 54b99134b9c..49a9151916e 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -103,18 +103,35 @@ class ResponseCache(Generic[KV]): def __init__( self, + *, clock: Clock, name: str, + server_name: str, timeout_ms: float = 0, enable_logging: bool = True, ): + """ + Args: + clock + name + server_name: The homeserver name that this cache is associated + with (used to label the metric) (`hs.hostname`). 
+ timeout_ms + enable_logging + """ self._result_cache: Dict[KV, ResponseCacheEntry] = {} self.clock = clock self.timeout_sec = timeout_ms / 1000.0 self._name = name - self._metrics = register_cache("response_cache", name, self, resizable=False) + self._metrics = register_cache( + cache_type="response_cache", + cache_name=name, + cache=self, + server_name=server_name, + resizable=False, + ) self._enable_logging = enable_logging def size(self) -> int: diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 5ac8643eefc..2cffd352d82 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -73,11 +73,23 @@ class StreamChangeCache: def __init__( self, + *, name: str, + server_name: str, current_stream_pos: int, max_size: int = 10000, prefilled_cache: Optional[Mapping[EntityType, int]] = None, ) -> None: + """ + Args: + name + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). 
+ current_stream_pos + max_size + prefilled_cache + """ + self._original_max_size: int = max_size self._max_size = math.floor(max_size) @@ -96,7 +108,11 @@ def __init__( self.name = name self.metrics = caches.register_cache( - "cache", self.name, self._cache, resize_callback=self.set_cache_factor + cache_type="cache", + cache_name=self.name, + server_name=server_name, + cache=self._cache, + resize_callback=self.set_cache_factor, ) if prefilled_cache: diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 26a088603ae..18c3a1e51c5 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -40,7 +40,21 @@ class TTLCache(Generic[KT, VT]): """A key/value cache implementation where each entry has its own TTL""" - def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): + def __init__( + self, + *, + cache_name: str, + server_name: str, + timer: Callable[[], float] = time.time, + ): + """ + Args: + cache_name + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). + timer: Function used to get the current time in seconds since the epoch. 
+ """ + # map from key to _CacheEntry self._data: Dict[KT, _CacheEntry[KT, VT]] = {} @@ -49,7 +63,13 @@ def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): self._timer = timer - self._metrics = register_cache("ttl", cache_name, self, resizable=False) + self._metrics = register_cache( + cache_type="ttl", + cache_name=cache_name, + cache=self, + server_name=server_name, + resizable=False, + ) def set(self, key: KT, value: VT, ttl: float) -> None: """Add/update an entry in the cache diff --git a/synmark/suites/lrucache.py b/synmark/suites/lrucache.py index 49d200c43b5..d109441e551 100644 --- a/synmark/suites/lrucache.py +++ b/synmark/suites/lrucache.py @@ -29,7 +29,7 @@ async def main(reactor: ISynapseReactor, loops: int) -> float: """ Benchmark `loops` number of insertions into LruCache without eviction. """ - cache: LruCache[int, bool] = LruCache(loops) + cache: LruCache[int, bool] = LruCache(max_size=loops) start = perf_counter() diff --git a/synmark/suites/lrucache_evict.py b/synmark/suites/lrucache_evict.py index 77061625a90..00cfdd04471 100644 --- a/synmark/suites/lrucache_evict.py +++ b/synmark/suites/lrucache_evict.py @@ -30,7 +30,7 @@ async def main(reactor: ISynapseReactor, loops: int) -> float: Benchmark `loops` number of insertions into LruCache where half of them are evicted. 
""" - cache: LruCache[int, bool] = LruCache(loops // 2) + cache: LruCache[int, bool] = LruCache(max_size=loops // 2) start = perf_counter() diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index bd229cf7e9c..95a4683d039 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -60,7 +60,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # modify its config instead of the hs' self.auth_blocking = AuthBlocking(hs) - self.test_user = "@foo:bar" + self.test_user_id = UserID.from_string("@foo:bar") self.test_token = b"_test_token_" # this is overridden for the appservice tests @@ -71,7 +71,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def test_get_user_by_req_user_valid_token(self) -> None: user_info = TokenLookupResult( - user_id=self.test_user, token_id=5, device_id="device" + user_id=self.test_user_id.to_string(), token_id=5, device_id="device" ) self.store.get_user_by_access_token = AsyncMock(return_value=user_info) self.store.mark_access_token_as_used = AsyncMock(return_value=None) @@ -81,7 +81,7 @@ def test_get_user_by_req_user_valid_token(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user.to_string(), self.test_user) + self.assertEqual(requester.user, self.test_user_id) def test_get_user_by_req_user_bad_token(self) -> None: self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -96,7 +96,7 @@ def test_get_user_by_req_user_bad_token(self) -> None: self.assertEqual(f.errcode, "M_UNKNOWN_TOKEN") def test_get_user_by_req_user_missing_token(self) -> None: - user_info = TokenLookupResult(user_id=self.test_user, token_id=5) + user_info = TokenLookupResult(user_id=self.test_user_id.to_string(), token_id=5) self.store.get_user_by_access_token = AsyncMock(return_value=user_info) request = 
Mock(args={}) @@ -109,7 +109,10 @@ def test_get_user_by_req_appservice_valid_token(self) -> None: app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -119,7 +122,7 @@ def test_get_user_by_req_appservice_valid_token(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user.to_string(), self.test_user) + self.assertEqual(requester.user, self.test_user_id) def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: from netaddr import IPSet @@ -127,7 +130,7 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: app_service = Mock( token="foobar", url="a_url", - sender=self.test_user, + sender=self.test_user_id, ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -138,7 +141,7 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user, self.test_user_id) def test_get_user_by_req_appservice_valid_token_bad_ip(self) -> None: from netaddr import IPSet @@ -146,7 +149,7 @@ def test_get_user_by_req_appservice_valid_token_bad_ip(self) -> None: app_service = Mock( token="foobar", url="a_url", - sender=self.test_user, + sender=self.test_user_id, ip_range_whitelist=IPSet(["192.168.0.0/16"]), )
self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -176,7 +179,7 @@ def test_get_user_by_req_appservice_bad_token(self) -> None: self.assertEqual(f.errcode, "M_UNKNOWN_TOKEN") def test_get_user_by_req_appservice_missing_token(self) -> None: - app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service = Mock(token="foobar", url="a_url", sender=self.test_user_id) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -191,7 +194,10 @@ def test_get_user_by_req_appservice_missing_token(self) -> None: def test_get_user_by_req_appservice_valid_token_valid_user_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -215,7 +221,10 @@ class FakeUserInfo: def test_get_user_by_req_appservice_valid_token_bad_user_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=False) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -238,7 +247,10 @@ def test_get_user_by_req_appservice_valid_token_valid_device_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" masquerading_device_id = b"DOPPELDEVICE" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = 
Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -270,7 +282,10 @@ def test_get_user_by_req_appservice_valid_token_invalid_device_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" masquerading_device_id = b"NOT_A_REAL_DEVICE_ID" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -436,7 +451,7 @@ def test_blocking_mau__appservice_requester_allowed_when_not_tracking_ips( namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] }, - sender="@appservice:sender", + sender=UserID.from_string("@appservice:server"), ) requester = Requester( user=UserID.from_string("@appservice:server"), @@ -467,7 +482,7 @@ def test_blocking_mau__appservice_requester_disallowed_when_tracking_ips( namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] }, - sender="@appservice:sender", + sender=UserID.from_string("@appservice:server"), ) requester = Requester( user=UserID.from_string("@appservice:server"), diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index 93f4f989162..2e45d4e4d23 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -5,7 +5,7 @@ from synapse.config.ratelimiting import RatelimitSettings from synapse.module_api import RatelimitOverride from synapse.module_api.callbacks.ratelimit_callbacks import RatelimitModuleApiCallbacks -from synapse.types import create_requester +from synapse.types import UserID, create_requester from tests import unittest @@ -40,7 +40,7 @@ def test_allowed_appservice_ratelimited_via_can_requester_do_action(self) -> Non token="fake_token", id="foo", rate_limited=True, - sender="@as:example.com", + 
sender=UserID.from_string("@as:example.com"), ) as_requester = create_requester("@user:example.com", app_service=appservice) @@ -76,7 +76,7 @@ def test_allowed_appservice_via_can_requester_do_action(self) -> None: token="fake_token", id="foo", rate_limited=False, - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), ) as_requester = create_requester("@user:example.com", app_service=appservice) diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 0f197365409..8fcd928d316 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -25,7 +25,7 @@ from synapse.appservice import ApplicationService from synapse.server import HomeServer -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -41,7 +41,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.api = hs.get_application_service_api() self.service = ApplicationService( id="unique_identifier", - sender="@as:test", + sender=UserID.from_string("@as:test"), url=URL, token="unused", hs_token=TOKEN, diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 3fa44266387..620c2b907b2 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -25,6 +25,7 @@ from twisted.internet import defer from synapse.appservice import ApplicationService, Namespace +from synapse.types import UserID from tests import unittest @@ -37,7 +38,7 @@ class ApplicationServiceTestCase(unittest.TestCase): def setUp(self) -> None: self.service = ApplicationService( id="unique_identifier", - sender="@as:test", + sender=UserID.from_string("@as:test"), url="some_url", token="some_token", ) @@ -226,11 +227,11 @@ def test_regex_multiple_matches( @defer.inlineCallbacks def test_interested_in_self(self) -> Generator["defer.Deferred[Any]", object, None]: # make sure invites get through - 
self.service.sender = "@appservice:name" + self.service.sender = UserID.from_string("@appservice:name") self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.type = "m.room.member" self.event.content = {"membership": "invite"} - self.event.state_key = self.service.sender + self.event.state_key = self.service.sender.to_string() self.assertTrue( ( yield self.service.is_interested_in_event( diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py index aead73e0594..deb6bade461 100644 --- a/tests/config/test_cache.py +++ b/tests/config/test_cache.py @@ -75,7 +75,7 @@ def test_individual_instantiated_before_config_load(self) -> None: the default cache size in the interim, and then resized once the config is loaded. """ - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) @@ -96,7 +96,7 @@ def test_individual_instantiated_after_config_load(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 200) @@ -106,7 +106,7 @@ def test_global_instantiated_before_config_load(self) -> None: the default cache size in the interim, and then resized to the new default cache size once the config is loaded. 
""" - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) @@ -126,7 +126,7 @@ def test_global_instantiated_after_config_load(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 150) @@ -145,15 +145,15 @@ def test_cache_with_asterisk_in_name(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache_a: LruCache = LruCache(100) + cache_a: LruCache = LruCache(max_size=100) add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor) self.assertEqual(cache_a.max_size, 200) - cache_b: LruCache = LruCache(100) + cache_b: LruCache = LruCache(max_size=100) add_resizable_cache("*Cache_b*", cache_resize_callback=cache_b.set_cache_factor) self.assertEqual(cache_b.max_size, 300) - cache_c: LruCache = LruCache(100) + cache_c: LruCache = LruCache(max_size=100) add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor) self.assertEqual(cache_c.max_size, 200) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 1db630e9e47..25cf5269b8e 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -43,6 +43,7 @@ MultiWriterStreamToken, RoomStreamToken, StreamKeyType, + UserID, ) from synapse.util import Clock from synapse.util.stringutils import random_string @@ -1009,7 +1010,7 @@ def _register_application_service( appservice = ApplicationService( token=random_string(10), id=random_string(10), - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), rate_limited=False, namespaces=namespaces, supports_ephemeral=True, @@ 
-1087,7 +1088,7 @@ def test_application_service_receives_device_list_updates( appservice = ApplicationService( token=random_string(10), id=random_string(10), - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), rate_limited=False, namespaces={ ApplicationService.NS_USERS: [ @@ -1151,9 +1152,9 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # Define an application service for the tests self._service_token = "VERYSECRET" self._service = ApplicationService( - self._service_token, - "as1", - "@as.sender:test", + token=self._service_token, + id="as1", + sender=UserID.from_string("@as.sender:test"), namespaces={ "users": [ {"regex": "@_as_.*:test", "exclusive": True}, diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 080e6a70286..99a0c502111 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -34,7 +34,7 @@ from synapse.rest.client import devices, login, register from synapse.server import HomeServer from synapse.storage.databases.main.appservice import _make_exclusive_regex -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from synapse.util.task_scheduler import TaskScheduler @@ -419,7 +419,7 @@ def test_on_federation_query_user_devices_appservice(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 70fc4263e7a..323950c2f4a 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -1457,7 +1457,7 @@ def test_query_appservice(self) -> None: 
id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( @@ -1525,7 +1525,7 @@ def test_query_appservice_with_fallback(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( @@ -1751,7 +1751,7 @@ def test_query_local_devices_appservice(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index fefa2f1135c..20f2306d4c5 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -726,7 +726,7 @@ def test_registration_endpoints_removed(self) -> None: token="i_am_an_app_service", id="1234", namespaces={"users": [{"regex": r"@alice:.+", "exclusive": True}]}, - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 394315d2b0b..1126b6f1835 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -86,8 +86,8 @@ def make_homeserver( self.mock_federation_client = 
AsyncMock(spec=["put_json"]) self.mock_federation_client.put_json.return_value = (200, "OK") self.mock_federation_client.agent = MatrixFederationAgent( - "OUR_STUB_HOMESERVER_NAME", - reactor, + server_name="OUR_STUB_HOMESERVER_NAME", + reactor=reactor, tls_client_options_factory=None, user_agent=b"SynapseInTrialTest/0.0.0", ip_allowlist=None, diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index b12ffc3665b..0da423142ca 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -31,7 +31,7 @@ from synapse.rest.client import login, register, room, user_directory from synapse.server import HomeServer from synapse.storage.roommember import ProfileInfo -from synapse.types import JsonDict, UserProfile, create_requester +from synapse.types import JsonDict, UserID, UserProfile, create_requester from synapse.util import Clock from tests import unittest @@ -78,7 +78,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, # Note: this user does not match the regex above, so that tests # can distinguish the sender from the AS user. 
- sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) @@ -196,7 +196,9 @@ def test_excludes_appservice_sender(self) -> None: user = self.register_user("user", "pass") token = self.login(user, "pass") room = self.helper.create_room_as(user, is_public=True, tok=token) - self.helper.join(room, self.appservice.sender, tok=self.appservice.token) + self.helper.join( + room, self.appservice.sender.to_string(), tok=self.appservice.token + ) self._check_only_one_user_in_directory(user, room) def test_search_term_with_colon_in_it_does_not_raise(self) -> None: @@ -433,7 +435,7 @@ def test_handle_local_profile_change_with_appservice_user(self) -> None: def test_handle_local_profile_change_with_appservice_sender(self) -> None: # profile is not in directory profile = self.get_success( - self.store._get_user_in_directory(self.appservice.sender) + self.store._get_user_in_directory(self.appservice.sender.to_string()) ) self.assertIsNone(profile) @@ -441,13 +443,13 @@ def test_handle_local_profile_change_with_appservice_sender(self) -> None: profile_info = ProfileInfo(avatar_url="avatar_url", display_name="4L1c3") self.get_success( self.handler.handle_local_profile_change( - self.appservice.sender, profile_info + self.appservice.sender.to_string(), profile_info ) ) # profile is still not in directory profile = self.get_success( - self.store._get_user_in_directory(self.appservice.sender) + self.store._get_user_in_directory(self.appservice.sender.to_string()) ) self.assertIsNone(profile) diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index eb859ca47a1..a1243b053d1 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -85,16 +85,20 @@ def setUp(self) -> None: self.tls_factory = FederationPolicyForHTTPS(config) self.well_known_cache: TTLCache[bytes, 
Optional[bytes]] = TTLCache( - "test_cache", timer=self.reactor.seconds + cache_name="test_cache", + server_name="test_server", + timer=self.reactor.seconds, ) self.had_well_known_cache: TTLCache[bytes, bool] = TTLCache( - "test_cache", timer=self.reactor.seconds + cache_name="test_cache", + server_name="test_server", + timer=self.reactor.seconds, ) self.well_known_resolver = WellKnownResolver( - "OUR_STUB_HOMESERVER_NAME", - self.reactor, - Agent(self.reactor, contextFactory=self.tls_factory), - b"test-agent", + server_name="OUR_STUB_HOMESERVER_NAME", + reactor=self.reactor, + agent=Agent(self.reactor, contextFactory=self.tls_factory), + user_agent=b"test-agent", well_known_cache=self.well_known_cache, had_well_known_cache=self.had_well_known_cache, ) @@ -270,7 +274,7 @@ def _make_agent(self) -> MatrixFederationAgent: because it is created too early during setUp """ return MatrixFederationAgent( - "OUR_STUB_HOMESERVER_NAME", + server_name="OUR_STUB_HOMESERVER_NAME", reactor=cast(ISynapseReactor, self.reactor), tls_client_options_factory=self.tls_factory, user_agent=b"test-agent", # Note that this is unused since _well_known_resolver is provided. @@ -1013,7 +1017,7 @@ def test_get_well_known_unsigned_cert(self) -> None: # Build a new agent and WellKnownResolver with a different tls factory tls_factory = FederationPolicyForHTTPS(config) agent = MatrixFederationAgent( - "OUR_STUB_HOMESERVER_NAME", + server_name="OUR_STUB_HOMESERVER_NAME", reactor=self.reactor, tls_client_options_factory=tls_factory, user_agent=b"test-agent", # This is unused since _well_known_resolver is passed below. 
@@ -1021,10 +1025,10 @@ def test_get_well_known_unsigned_cert(self) -> None: ip_blocklist=IPSet(), _srv_resolver=self.mock_resolver, _well_known_resolver=WellKnownResolver( - "OUR_STUB_HOMESERVER_NAME", - cast(ISynapseReactor, self.reactor), - Agent(self.reactor, contextFactory=tls_factory), - b"test-agent", + server_name="OUR_STUB_HOMESERVER_NAME", + reactor=cast(ISynapseReactor, self.reactor), + agent=Agent(self.reactor, contextFactory=tls_factory), + user_agent=b"test-agent", well_known_cache=self.well_known_cache, had_well_known_cache=self.had_well_known_cache, ), diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index 2e7004df3a9..dca8dd79b18 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -159,7 +159,9 @@ def test_cache_metric(self) -> None: Caches produce metrics reflecting their state when scraped. """ CACHE_NAME = "cache_metrics_test_fgjkbdfg" - cache: DeferredCache[str, str] = DeferredCache(CACHE_NAME, max_entries=777) + cache: DeferredCache[str, str] = DeferredCache( + name=CACHE_NAME, server_name=self.hs.hostname, max_entries=777 + ) items = { x.split(b"{")[0].decode("ascii"): x.split(b" ")[1].decode("ascii") diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 3898532acff..98a3a22154f 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -823,9 +823,9 @@ def prepare( # Define an application service so that we can register appservice users self._service_token = "some_token" self._service = ApplicationService( - self._service_token, - "as1", - "@as.sender:test", + token=self._service_token, + id="as1", + sender=UserID.from_string("@as.sender:test"), namespaces={ "users": [ {"regex": "@_as_.*:test", "exclusive": True}, diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py index b1c2f5b03b9..c8958189f8b 100644 --- 
a/tests/replication/tcp/streams/test_typing.py +++ b/tests/replication/tcp/streams/test_typing.py @@ -139,7 +139,9 @@ def test_reset(self) -> None: self.hs.get_replication_command_handler()._streams["typing"].last_token = 0 typing._latest_room_serial = 0 typing._typing_stream_change_cache = StreamChangeCache( - "TypingStreamChangeCache", typing._latest_room_serial + name="TypingStreamChangeCache", + server_name=self.hs.hostname, + current_stream_pos=typing._latest_room_serial, ) typing._reset() diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py index 6c4145f2c26..5b7ed95a238 100644 --- a/tests/replication/test_federation_sender_shard.py +++ b/tests/replication/test_federation_sender_shard.py @@ -68,8 +68,8 @@ def setUp(self) -> None: reactor, _ = get_clock() self.matrix_federation_agent = MatrixFederationAgent( - "OUR_STUB_HOMESERVER_NAME", - reactor, + server_name="OUR_STUB_HOMESERVER_NAME", + reactor=reactor, tls_client_options_factory=None, user_agent=b"SynapseInTrialTest/0.0.0", ip_allowlist=None, diff --git a/tests/replication/test_module_cache_invalidation.py b/tests/replication/test_module_cache_invalidation.py index 1e7183edaa0..8d5d0cce9a6 100644 --- a/tests/replication/test_module_cache_invalidation.py +++ b/tests/replication/test_module_cache_invalidation.py @@ -35,6 +35,7 @@ class TestCache: current_value = FIRST_VALUE + server_name = "test_server" # nb must be called this for @cached @cached() async def cached_function(self, user_id: str) -> str: diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index a85ea994dec..5343b10e921 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -764,10 +764,10 @@ def test_GET_whoami_appservices(self) -> None: as_token = "i_am_an_app_service" appservice = ApplicationService( - as_token, + token=as_token, id="1234", namespaces={"users": [{"regex": user_id, "exclusive": True}]}, - 
sender=user_id, + sender=UserID.from_string(user_id), ) self.hs.get_datastores().main.services_cache.append(appservice) diff --git a/tests/rest/client/test_devices.py b/tests/rest/client/test_devices.py index dd3abdebac0..b7230488e4e 100644 --- a/tests/rest/client/test_devices.py +++ b/tests/rest/client/test_devices.py @@ -472,7 +472,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="msc4190", token="some_token", hs_token="some_token", - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), namespaces={ ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] }, @@ -483,7 +483,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="regular", token="other_token", hs_token="other_token", - sender="@as2:example.com", + sender=UserID.from_string("@as2:example.com"), namespaces={ ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] }, diff --git a/tests/rest/client/test_directory.py b/tests/rest/client/test_directory.py index ecf38493c3f..6e499093cfa 100644 --- a/tests/rest/client/test_directory.py +++ b/tests/rest/client/test_directory.py @@ -25,7 +25,7 @@ from synapse.rest import admin from synapse.rest.client import directory, login, room from synapse.server import HomeServer -from synapse.types import RoomAlias +from synapse.types import RoomAlias, UserID from synapse.util import Clock from synapse.util.stringutils import random_string @@ -140,7 +140,7 @@ def test_deleting_alias_via_directory_appservice(self) -> None: as_token, id="1234", namespaces={"aliases": [{"regex": "#asns-*", "exclusive": True}]}, - sender=user_id, + sender=UserID.from_string(user_id), ) self.hs.get_datastores().main.services_cache.append(appservice) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index c5c6604667b..b8bcc235e91 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -51,7 +51,7 @@ from 
synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from tests import unittest @@ -1484,7 +1484,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.service = ApplicationService( id="unique_identifier", token="some_token", - sender="@asbot:example.com", + sender=UserID.from_string("@asbot:example.com"), namespaces={ ApplicationService.NS_USERS: [ {"regex": r"@as_user.*", "exclusive": False} @@ -1496,7 +1496,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.another_service = ApplicationService( id="another__identifier", token="another_token", - sender="@as2bot:example.com", + sender=UserID.from_string("@as2bot:example.com"), namespaces={ ApplicationService.NS_USERS: [ {"regex": r"@as2_user.*", "exclusive": False} @@ -1530,7 +1530,10 @@ def test_login_appservice_user_bot(self) -> None: params = { "type": login.LoginRestServlet.APPSERVICE_TYPE, - "identifier": {"type": "m.id.user", "user": self.service.sender}, + "identifier": { + "type": "m.id.user", + "user": self.service.sender.to_string(), + }, } channel = self.make_request( b"POST", LOGIN_URL, params, access_token=self.service.token diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index b697bf6f675..638fbf00620 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -39,7 +39,7 @@ from synapse.rest.client import account, account_validity, login, logout, register, sync from synapse.server import HomeServer from synapse.storage._base import db_to_json -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -75,7 +75,7 @@ def 
test_POST_appservice_registration_valid(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), ) self.hs.get_datastores().main.services_cache.append(appservice) @@ -99,7 +99,7 @@ def test_POST_appservice_registration_no_type(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), ) self.hs.get_datastores().main.services_cache.append(appservice) @@ -129,7 +129,7 @@ def test_POST_appservice_msc4190_enabled(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), msc4190_device_management=True, ) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 3ba7584c654..8a6e6f118a1 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1479,7 +1479,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index f97ca12d843..80f491aff9f 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -36,6 +36,7 @@ _parse_words_with_icu, ) from synapse.storage.roommember import ProfileInfo +from synapse.types import UserID from synapse.util import Clock from tests.server import ThreadedMemoryReactorClock @@ -153,7 +154,7 @@ def make_homeserver( token="i_am_an_app_service", id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + 
sender=UserID.from_string("@as:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) @@ -378,7 +379,7 @@ def test_population_excludes_appservice_sender(self) -> None: # Join the AS sender to rooms owned by the normal user. public, private = self._create_rooms_and_inject_memberships( - user, token, self.appservice.sender + user, token, self.appservice.sender.to_string() ) # Rebuild the directory. diff --git a/tests/test_mau.py b/tests/test_mau.py index 714854cdf29..472965e022a 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -29,7 +29,7 @@ from synapse.appservice import ApplicationService from synapse.rest.client import register, sync from synapse.server import HomeServer -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -118,7 +118,7 @@ def test_as_ignores_mau(self) -> None: ApplicationService( token=as_token, id="SomeASID", - sender="@as_sender:test", + sender=UserID.from_string("@as_sender:test"), namespaces={"users": [{"regex": "@as_*", "exclusive": True}]}, ) ) @@ -263,7 +263,7 @@ def advance_time_and_sync() -> None: ApplicationService( token=as_token_1, id="SomeASID", - sender="@as_sender_1:test", + sender=UserID.from_string("@as_sender_1:test"), namespaces={"users": [{"regex": "@as_1.*", "exclusive": True}]}, ) ) @@ -273,7 +273,7 @@ def advance_time_and_sync() -> None: ApplicationService( token=as_token_2, id="AnotherASID", - sender="@as_sender_2:test", + sender=UserID.from_string("@as_sender_2:test"), namespaces={"users": [{"regex": "@as_2.*", "exclusive": True}]}, ) ) diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py index f99f99237ef..7017d6d70ac 100644 --- a/tests/util/caches/test_deferred_cache.py +++ b/tests/util/caches/test_deferred_cache.py @@ -31,18 +31,24 @@ class DeferredCacheTestCase(TestCase): def test_empty(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + 
cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) with self.assertRaises(KeyError): cache.get("foo") def test_hit(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) cache.prefill("foo", 123) self.assertEqual(self.successResultOf(cache.get("foo")), 123) def test_hit_deferred(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) origin_d: "defer.Deferred[int]" = defer.Deferred() set_d = cache.set("k1", origin_d) @@ -65,7 +71,9 @@ def check1(r: str) -> str: def test_callbacks(self) -> None: """Invalidation callbacks are called at the right time""" - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) callbacks = set() # start with an entry, with a callback @@ -98,7 +106,9 @@ def test_callbacks(self) -> None: self.assertEqual(callbacks, {"set", "get"}) def test_set_fail(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) callbacks = set() # start with an entry, with a callback @@ -135,7 +145,9 @@ def test_set_fail(self) -> None: self.assertEqual(callbacks, {"prefill", "get2"}) def test_get_immediate(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) d1: "defer.Deferred[int]" = defer.Deferred() cache.set("key1", d1) @@ -151,7 +163,9 @@ def test_get_immediate(self) -> None: self.assertEqual(v, 2) def test_invalidate(self) -> None: - cache: DeferredCache[Tuple[str], int] = DeferredCache("test") + cache: DeferredCache[Tuple[str], int] = DeferredCache( + name="test", 
server_name="test_server" + ) cache.prefill(("foo",), 123) cache.invalidate(("foo",)) @@ -159,7 +173,9 @@ def test_invalidate(self) -> None: cache.get(("foo",)) def test_invalidate_all(self) -> None: - cache: DeferredCache[str, str] = DeferredCache("testcache") + cache: DeferredCache[str, str] = DeferredCache( + name="testcache", server_name="test_server" + ) callback_record = [False, False] @@ -203,7 +219,10 @@ def record_callback(idx: int) -> None: def test_eviction(self) -> None: cache: DeferredCache[int, str] = DeferredCache( - "test", max_entries=2, apply_cache_factor_from_config=False + name="test", + server_name="test_server", + max_entries=2, + apply_cache_factor_from_config=False, ) cache.prefill(1, "one") @@ -218,7 +237,10 @@ def test_eviction(self) -> None: def test_eviction_lru(self) -> None: cache: DeferredCache[int, str] = DeferredCache( - "test", max_entries=2, apply_cache_factor_from_config=False + name="test", + server_name="test_server", + max_entries=2, + apply_cache_factor_from_config=False, ) cache.prefill(1, "one") @@ -237,7 +259,8 @@ def test_eviction_lru(self) -> None: def test_eviction_iterable(self) -> None: cache: DeferredCache[int, List[str]] = DeferredCache( - "test", + name="test", + server_name="test_server", max_entries=3, apply_cache_factor_from_config=False, iterable=True, diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 6af9dfaf56a..7865a677093 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -66,6 +66,7 @@ def test_cache(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> str: @@ -100,6 +101,7 @@ def test_cache_num_args(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" 
@descriptors.cached(num_args=1) def fn(self, arg1: int, arg2: int) -> str: @@ -145,6 +147,7 @@ def fn(self, arg1: int, arg2: int, arg3: int) -> str: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" obj = Cls() obj.mock.return_value = "fish" @@ -175,6 +178,7 @@ def test_cache_kwargs(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, kwarg1: int = 2) -> str: @@ -209,6 +213,8 @@ def test_cache_with_sync_exception(self) -> None: """If the wrapped function throws synchronously, things should continue to work""" class Cls: + server_name = "test_server" # nb must be called this for @cached + @cached() def fn(self, arg1: int) -> NoReturn: raise SynapseError(100, "mai spoon iz too big!!1") @@ -232,6 +238,7 @@ def test_cache_with_async_exception(self) -> None: class Cls: result: Optional[Deferred] = None call_count = 0 + server_name = "test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> Deferred: @@ -285,6 +292,8 @@ def test_cache_logcontexts(self) -> Deferred: complete_lookup: Deferred = Deferred() class Cls: + server_name = "test_server" + @descriptors.cached() def fn(self, arg1: int) -> "Deferred[int]": @defer.inlineCallbacks @@ -327,6 +336,8 @@ def test_cache_logcontexts_with_exception(self) -> "Deferred[None]": the lookup function throws an exception""" class Cls: + server_name = "test_server" + @descriptors.cached() def fn(self, arg1: int) -> Deferred: @defer.inlineCallbacks @@ -369,6 +380,7 @@ def test_cache_default_args(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int = 2, arg3: int = 3) -> str: @@ -406,6 +418,7 @@ def test_cache_iterable(self) -> None: class Cls: def __init__(self) -> None: self.mock 
= mock.Mock() + self.server_name = "test_server" @descriptors.cached(iterable=True) def fn(self, arg1: int, arg2: int) -> Tuple[str, ...]: @@ -439,6 +452,8 @@ def test_cache_iterable_with_sync_exception(self) -> None: """If the wrapped function throws synchronously, things should continue to work""" class Cls: + server_name = "test_server" + @descriptors.cached(iterable=True) def fn(self, arg1: int) -> NoReturn: raise SynapseError(100, "mai spoon iz too big!!1") @@ -460,6 +475,8 @@ def test_invalidate_cascade(self) -> None: """Invalidations should cascade up through cache contexts""" class Cls: + server_name = "test_server" # nb must be called this for @cached + @cached(cache_context=True) async def func1(self, key: str, cache_context: _CacheContext) -> int: return await self.func2(key, on_invalidate=cache_context.invalidate) @@ -486,6 +503,8 @@ def test_cancel(self) -> None: complete_lookup: "Deferred[None]" = Deferred() class Cls: + server_name = "test_server" + @cached() async def fn(self, arg1: int) -> str: await complete_lookup @@ -517,6 +536,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False + server_name = "test_server" # nb must be called this for @cached @cached() async def fn(self, arg1: int) -> str: @@ -562,6 +582,8 @@ class CacheDecoratorTestCase(unittest.HomeserverTestCase): @defer.inlineCallbacks def test_passthrough(self) -> Generator["Deferred[Any]", object, None]: class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> str: return key @@ -576,6 +598,8 @@ def test_hit(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -594,6 +618,8 @@ def test_invalidate(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" # nb must be called this for 
@cached + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -612,6 +638,8 @@ def func(self, key: str) -> str: def test_invalidate_missing(self) -> None: class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> str: return key @@ -623,6 +651,8 @@ def test_max_entries(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" # nb must be called this for @cached + @cached(max_entries=10) def func(self, key: int) -> int: callcount[0] += 1 @@ -650,6 +680,8 @@ def test_prefill(self) -> None: d = defer.succeed(123) class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> "Deferred[int]": callcount[0] += 1 @@ -668,6 +700,8 @@ def test_invalidate_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -701,6 +735,8 @@ def test_eviction_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" # nb must be called this for @cached + @cached(max_entries=2) def func(self, key: str) -> str: callcount[0] += 1 @@ -738,6 +774,8 @@ def test_double_get(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" # nb must be called this for @cached + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -785,6 +823,7 @@ def test_cache(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> None: @@ -850,6 +889,7 @@ def test_concurrent_lookups(self) -> None: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, 
arg1: int) -> None: @@ -893,6 +933,7 @@ def test_invalidate(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> None: @@ -933,6 +974,8 @@ def test_cancel(self) -> None: complete_lookup: "Deferred[None]" = Deferred() class Cls: + server_name = "test_server" # nb must be called this for @cached + @cached() def fn(self, arg1: int) -> None: pass @@ -967,6 +1010,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False + server_name = "test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> None: @@ -1010,6 +1054,8 @@ def test_num_args_mismatch(self) -> None: """ class Cls: + server_name = "test_server" + @descriptors.cached(tree=True) def fn(self, room_id: str, event_id: str) -> None: pass diff --git a/tests/util/caches/test_response_cache.py b/tests/util/caches/test_response_cache.py index e350967bbae..30cd6ef0e48 100644 --- a/tests/util/caches/test_response_cache.py +++ b/tests/util/caches/test_response_cache.py @@ -46,7 +46,9 @@ def setUp(self) -> None: self.reactor, self.clock = get_clock() def with_cache(self, name: str, ms: int = 0) -> ResponseCache: - return ResponseCache(self.clock, name, timeout_ms=ms) + return ResponseCache( + clock=self.clock, name=name, server_name="test_server", timeout_ms=ms + ) @staticmethod async def instant_return(o: str) -> str: diff --git a/tests/util/caches/test_ttlcache.py b/tests/util/caches/test_ttlcache.py index ae73df18411..a7b55ffecf4 100644 --- a/tests/util/caches/test_ttlcache.py +++ b/tests/util/caches/test_ttlcache.py @@ -28,7 +28,9 @@ class CacheTestCase(unittest.TestCase): def setUp(self) -> None: self.mock_timer = Mock(side_effect=lambda: 100.0) - self.cache: TTLCache[str, str] = TTLCache("test_cache", self.mock_timer) + self.cache: TTLCache[str, str] = TTLCache( + cache_name="test_cache", 
server_name="test_server", timer=self.mock_timer + ) def test_get(self) -> None: """simple set/get tests""" diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index 5055e4aead2..246e18fd155 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -28,7 +28,7 @@ class DictCacheTestCase(unittest.TestCase): def setUp(self) -> None: self.cache: DictionaryCache[str, str, str] = DictionaryCache( - "foobar", max_entries=10 + name="foobar", server_name="test_server", max_entries=10 ) def test_simple_cache_hit_full(self) -> None: diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index e97e5cf77d9..75bf50e644b 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -33,7 +33,10 @@ class ExpiringCacheTestCase(unittest.HomeserverTestCase): def test_get_set(self) -> None: clock = MockClock() cache: ExpiringCache[str, str] = ExpiringCache( - "test", cast(Clock, clock), max_len=1 + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=1, ) cache["key"] = "value" @@ -43,7 +46,10 @@ def test_get_set(self) -> None: def test_eviction(self) -> None: clock = MockClock() cache: ExpiringCache[str, str] = ExpiringCache( - "test", cast(Clock, clock), max_len=2 + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=2, ) cache["key"] = "value" @@ -59,7 +65,11 @@ def test_eviction(self) -> None: def test_iterable_eviction(self) -> None: clock = MockClock() cache: ExpiringCache[str, List[int]] = ExpiringCache( - "test", cast(Clock, clock), max_len=5, iterable=True + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=5, + iterable=True, ) cache["key"] = [1] @@ -79,7 +89,10 @@ def test_iterable_eviction(self) -> None: def test_time_eviction(self) -> None: clock = MockClock() cache: ExpiringCache[str, int] = ExpiringCache( - "test", cast(Clock, clock), expiry_ms=1000 + cache_name="test", 
+ server_name="testserver", + clock=cast(Clock, clock), + expiry_ms=1000, ) cache["key"] = 1 diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 3f0d8139f8e..b7acf586904 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -34,13 +34,13 @@ class LruCacheTestCase(unittest.HomeserverTestCase): def test_get_set(self) -> None: - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache["key"] = "value" self.assertEqual(cache.get("key"), "value") self.assertEqual(cache["key"], "value") def test_eviction(self) -> None: - cache: LruCache[int, int] = LruCache(2) + cache: LruCache[int, int] = LruCache(max_size=2) cache[1] = 1 cache[2] = 2 @@ -54,7 +54,7 @@ def test_eviction(self) -> None: self.assertEqual(cache.get(3), 3) def test_setdefault(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) self.assertEqual(cache.setdefault("key", 1), 1) self.assertEqual(cache.get("key"), 1) self.assertEqual(cache.setdefault("key", 2), 1) @@ -63,14 +63,16 @@ def test_setdefault(self) -> None: self.assertEqual(cache.get("key"), 2) def test_pop(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) cache["key"] = 1 self.assertEqual(cache.pop("key"), 1) self.assertEqual(cache.pop("key"), None) def test_del_multi(self) -> None: # The type here isn't quite correct as they don't handle TreeCache well. - cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache( + max_size=4, cache_type=TreeCache + ) cache[("animal", "cat")] = "mew" cache[("animal", "dog")] = "woof" cache[("vehicles", "car")] = "vroom" @@ -89,21 +91,23 @@ def test_del_multi(self) -> None: # Man from del_multi say "Yes". 
def test_clear(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) cache["key"] = 1 cache.clear() self.assertEqual(len(cache), 0) @override_config({"caches": {"per_cache_factors": {"mycache": 10}}}) def test_special_size(self) -> None: - cache: LruCache = LruCache(10, "mycache") + cache: LruCache = LruCache( + max_size=10, server_name="test_server", cache_name="mycache" + ) self.assertEqual(cache.max_size, 100) class LruCacheCallbacksTestCase(unittest.HomeserverTestCase): def test_get(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value") self.assertFalse(m.called) @@ -122,7 +126,7 @@ def test_get(self) -> None: def test_multi_get(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value") self.assertFalse(m.called) @@ -141,7 +145,7 @@ def test_multi_get(self) -> None: def test_set(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value", callbacks=[m]) self.assertFalse(m.called) @@ -157,7 +161,7 @@ def test_set(self) -> None: def test_pop(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value", callbacks=[m]) self.assertFalse(m.called) @@ -177,7 +181,9 @@ def test_del_multi(self) -> None: m3 = Mock() m4 = Mock() # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache( + max_size=4, cache_type=TreeCache + ) cache.set(("a", "1"), "value", callbacks=[m1]) cache.set(("a", "2"), "value", callbacks=[m2]) @@ -199,7 +205,7 @@ def test_del_multi(self) -> None: def test_clear(self) -> None: m1 = Mock() m2 = Mock() - cache: LruCache[str, str] = LruCache(5) + cache: LruCache[str, str] = LruCache(max_size=5) cache.set("key1", "value", callbacks=[m1]) cache.set("key2", "value", callbacks=[m2]) @@ -216,7 +222,7 @@ def test_eviction(self) -> None: m1 = Mock(name="m1") m2 = Mock(name="m2") m3 = Mock(name="m3") - cache: LruCache[str, str] = LruCache(2) + cache: LruCache[str, str] = LruCache(max_size=2) cache.set("key1", "value", callbacks=[m1]) cache.set("key2", "value", callbacks=[m2]) @@ -252,7 +258,7 @@ def test_eviction(self) -> None: class LruCacheSizedTestCase(unittest.HomeserverTestCase): def test_evict(self) -> None: - cache: LruCache[str, List[int]] = LruCache(5, size_callback=len) + cache: LruCache[str, List[int]] = LruCache(max_size=5, size_callback=len) cache["key1"] = [0] cache["key2"] = [1, 2] cache["key3"] = [3] @@ -275,7 +281,9 @@ def test_evict(self) -> None: def test_zero_size_drop_from_cache(self) -> None: """Test that `drop_from_cache` works correctly with 0-sized entries.""" - cache: LruCache[str, List[int]] = LruCache(5, size_callback=lambda x: 0) + cache: LruCache[str, List[int]] = LruCache( + max_size=5, size_callback=lambda x: 0 + ) cache["key1"] = [] self.assertEqual(len(cache), 0) @@ -299,7 +307,7 @@ def default_config(self) -> JsonDict: def test_evict(self) -> None: setup_expire_lru_cache_entries(self.hs) - cache: LruCache[str, int] = LruCache(5, clock=self.hs.get_clock()) + cache: LruCache[str, int] = LruCache(max_size=5, clock=self.hs.get_clock()) # Check that we evict entries we haven't accessed for 30 minutes. 
cache["key1"] = 1 @@ -351,7 +359,7 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: mock_jemalloc_class.get_stat.return_value = 924288000 setup_expire_lru_cache_entries(self.hs) - cache: LruCache[str, int] = LruCache(4, clock=self.hs.get_clock()) + cache: LruCache[str, int] = LruCache(max_size=4, clock=self.hs.get_clock()) cache["key1"] = 1 cache["key2"] = 2 @@ -387,7 +395,9 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: class ExtraIndexLruCacheTestCase(unittest.HomeserverTestCase): def test_invalidate_simple(self) -> None: - cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache( + max_size=10, extra_index_cb=lambda k, v: str(v) + ) cache["key1"] = 1 cache["key2"] = 2 @@ -400,7 +410,9 @@ def test_invalidate_simple(self) -> None: self.assertEqual(cache.get("key2"), 2) def test_invalidate_multi(self) -> None: - cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache( + max_size=10, extra_index_cb=lambda k, v: str(v) + ) cache["key1"] = 1 cache["key2"] = 1 cache["key3"] = 2 diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py index 9254bff79b5..69a072cd36c 100644 --- a/tests/util/test_stream_change_cache.py +++ b/tests/util/test_stream_change_cache.py @@ -15,7 +15,12 @@ def test_prefilled_cache(self) -> None: Providing a prefilled cache to StreamChangeCache will result in a cache with the prefilled-cache entered in. 
""" - cache = StreamChangeCache("#test", 1, prefilled_cache={"user@foo.com": 2}) + cache = StreamChangeCache( + name="#test", + server_name=self.hs.hostname, + current_stream_pos=1, + prefilled_cache={"user@foo.com": 2}, + ) self.assertTrue(cache.has_entity_changed("user@foo.com", 1)) def test_has_entity_changed(self) -> None: @@ -23,7 +28,9 @@ def test_has_entity_changed(self) -> None: StreamChangeCache.entity_has_changed will mark entities as changed, and has_entity_changed will observe the changed entities. """ - cache = StreamChangeCache("#test", 3) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=3 + ) cache.entity_has_changed("user@foo.com", 6) cache.entity_has_changed("bar@baz.net", 7) @@ -61,7 +68,9 @@ def test_entity_has_changed_pops_off_start(self) -> None: StreamChangeCache.entity_has_changed will respect the max size and purge the oldest items upon reaching that max size. """ - cache = StreamChangeCache("#test", 1, max_size=2) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1, max_size=2 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -100,7 +109,9 @@ def test_get_all_entities_changed(self) -> None: entities since the given position. If the position is before the start of the known stream, it returns None instead. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -148,7 +159,9 @@ def test_has_any_entity_changed(self) -> None: stream position is before it, it will return True, otherwise False if the cache has no entries. 
""" - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) # With no entities, it returns True for the past, present, and False for # the future. @@ -175,7 +188,9 @@ def test_get_entities_changed(self, perf_factor: int) -> None: stream position is earlier than the earliest known position, it will return all of the entities queried for. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -242,7 +257,9 @@ def test_max_pos(self) -> None: recent point where the entity could have changed. If the entity is not known, the stream start is provided instead. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -260,7 +277,12 @@ def test_all_entities_changed(self) -> None: """ `StreamChangeCache.all_entities_changed(...)` will mark all entites as changed. """ - cache = StreamChangeCache("#test", 1, max_size=10) + cache = StreamChangeCache( + name="#test", + server_name=self.hs.hostname, + current_stream_pos=1, + max_size=10, + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3)