From ba3bbbb13a8b2adb28178815d71424bdd9788e4e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 15:49:06 -0500 Subject: [PATCH 01/24] Add `INSTANCE_LABEL_NAME` to `register_cache(...)` --- synapse/metrics/__init__.py | 11 +++ synapse/push/bulk_push_rule_evaluator.py | 4 +- synapse/util/caches/__init__.py | 83 ++++++++++++++-------- synapse/util/caches/expiringcache.py | 4 +- synapse/util/caches/lrucache.py | 6 +- synapse/util/caches/response_cache.py | 4 +- synapse/util/caches/stream_change_cache.py | 5 +- synapse/util/caches/ttlcache.py | 4 +- 8 files changed, 84 insertions(+), 37 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 86ac2c23959..2d6bb07b8c3 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -66,6 +66,17 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") +INSTANCE_LABEL_NAME = "instance" +""" +The standard Prometheus label name used to identify which server instance the metrics +came from. +In the case of a Synapse homeserver, this should be set to the homeserver name +(`hs.hostname`). +Normally, this would be set automatically by the Prometheus server scraping the data but +since we support multiple instances of Synapse running in the same process and all +metrics are in a single global `REGISTRY`, we need to manually label any metrics. 
+""" + class _RegistryProxy: @staticmethod diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 1f4f5b90c3a..9b13212ea22 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -136,8 +136,8 @@ def __init__(self, hs: "HomeServer"): self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled self.room_push_rule_cache_metrics = register_cache( - "cache", - "room_push_rule_cache", + cache_type="cache", + cache_name="room_push_rule_cache", cache=[], # Meaningless size, as this isn't a cache that stores values, resizable=False, ) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index df8829baeba..a7a1b33ccab 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -31,6 +31,7 @@ from prometheus_client.core import Gauge from synapse.config.cache import add_resizable_cache +from synapse.metrics import INSTANCE_LABEL_NAME from synapse.util.metrics import DynamicCollectorRegistry logger = logging.getLogger(__name__) @@ -46,50 +47,65 @@ caches_by_name: Dict[str, Sized] = {} cache_size = Gauge( - "synapse_util_caches_cache_size", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_size", + "", + labelnames=["name", INSTANCE_LABEL_NAME], + registry=CACHE_METRIC_REGISTRY, ) cache_hits = Gauge( - "synapse_util_caches_cache_hits", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_hits", + "", + labelnames=["name", INSTANCE_LABEL_NAME], + registry=CACHE_METRIC_REGISTRY, ) cache_evicted = Gauge( "synapse_util_caches_cache_evicted_size", "", - ["name", "reason"], + labelnames=["name", "reason", INSTANCE_LABEL_NAME], registry=CACHE_METRIC_REGISTRY, ) cache_total = Gauge( - "synapse_util_caches_cache", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache", + "", + labelnames=["name", INSTANCE_LABEL_NAME], + registry=CACHE_METRIC_REGISTRY, ) 
cache_max_size = Gauge( - "synapse_util_caches_cache_max_size", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_cache_max_size", + "", + labelnames=["name", INSTANCE_LABEL_NAME], + registry=CACHE_METRIC_REGISTRY, ) cache_memory_usage = Gauge( "synapse_util_caches_cache_size_bytes", "Estimated memory usage of the caches", - ["name"], + labelnames=["name", INSTANCE_LABEL_NAME], registry=CACHE_METRIC_REGISTRY, ) response_cache_size = Gauge( "synapse_util_caches_response_cache_size", "", - ["name"], + labelnames=["name", INSTANCE_LABEL_NAME], registry=CACHE_METRIC_REGISTRY, ) response_cache_hits = Gauge( "synapse_util_caches_response_cache_hits", "", - ["name"], + labelnames=["name", INSTANCE_LABEL_NAME], registry=CACHE_METRIC_REGISTRY, ) response_cache_evicted = Gauge( "synapse_util_caches_response_cache_evicted_size", "", - ["name", "reason"], + labelnames=["name", "reason", INSTANCE_LABEL_NAME], registry=CACHE_METRIC_REGISTRY, ) response_cache_total = Gauge( - "synapse_util_caches_response_cache", "", ["name"], registry=CACHE_METRIC_REGISTRY + "synapse_util_caches_response_cache", + "", + labelnames=["name", INSTANCE_LABEL_NAME], + registry=CACHE_METRIC_REGISTRY, ) @@ -103,12 +119,13 @@ class EvictionReason(Enum): invalidation = auto() -@attr.s(slots=True, auto_attribs=True) +@attr.s(slots=True, auto_attribs=True, kw_only=True) class CacheMetric: _cache: Sized _cache_type: str _cache_name: str _collect_callback: Optional[Callable] + _server_name: str hits: int = 0 misses: int = 0 @@ -145,34 +162,34 @@ def describe(self) -> List[str]: def collect(self) -> None: try: + labels_base = { + "name": self._cache_name, + INSTANCE_LABEL_NAME: self._server_name, + } if self._cache_type == "response_cache": - response_cache_size.labels(self._cache_name).set(len(self._cache)) - response_cache_hits.labels(self._cache_name).set(self.hits) + response_cache_size.labels(**labels_base).set(len(self._cache)) + response_cache_hits.labels(**labels_base).set(self.hits) 
for reason in EvictionReason: -                    response_cache_evicted.labels(self._cache_name, reason.name).set( -                        self.eviction_size_by_reason[reason] -                    ) -                response_cache_total.labels(self._cache_name).set( -                    self.hits + self.misses -                ) +                    response_cache_evicted.labels( +                        **{**labels_base, "reason": reason.name} +                    ).set(self.eviction_size_by_reason[reason]) +                response_cache_total.labels(**labels_base).set(self.hits + self.misses) else: -                cache_size.labels(self._cache_name).set(len(self._cache)) -                cache_hits.labels(self._cache_name).set(self.hits) +                cache_size.labels(**labels_base).set(len(self._cache)) +                cache_hits.labels(**labels_base).set(self.hits) for reason in EvictionReason: -                    cache_evicted.labels(self._cache_name, reason.name).set( +                    cache_evicted.labels(**{**labels_base, "reason": reason.name}).set( self.eviction_size_by_reason[reason] ) -                cache_total.labels(self._cache_name).set(self.hits + self.misses) +                cache_total.labels(**labels_base).set(self.hits + self.misses) max_size = getattr(self._cache, "max_size", None) if max_size: -                    cache_max_size.labels(self._cache_name).set(max_size) +                    cache_max_size.labels(**labels_base).set(max_size) if TRACK_MEMORY_USAGE: # self.memory_usage can be None if nothing has been inserted # into the cache yet. 
- cache_memory_usage.labels(self._cache_name).set( -                        self.memory_usage or 0 -                    ) +                    cache_memory_usage.labels(**labels_base).set(self.memory_usage or 0) if self._collect_callback: self._collect_callback() except Exception as e: @@ -181,9 +198,11 @@ def collect(self) -> None: def register_cache( +    *, cache_type: str, cache_name: str, cache: Sized, +    server_name: str, collect_callback: Optional[Callable] = None, resizable: bool = True, resize_callback: Optional[Callable] = None, @@ -196,6 +215,8 @@ def register_cache( cache_name: name of the cache cache: cache itself, which must implement __len__(), and may optionally implement a max_size property +        server_name: The homeserver name that this cache is associated with +            (used to label the metric) (`hs.hostname`). collect_callback: If given, a function which is called during metric collection to update additional metrics. resizable: Whether this cache supports being resized, in which case either @@ -210,7 +231,13 @@ def register_cache( resize_callback = cache.set_cache_factor  # type: ignore add_resizable_cache(cache_name, resize_callback) -    metric = CacheMetric(cache, cache_type, cache_name, collect_callback) +    metric = CacheMetric( +        cache=cache, +        cache_type=cache_type, +        cache_name=cache_name, +        server_name=server_name, +        collect_callback=collect_callback, +    ) metric_name = "cache_%s_%s" % (cache_type, cache_name) caches_by_name[cache_name] = cache CACHE_METRIC_REGISTRY.register_hook(metric_name, metric.collect) diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 3198fdd2eda..2a3bfed3646 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -83,7 +83,9 @@ def __init__( self.iterable = iterable -        self.metrics = register_cache("expiring", cache_name, self) +        self.metrics = register_cache( +            cache_type="expiring", cache_name=cache_name, cache=self +        ) if not self._expiry_ms: # Don't bother starting the loop if things never 
expire diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 2e5efa3a521..fc7333e9784 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -459,9 +459,9 @@ def __init__( if cache_name is not None: metrics: Optional[CacheMetric] = register_cache( - "lru_cache", - cache_name, - self, + cache_type="lru_cache", + cache_name=cache_name, + cache=self, collect_callback=metrics_collection_callback, ) else: diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 54b99134b9c..c97e6c3d31a 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -114,7 +114,9 @@ def __init__( self.timeout_sec = timeout_ms / 1000.0 self._name = name - self._metrics = register_cache("response_cache", name, self, resizable=False) + self._metrics = register_cache( + cache_type="response_cache", cache_name=name, cache=self, resizable=False + ) self._enable_logging = enable_logging def size(self) -> int: diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 5ac8643eefc..c4a451bfd79 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -96,7 +96,10 @@ def __init__( self.name = name self.metrics = caches.register_cache( - "cache", self.name, self._cache, resize_callback=self.set_cache_factor + cache_type="cache", + cache_name=self.name, + cache=self._cache, + resize_callback=self.set_cache_factor, ) if prefilled_cache: diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 26a088603ae..aa632e68af9 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -49,7 +49,9 @@ def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): self._timer = timer - self._metrics = register_cache("ttl", cache_name, self, resizable=False) + self._metrics = register_cache( + cache_type="ttl", 
cache_name=cache_name, cache=self, resizable=False + ) def set(self, key: KT, value: VT, ttl: float) -> None: """Add/update an entry in the cache From 749b7a493c98865af5549df47e46eb0ca4d5206f Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 15:58:49 -0500 Subject: [PATCH 02/24] Fill in `ExpiringCache` --- synapse/federation/federation_client.py | 5 +++-- synapse/handlers/device.py | 2 ++ synapse/handlers/message.py | 5 +++-- synapse/handlers/sync.py | 6 ++++-- synapse/media/url_previewer.py | 1 + synapse/push/bulk_push_rule_evaluator.py | 2 ++ synapse/state/__init__.py | 2 ++ synapse/storage/databases/main/deviceinbox.py | 2 ++ synapse/util/caches/expiringcache.py | 7 ++++++- tests/util/test_expiring_cache.py | 21 +++++++++++++++---- 10 files changed, 42 insertions(+), 11 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 2d1da70793c..36fefe67a47 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -137,7 +137,7 @@ def __init__(self, hs: "HomeServer"): self.state = hs.get_state_handler() self.transport_layer = hs.get_federation_transport_client() - self.hostname = hs.hostname + self.server_name = hs.hostname self.signing_key = hs.signing_key # Cache mapping `event_id` to a tuple of the event itself and the `pull_origin` @@ -162,6 +162,7 @@ def __init__(self, hs: "HomeServer"): Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], ] = ExpiringCache( cache_name="get_room_hierarchy_cache", + server_name=self.server_name, clock=self._clock, max_len=1000, expiry_ms=5 * 60 * 1000, @@ -1068,7 +1069,7 @@ async def send_request(destination: str) -> Tuple[str, EventBase, RoomVersion]: # there's some we never care about ev = builder.create_local_event_from_event_dict( self._clock, - self.hostname, + self.server_name, self.signing_key, room_version=room_version, event_dict=pdu_dict, diff --git a/synapse/handlers/device.py 
b/synapse/handlers/device.py index 8f9bf92fda3..34e34f7639f 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -1212,6 +1212,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater): "Handles incoming device list updates from federation and updates the DB" def __init__(self, hs: "HomeServer", device_handler: DeviceHandler): + self.server_name = hs.hostname self.store = hs.get_datastores().main self.federation = hs.get_federation_client() self.clock = hs.get_clock() @@ -1231,6 +1232,7 @@ def __init__(self, hs: "HomeServer", device_handler: DeviceHandler): # resyncs. self._seen_updates: ExpiringCache[str, Set[str]] = ExpiringCache( cache_name="device_update_edu", + server_name=self.server_name, clock=self.clock, max_len=10000, expiry_ms=30 * 60 * 1000, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 5d6ee6996f7..34bb509b40e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -558,8 +558,9 @@ def __init__(self, hs: "HomeServer"): self._external_cache_joined_hosts_updates: Optional[ExpiringCache] = None if self._external_cache.is_enabled(): self._external_cache_joined_hosts_updates = ExpiringCache( - "_external_cache_joined_hosts_updates", - self.clock, + cache_name="_external_cache_joined_hosts_updates", + server_name=self.server_name, + clock=self.clock, expiry_ms=30 * 60 * 1000, ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index a400e63fd56..250ed654c9a 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -329,6 +329,7 @@ class E2eeSyncResult: class SyncHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.hs_config = hs.config self.store = hs.get_datastores().main self.notifier = hs.get_notifier() @@ -361,8 +362,9 @@ def __init__(self, hs: "HomeServer"): self.lazy_loaded_members_cache: ExpiringCache[ Tuple[str, Optional[str]], LruCache[str, str] ] = ExpiringCache( - "lazy_loaded_members_cache", - self.clock, + 
cache_name="lazy_loaded_members_cache", + server_name=self.server_name, + clock=self.clock, max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE, ) diff --git a/synapse/media/url_previewer.py b/synapse/media/url_previewer.py index 0c665e1942b..eb0104e5432 100644 --- a/synapse/media/url_previewer.py +++ b/synapse/media/url_previewer.py @@ -200,6 +200,7 @@ def __init__( # JSON-encoded OG metadata self._cache: ExpiringCache[str, ObservableDeferred] = ExpiringCache( cache_name="url_previews", + server_name=self.server_name, clock=self.clock, # don't spider URLs more often than once an hour expiry_ms=ONE_HOUR, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 9b13212ea22..e86d9bae6ef 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -128,6 +128,7 @@ class BulkPushRuleEvaluator: def __init__(self, hs: "HomeServer"): self.hs = hs + self.server_name = hs.hostname self.store = hs.get_datastores().main self.clock = hs.get_clock() self._event_auth_handler = hs.get_event_auth_handler() @@ -140,6 +141,7 @@ def __init__(self, hs: "HomeServer"): cache_name="room_push_rule_cache", cache=[], # Meaningless size, as this isn't a cache that stores values, resizable=False, + server_name=self.server_name, ) async def _get_rules_for_event( diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 1c3e5d00a92..52d923573ed 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -631,6 +631,7 @@ class StateResolutionHandler: """ def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.clock = hs.get_clock() self.resolve_linearizer = Linearizer(name="state_resolve_lock") @@ -639,6 +640,7 @@ def __init__(self, hs: "HomeServer"): self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( ExpiringCache( cache_name="state_cache", + server_name=self.server_name, clock=self.clock, max_len=100000, 
expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000, diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 579d29ac15b..7cdcb87d780 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -85,6 +85,7 @@ def __init__( ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname self._instance_name = hs.get_instance_name() # Map of (user_id, device_id) to the last stream_id that has been @@ -93,6 +94,7 @@ def __init__( Tuple[str, Optional[str]], int ] = ExpiringCache( cache_name="last_device_delete_cache", + server_name=self.server_name, clock=self._clock, max_len=10000, expiry_ms=30 * 60 * 1000, diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 2a3bfed3646..d0ecec74e38 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -46,7 +46,9 @@ class ExpiringCache(Generic[KT, VT]): def __init__( self, + *, cache_name: str, + server_name: str, clock: Clock, max_len: int = 0, expiry_ms: int = 0, @@ -84,7 +86,10 @@ def __init__( self.iterable = iterable self.metrics = register_cache( - cache_type="expiring", cache_name=cache_name, cache=self + cache_type="expiring", + cache_name=cache_name, + cache=self, + server_name=server_name, ) if not self._expiry_ms: diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index e97e5cf77d9..75bf50e644b 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -33,7 +33,10 @@ class ExpiringCacheTestCase(unittest.HomeserverTestCase): def test_get_set(self) -> None: clock = MockClock() cache: ExpiringCache[str, str] = ExpiringCache( - "test", cast(Clock, clock), max_len=1 + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=1, ) cache["key"] = "value" @@ -43,7 +46,10 @@ def test_get_set(self) -> None: def test_eviction(self) -> None: clock = 
MockClock() cache: ExpiringCache[str, str] = ExpiringCache( - "test", cast(Clock, clock), max_len=2 + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=2, ) cache["key"] = "value" @@ -59,7 +65,11 @@ def test_eviction(self) -> None: def test_iterable_eviction(self) -> None: clock = MockClock() cache: ExpiringCache[str, List[int]] = ExpiringCache( - "test", cast(Clock, clock), max_len=5, iterable=True + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + max_len=5, + iterable=True, ) cache["key"] = [1] @@ -79,7 +89,10 @@ def test_iterable_eviction(self) -> None: def test_time_eviction(self) -> None: clock = MockClock() cache: ExpiringCache[str, int] = ExpiringCache( - "test", cast(Clock, clock), expiry_ms=1000 + cache_name="test", + server_name="testserver", + clock=cast(Clock, clock), + expiry_ms=1000, ) cache["key"] = 1 From 8e71fcdb822e744db33f21c7c3750fb725b7af0d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 16:06:44 -0500 Subject: [PATCH 03/24] Fill in `ResponseCache` --- synapse/api/auth/msc3861_delegated.py | 6 ++++-- synapse/appservice/api.py | 6 +++++- synapse/federation/federation_server.py | 17 ++++++++++++++--- synapse/handlers/initial_sync.py | 7 ++++++- synapse/handlers/room.py | 6 +++++- synapse/handlers/room_list.py | 14 ++++++++++++-- synapse/handlers/room_summary.py | 6 ++++-- synapse/handlers/sync.py | 5 +++-- synapse/replication/http/_base.py | 7 ++++++- synapse/util/caches/expiringcache.py | 2 ++ synapse/util/caches/response_cache.py | 17 ++++++++++++++++- tests/util/caches/test_response_cache.py | 4 +++- 12 files changed, 80 insertions(+), 17 deletions(-) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 0cfdf15d60a..ad8f4e04f6a 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -176,6 +176,7 @@ def __init__(self, hs: "HomeServer"): assert self._config.client_id, "No 
client_id provided" assert auth_method is not None, "Invalid client_auth_method provided" + self.server_name = hs.hostname self._clock = hs.get_clock() self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname @@ -206,8 +207,9 @@ def __init__(self, hs: "HomeServer"): # In this case, the device still exists and it's not the end of the world for # the old access token to continue working for a short time. self._introspection_cache: ResponseCache[str] = ResponseCache( - self._clock, - "token_introspection", + clock=self._clock, + name="token_introspection", + server_name=self.server_name, timeout_ms=120_000, # don't log because the keys are access tokens enable_logging=False, diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 19322471dc5..0daf30ae2dd 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -126,11 +126,15 @@ class ApplicationServiceApi(SimpleHttpClient): def __init__(self, hs: "HomeServer"): super().__init__(hs) + self.server_name = hs.hostname self.clock = hs.get_clock() self.config = hs.config.appservice self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS + clock=hs.get_clock(), + name="as_protocol_meta", + server_name=self.server_name, + timeout_ms=HOUR_IN_MS, ) def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 8718b704017..9733253fbd2 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -160,7 +160,10 @@ def __init__(self, hs: "HomeServer"): # We cache results for transaction with the same ID self._transaction_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "fed_txn_handler", timeout_ms=30000 + clock=hs.get_clock(), + name="fed_txn_handler", + server_name=self.server_name, + timeout_ms=30000, ) 
self.transaction_actions = TransactionActions(self.store) @@ -170,10 +173,18 @@ def __init__(self, hs: "HomeServer"): # We cache responses to state queries, as they take a while and often # come in waves. self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( - ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000) + ResponseCache( + clock=hs.get_clock(), + name="state_resp", + server_name=self.server_name, + timeout_ms=30000, + ) ) self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "state_ids_resp", timeout_ms=30000 + clock=hs.get_clock(), + name="state_ids_resp", + server_name=self.server_name, + timeout_ms=30000, ) self._federation_metrics_domains = ( diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index bd3c87f5f4e..75d64d2d50b 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -60,6 +60,7 @@ class InitialSyncHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self.auth = hs.get_auth() self.state_handler = hs.get_state_handler() @@ -77,7 +78,11 @@ def __init__(self, hs: "HomeServer"): bool, bool, ] - ] = ResponseCache(hs.get_clock(), "initial_sync_cache") + ] = ResponseCache( + clock=hs.get_clock(), + name="initial_sync_cache", + server_name=self.server_name, + ) self._event_serializer = hs.get_event_client_serializer() self._storage_controllers = hs.get_storage_controllers() self._state_storage_controller = self._storage_controllers.state diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 0f004d02d34..ef76aeba002 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -118,6 +118,7 @@ class EventContext: class RoomCreationHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.auth = hs.get_auth() @@ -174,7 
+175,10 @@ def __init__(self, hs: "HomeServer"): # succession, only process the first attempt and return its result to # subsequent requests self._upgrade_response_cache: ResponseCache[Tuple[str, str]] = ResponseCache( - hs.get_clock(), "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS + clock=hs.get_clock(), + name="room_upgrade", + server_name=self.server_name, + timeout_ms=FIVE_MINUTES_IN_MS, ) self._server_notices_mxid = hs.config.servernotices.server_notices_mxid diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 07eac71e2af..258b015f0ff 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -61,16 +61,26 @@ class RoomListHandler: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.hs = hs self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search self.response_cache: ResponseCache[ Tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] - ] = ResponseCache(hs.get_clock(), "room_list") + ] = ResponseCache( + clock=hs.get_clock(), + name="room_list", + server_name=self.server_name, + ) self.remote_response_cache: ResponseCache[ Tuple[str, Optional[int], Optional[str], bool, Optional[str]] - ] = ResponseCache(hs.get_clock(), "remote_room_list", timeout_ms=30 * 1000) + ] = ResponseCache( + clock=hs.get_clock(), + name="remote_room_list", + server_name=self.server_name, + timeout_ms=30 * 1000, + ) async def get_local_public_room_list( self, diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index 91b131d09b1..dffcd31fc93 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -96,6 +96,7 @@ class RoomSummaryHandler: _PAGINATION_SESSION_VALIDITY_PERIOD_MS = 5 * 60 * 1000 def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self._event_auth_handler = hs.get_event_auth_handler() 
self._store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() @@ -113,8 +114,9 @@ def __init__(self, hs: "HomeServer"): self._pagination_response_cache: ResponseCache[ Tuple[str, str, bool, Optional[int], Optional[int], Optional[str]] ] = ResponseCache( - hs.get_clock(), - "get_room_hierarchy", + clock=hs.get_clock(), + name="get_room_hierarchy", + server_name=self.server_name, ) self._msc3266_enabled = hs.config.experimental.msc3266_enabled diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 250ed654c9a..1a8cdc4f768 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -353,8 +353,9 @@ def __init__(self, hs: "HomeServer"): # cached result any more, and we could flush the entry from the cache to save # memory. self.response_cache: ResponseCache[SyncRequestKey] = ResponseCache( - hs.get_clock(), - "sync", + clock=hs.get_clock(), + name="sync", + server_name=self.server_name, timeout_ms=hs.config.caches.sync_response_cache_duration, ) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 00025386801..31204a83849 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -121,9 +121,14 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): WAIT_FOR_STREAMS: ClassVar[bool] = True def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname + if self.CACHE: self.response_cache: ResponseCache[str] = ResponseCache( - hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000 + clock=hs.get_clock(), + name="repl." 
+ self.NAME, +                server_name=self.server_name, +                timeout_ms=30 * 60 * 1000, ) # We reserve `instance_name` as a parameter to sending requests, so we diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index d0ecec74e38..6bd4995c26d 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -58,6 +58,8 @@ def __init__( """ Args: cache_name: Name of this cache, used for logging. +            server_name: The homeserver name that this cache is associated +                with (used to label the metric) (`hs.hostname`). clock max_len: Max size of dict. If the dict grows larger than this then the oldest items get automatically evicted. Default is 0, diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index c97e6c3d31a..6f1dace5847 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -103,11 +103,22 @@ class ResponseCache(Generic[KV]): def __init__( self, +        *, clock: Clock, name: str, +        server_name: str, timeout_ms: float = 0, enable_logging: bool = True, ): +        """ +        Args: +            clock +            name +            server_name: The homeserver name that this cache is associated +                with (used to label the metric) (`hs.hostname`). 
+ timeout_ms + enable_logging + """ self._result_cache: Dict[KV, ResponseCacheEntry] = {} self.clock = clock @@ -115,7 +126,11 @@ def __init__( self._name = name self._metrics = register_cache( - cache_type="response_cache", cache_name=name, cache=self, resizable=False + cache_type="response_cache", + cache_name=name, + cache=self, + server_name=server_name, + resizable=False, ) self._enable_logging = enable_logging diff --git a/tests/util/caches/test_response_cache.py b/tests/util/caches/test_response_cache.py index e350967bbae..30cd6ef0e48 100644 --- a/tests/util/caches/test_response_cache.py +++ b/tests/util/caches/test_response_cache.py @@ -46,7 +46,9 @@ def setUp(self) -> None: self.reactor, self.clock = get_clock() def with_cache(self, name: str, ms: int = 0) -> ResponseCache: - return ResponseCache(self.clock, name, timeout_ms=ms) + return ResponseCache( + clock=self.clock, name=name, server_name="test_server", timeout_ms=ms + ) @staticmethod async def instant_return(o: str) -> str: From 61fc9ba52af4e1bd097381eae4c92405b02af6ea Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 16:17:58 -0500 Subject: [PATCH 04/24] Fill in `StreamChangeCache` --- synapse/handlers/typing.py | 5 ++- .../storage/databases/main/account_data.py | 6 ++- synapse/storage/databases/main/deviceinbox.py | 10 +++-- synapse/storage/databases/main/devices.py | 21 ++++++---- .../storage/databases/main/events_worker.py | 7 +++- synapse/storage/databases/main/presence.py | 6 ++- synapse/storage/databases/main/push_rule.py | 5 ++- synapse/storage/databases/main/receipts.py | 6 ++- synapse/storage/databases/main/stream.py | 9 +++-- synapse/util/caches/stream_change_cache.py | 13 +++++++ tests/replication/tcp/streams/test_typing.py | 4 +- tests/util/test_stream_change_cache.py | 38 +++++++++++++++---- 12 files changed, 96 insertions(+), 34 deletions(-) diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 8d693fee304..6f57475e5bb 100644 --- 
a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -263,6 +263,7 @@ def __init__(self, hs: "HomeServer"): assert hs.get_instance_name() in hs.config.worker.writers.typing + self.server_name = hs.hostname self.auth = hs.get_auth() self.notifier = hs.get_notifier() self.event_auth_handler = hs.get_event_auth_handler() @@ -280,7 +281,9 @@ def __init__(self, hs: "HomeServer"): # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( - "TypingStreamChangeCache", self._latest_room_serial + name="TypingStreamChangeCache", + server_name=self.server_name, + current_stream_pos=self._latest_room_serial, ) def _handle_timeout_for_member(self, now: int, member: RoomMember) -> None: diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 715815cc091..ee2b1f6c5dd 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -66,6 +66,8 @@ def __init__( ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname + self._can_write_to_account_data = ( self._instance_name in hs.config.worker.writers.account_data ) @@ -89,7 +91,9 @@ def __init__( account_max = self.get_max_account_data_stream_id() self._account_data_stream_cache = StreamChangeCache( - "AccountDataAndTagsChangeCache", account_max + name="AccountDataAndTagsChangeCache", + server_name=self.server_name, + current_stream_pos=account_max, ) self.db_pool.updates.register_background_index_update( diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 7cdcb87d780..fb56c31105d 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -128,8 +128,9 @@ def __init__( limit=1000, ) self._device_inbox_stream_cache = StreamChangeCache( - "DeviceInboxStreamChangeCache", - min_device_inbox_id, + name="DeviceInboxStreamChangeCache", + 
server_name=self.server_name, + current_stream_pos=min_device_inbox_id, prefilled_cache=device_inbox_prefill, ) @@ -144,8 +145,9 @@ def __init__( limit=1000, ) self._device_federation_outbox_stream_cache = StreamChangeCache( - "DeviceFederationOutboxStreamChangeCache", - min_device_outbox_id, + name="DeviceFederationOutboxStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_outbox_id, prefilled_cache=device_outbox_prefill, ) diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 6191f22cd6a..ec8d85cc3de 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -93,6 +93,7 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname # In the worker store this is an ID tracker which we overwrite in the non-worker # class below that is used on the main process. @@ -128,8 +129,9 @@ def __init__( limit=10000, ) self._device_list_stream_cache = StreamChangeCache( - "DeviceListStreamChangeCache", - min_device_list_id, + name="DeviceListStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_list_id, prefilled_cache=device_list_prefill, ) @@ -142,8 +144,9 @@ def __init__( limit=10000, ) self._device_list_room_stream_cache = StreamChangeCache( - "DeviceListRoomStreamChangeCache", - min_device_list_room_id, + name="DeviceListRoomStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_device_list_room_id, prefilled_cache=device_list_room_prefill, ) @@ -159,8 +162,9 @@ def __init__( limit=1000, ) self._user_signature_stream_cache = StreamChangeCache( - "UserSignatureStreamChangeCache", - user_signature_stream_list_id, + name="UserSignatureStreamChangeCache", + server_name=self.server_name, + current_stream_pos=user_signature_stream_list_id, prefilled_cache=user_signature_stream_prefill, ) @@ -178,8 +182,9 @@ def __init__( limit=10000, ) 
self._device_list_federation_stream_cache = StreamChangeCache( - "DeviceListFederationStreamChangeCache", - device_list_federation_list_id, + name="DeviceListFederationStreamChangeCache", + server_name=self.server_name, + current_stream_pos=device_list_federation_list_id, prefilled_cache=device_list_federation_prefill, ) diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 3db4460f571..3f69b698737 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -227,6 +227,8 @@ def __init__( ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname + self._stream_id_gen: MultiWriterIdGenerator self._backfill_id_gen: MultiWriterIdGenerator @@ -269,8 +271,9 @@ def __init__( limit=1000, ) self._curr_state_delta_stream_cache: StreamChangeCache = StreamChangeCache( - "_curr_state_delta_stream_cache", - min_curr_state_delta_id, + name="_curr_state_delta_stream_cache", + server_name=self.server_name, + current_stream_pos=min_curr_state_delta_id, prefilled_cache=curr_state_delta_prefill, ) diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 065c8856036..d01153497a2 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -79,6 +79,7 @@ def __init__( ) -> None: super().__init__(database, db_conn, hs) + self.server_name = hs.hostname self._instance_name = hs.get_instance_name() self._presence_id_gen: MultiWriterIdGenerator @@ -108,8 +109,9 @@ def __init__( max_value=self._presence_id_gen.get_current_token(), ) self.presence_stream_cache = StreamChangeCache( - "PresenceStreamChangeCache", - min_presence_val, + name="PresenceStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_presence_val, prefilled_cache=presence_cache_prefill, ) diff --git a/synapse/storage/databases/main/push_rule.py 
b/synapse/storage/databases/main/push_rule.py index 86c87f78bf1..3bc977d4971 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -163,8 +163,9 @@ def __init__( ) self.push_rules_stream_cache = StreamChangeCache( - "PushRulesStreamChangeCache", - push_rules_id, + name="PushRulesStreamChangeCache", + server_name=self.server_name, + current_stream_pos=push_rules_id, prefilled_cache=push_rules_prefill, ) diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 99643315107..5af8e28fb36 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -125,6 +125,7 @@ def __init__( db_conn: LoggingDatabaseConnection, hs: "HomeServer", ): + self.server_name = hs.hostname self._instance_name = hs.get_instance_name() # In the worker store this is an ID tracker which we overwrite in the non-worker @@ -158,8 +159,9 @@ def __init__( limit=10000, ) self._receipts_stream_cache = StreamChangeCache( - "ReceiptsRoomChangeCache", - min_receipts_stream_id, + name="ReceiptsRoomChangeCache", + server_name=self.server_name, + current_stream_pos=min_receipts_stream_id, prefilled_cache=receipts_stream_prefill, ) diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 3fda49f31f1..b6c6b69b22c 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -617,12 +617,15 @@ def __init__( max_value=events_max, ) self._events_stream_cache = StreamChangeCache( - "EventsRoomStreamChangeCache", - min_event_val, + name="EventsRoomStreamChangeCache", + server_name=self.server_name, + current_stream_pos=min_event_val, prefilled_cache=event_cache_prefill, ) self._membership_stream_cache = StreamChangeCache( - "MembershipStreamChangeCache", events_max + name="MembershipStreamChangeCache", + server_name=self.server_name, + current_stream_pos=events_max, ) 
self._stream_order_on_start = self.get_room_max_stream_ordering() diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index c4a451bfd79..1a160675958 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -73,11 +73,23 @@ class StreamChangeCache: def __init__( self, + *, name: str, + server_name: str, current_stream_pos: int, max_size: int = 10000, prefilled_cache: Optional[Mapping[EntityType, int]] = None, ) -> None: + """ + Args: + name + server_name: server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). + current_stream_pos + max_size + prefilled_cache + """ + self._original_max_size: int = max_size self._max_size = math.floor(max_size) @@ -98,6 +110,7 @@ def __init__( self.metrics = caches.register_cache( cache_type="cache", cache_name=self.name, + server_name=server_name, cache=self._cache, resize_callback=self.set_cache_factor, ) diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py index b1c2f5b03b9..c8958189f8b 100644 --- a/tests/replication/tcp/streams/test_typing.py +++ b/tests/replication/tcp/streams/test_typing.py @@ -139,7 +139,9 @@ def test_reset(self) -> None: self.hs.get_replication_command_handler()._streams["typing"].last_token = 0 typing._latest_room_serial = 0 typing._typing_stream_change_cache = StreamChangeCache( - "TypingStreamChangeCache", typing._latest_room_serial + name="TypingStreamChangeCache", + server_name=self.hs.hostname, + current_stream_pos=typing._latest_room_serial, ) typing._reset() diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py index 9254bff79b5..69a072cd36c 100644 --- a/tests/util/test_stream_change_cache.py +++ b/tests/util/test_stream_change_cache.py @@ -15,7 +15,12 @@ def test_prefilled_cache(self) -> None: Providing a prefilled cache to StreamChangeCache will result in 
a cache with the prefilled-cache entered in. """ - cache = StreamChangeCache("#test", 1, prefilled_cache={"user@foo.com": 2}) + cache = StreamChangeCache( + name="#test", + server_name=self.hs.hostname, + current_stream_pos=1, + prefilled_cache={"user@foo.com": 2}, + ) self.assertTrue(cache.has_entity_changed("user@foo.com", 1)) def test_has_entity_changed(self) -> None: @@ -23,7 +28,9 @@ def test_has_entity_changed(self) -> None: StreamChangeCache.entity_has_changed will mark entities as changed, and has_entity_changed will observe the changed entities. """ - cache = StreamChangeCache("#test", 3) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=3 + ) cache.entity_has_changed("user@foo.com", 6) cache.entity_has_changed("bar@baz.net", 7) @@ -61,7 +68,9 @@ def test_entity_has_changed_pops_off_start(self) -> None: StreamChangeCache.entity_has_changed will respect the max size and purge the oldest items upon reaching that max size. """ - cache = StreamChangeCache("#test", 1, max_size=2) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1, max_size=2 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -100,7 +109,9 @@ def test_get_all_entities_changed(self) -> None: entities since the given position. If the position is before the start of the known stream, it returns None instead. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -148,7 +159,9 @@ def test_has_any_entity_changed(self) -> None: stream position is before it, it will return True, otherwise False if the cache has no entries. 
""" - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) # With no entities, it returns True for the past, present, and False for # the future. @@ -175,7 +188,9 @@ def test_get_entities_changed(self, perf_factor: int) -> None: stream position is earlier than the earliest known position, it will return all of the entities queried for. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -242,7 +257,9 @@ def test_max_pos(self) -> None: recent point where the entity could have changed. If the entity is not known, the stream start is provided instead. """ - cache = StreamChangeCache("#test", 1) + cache = StreamChangeCache( + name="#test", server_name=self.hs.hostname, current_stream_pos=1 + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) @@ -260,7 +277,12 @@ def test_all_entities_changed(self) -> None: """ `StreamChangeCache.all_entities_changed(...)` will mark all entites as changed. 
""" - cache = StreamChangeCache("#test", 1, max_size=10) + cache = StreamChangeCache( + name="#test", + server_name=self.hs.hostname, + current_stream_pos=1, + max_size=10, + ) cache.entity_has_changed("user@foo.com", 2) cache.entity_has_changed("bar@baz.net", 3) From 74610aabd21ce00d784b990adc832a577a160b68 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 16:49:57 -0500 Subject: [PATCH 05/24] Fill in `TTLCache` --- .../federation/matrix_federation_agent.py | 15 ++++++++++++ .../http/federation/well_known_resolver.py | 24 ++++++++++++++----- synapse/http/matrixfederationclient.py | 1 + synapse/util/caches/__init__.py | 4 ++++ synapse/util/caches/ttlcache.py | 22 +++++++++++++++-- tests/handlers/test_typing.py | 1 + .../test_matrix_federation_agent.py | 12 ++++++++-- .../test_federation_sender_shard.py | 1 + tests/util/caches/test_ttlcache.py | 4 +++- 9 files changed, 73 insertions(+), 11 deletions(-) diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index a7742fcea85..0699f945b4b 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -97,9 +97,23 @@ def __init__( user_agent: bytes, ip_allowlist: Optional[IPSet], ip_blocklist: IPSet, + server_name: str, _srv_resolver: Optional[SrvResolver] = None, _well_known_resolver: Optional[WellKnownResolver] = None, ): + """ + Args: + reactor + tls_client_options_factory + user_agent + ip_allowlist + ip_blocklist + server_name: The homeserver name running this resolver + (used to label metrics) (`hs.hostname`). 
+ _srv_resolver + _well_known_resolver + """ + # proxy_reactor is not blocklisting reactor proxy_reactor = reactor @@ -139,6 +153,7 @@ def __init__( ip_blocklist=ip_blocklist, ), user_agent=self.user_agent, + server_name=server_name, ) self._well_known_resolver = _well_known_resolver diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 9a6bac7281a..9d2afb18d63 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -77,10 +77,6 @@ logger = logging.getLogger(__name__) -_well_known_cache: TTLCache[bytes, Optional[bytes]] = TTLCache("well-known") -_had_valid_well_known_cache: TTLCache[bytes, bool] = TTLCache("had-valid-well-known") - - @attr.s(slots=True, frozen=True, auto_attribs=True) class WellKnownLookupResult: delegated_server: Optional[bytes] @@ -94,17 +90,33 @@ def __init__( reactor: IReactorTime, agent: IAgent, user_agent: bytes, + server_name: str, well_known_cache: Optional[TTLCache[bytes, Optional[bytes]]] = None, had_well_known_cache: Optional[TTLCache[bytes, bool]] = None, ): + """ + Args: + reactor + agent + user_agent + server_name: The homeserver name running this resolver + (used to label metrics) (`hs.hostname`). 
+ well_known_cache + had_well_known_cache + """ + self._reactor = reactor self._clock = Clock(reactor) if well_known_cache is None: - well_known_cache = _well_known_cache + well_known_cache = TTLCache( + cache_name="well-known", server_name=server_name + ) if had_well_known_cache is None: - had_well_known_cache = _had_valid_well_known_cache + had_well_known_cache = TTLCache( + cache_name="had-valid-well-known", server_name=server_name + ) self._well_known_cache = well_known_cache self._had_valid_well_known_cache = had_well_known_cache diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 97a863a1189..bfc4696cd98 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -422,6 +422,7 @@ def __init__( user_agent.encode("ascii"), hs.config.server.federation_ip_range_allowlist, hs.config.server.federation_ip_range_blocklist, + server_name=self.server_name, ) else: proxy_authorization_secret = hs.config.worker.worker_replication_secret diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index a7a1b33ccab..b054b47b0f3 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -121,6 +121,10 @@ class EvictionReason(Enum): @attr.s(slots=True, auto_attribs=True, kw_only=True) class CacheMetric: + """ + Used to track cache metrics + """ + _cache: Sized _cache_type: str _cache_name: str diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index aa632e68af9..19cc3593b85 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -40,7 +40,21 @@ class TTLCache(Generic[KT, VT]): """A key/value cache implementation where each entry has its own TTL""" - def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): + def __init__( + self, + *, + cache_name: str, + server_name: str, + timer: Callable[[], float] = time.time, + ): + """ + Args: + cache_name + server_name: 
server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). + timer: Function used to get the current time in seconds since the epoch. + """ + # map from key to _CacheEntry self._data: Dict[KT, _CacheEntry[KT, VT]] = {} @@ -50,7 +64,11 @@ def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): self._timer = timer self._metrics = register_cache( - cache_type="ttl", cache_name=cache_name, cache=self, resizable=False + cache_type="ttl", + cache_name=cache_name, + cache=self, + server_name=server_name, + resizable=False, ) def set(self, key: KT, value: VT, ttl: float) -> None: diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 9d8960315fe..f450547f054 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -91,6 +91,7 @@ def make_homeserver( user_agent=b"SynapseInTrialTest/0.0.0", ip_allowlist=None, ip_blocklist=IPSet(), + server_name="test_server", ) # the tests assume that we are starting at unix time 1000 diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 6b25e53c28e..a9e1ecb4a69 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -85,15 +85,20 @@ def setUp(self) -> None: self.tls_factory = FederationPolicyForHTTPS(config) self.well_known_cache: TTLCache[bytes, Optional[bytes]] = TTLCache( - "test_cache", timer=self.reactor.seconds + cache_name="test_cache", + server_name="test_server", + timer=self.reactor.seconds, ) self.had_well_known_cache: TTLCache[bytes, bool] = TTLCache( - "test_cache", timer=self.reactor.seconds + cache_name="test_cache", + server_name="test_server", + timer=self.reactor.seconds, ) self.well_known_resolver = WellKnownResolver( self.reactor, Agent(self.reactor, contextFactory=self.tls_factory), b"test-agent", + server_name="test_server", 
well_known_cache=self.well_known_cache, had_well_known_cache=self.had_well_known_cache, ) @@ -274,6 +279,7 @@ def _make_agent(self) -> MatrixFederationAgent: user_agent=b"test-agent", # Note that this is unused since _well_known_resolver is provided. ip_allowlist=IPSet(), ip_blocklist=IPSet(), + server_name="test_server", _srv_resolver=self.mock_resolver, _well_known_resolver=self.well_known_resolver, ) @@ -1016,11 +1022,13 @@ def test_get_well_known_unsigned_cert(self) -> None: user_agent=b"test-agent", # This is unused since _well_known_resolver is passed below. ip_allowlist=IPSet(), ip_blocklist=IPSet(), + server_name="test_server", _srv_resolver=self.mock_resolver, _well_known_resolver=WellKnownResolver( cast(ISynapseReactor, self.reactor), Agent(self.reactor, contextFactory=tls_factory), b"test-agent", + server_name="test_server", well_known_cache=self.well_known_cache, had_well_known_cache=self.had_well_known_cache, ), diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py index 58a7a9dc721..0e9ecc54e28 100644 --- a/tests/replication/test_federation_sender_shard.py +++ b/tests/replication/test_federation_sender_shard.py @@ -73,6 +73,7 @@ def setUp(self) -> None: user_agent=b"SynapseInTrialTest/0.0.0", ip_allowlist=None, ip_blocklist=IPSet(), + server_name="test_server", ) def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: diff --git a/tests/util/caches/test_ttlcache.py b/tests/util/caches/test_ttlcache.py index ae73df18411..a7b55ffecf4 100644 --- a/tests/util/caches/test_ttlcache.py +++ b/tests/util/caches/test_ttlcache.py @@ -28,7 +28,9 @@ class CacheTestCase(unittest.TestCase): def setUp(self) -> None: self.mock_timer = Mock(side_effect=lambda: 100.0) - self.cache: TTLCache[str, str] = TTLCache("test_cache", self.mock_timer) + self.cache: TTLCache[str, str] = TTLCache( + cache_name="test_cache", server_name="test_server", timer=self.mock_timer + ) def test_get(self) -> 
None: """simple set/get tests""" From 8dbca87f44aa4f6b819178027ba13e33ab383f17 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 17:12:29 -0500 Subject: [PATCH 06/24] Fill in `LruCache` except for `@cached` --- synapse/handlers/sync.py | 2 +- synapse/rest/client/sync.py | 2 + synapse/storage/databases/main/devices.py | 7 ++- .../databases/main/event_federation.py | 6 ++- .../storage/databases/main/events_worker.py | 1 + synapse/storage/databases/state/store.py | 11 +++-- synapse/util/caches/__init__.py | 2 +- synapse/util/caches/deferred_cache.py | 5 ++ synapse/util/caches/descriptors.py | 8 +++- synapse/util/caches/dictionary_cache.py | 11 ++++- synapse/util/caches/expiringcache.py | 2 +- synapse/util/caches/lrucache.py | 46 +++++++++++++++++-- synapse/util/caches/response_cache.py | 2 +- synapse/util/caches/stream_change_cache.py | 2 +- synapse/util/caches/ttlcache.py | 2 +- synmark/suites/lrucache.py | 2 +- synmark/suites/lrucache_evict.py | 2 +- tests/config/test_cache.py | 14 +++--- tests/util/test_dict_cache.py | 2 +- tests/util/test_lrucache.py | 40 ++++++++-------- 20 files changed, 122 insertions(+), 47 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 1a8cdc4f768..d830cbc5463 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1132,7 +1132,7 @@ def get_lazy_loaded_members_cache( ) if cache is None: logger.debug("creating LruCache for %r", cache_key) - cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) + cache = LruCache(max_size=LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) self.lazy_loaded_members_cache[cache_key] = cache else: logger.debug("found LruCache for %r", cache_key) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index bac02122d04..c9fb9dc4d3a 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -111,6 +111,7 @@ class SyncRestServlet(RestServlet): def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs + 
self.server_name = hs.hostname self.auth = hs.get_auth() self.store = hs.get_datastores().main self.sync_handler = hs.get_sync_handler() @@ -125,6 +126,7 @@ def __init__(self, hs: "HomeServer"): self._json_filter_cache: LruCache[str, bool] = LruCache( max_size=1000, cache_name="sync_valid_filter", + server_name=self.server_name, ) # Ratelimiter for presence updates, keyed by requester. diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index ec8d85cc3de..af4e35c47e8 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1774,11 +1774,16 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname # Map of (user_id, device_id) -> bool. If there is an entry that implies # the device exists. self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = ( - LruCache(cache_name="device_id_exists", max_size=10000) + LruCache( + cache_name="device_id_exists", + server_name=self.server_name, + max_size=10000, + ) ) async def store_device( diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 46aa5902d8d..32ff51cc372 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -137,6 +137,7 @@ def __init__( super().__init__(database, db_conn, hs) self.hs = hs + self.server_name = hs.hostname if hs.config.worker.run_background_tasks: hs.get_clock().looping_call( @@ -145,7 +146,10 @@ def __init__( # Cache of event ID to list of auth event IDs and their depths. 
self._event_auth_cache: LruCache[str, List[Tuple[str, int]]] = LruCache( - 500000, "_event_auth_cache", size_callback=len + max_size=500000, + server_name=self.server_name, + cache_name="_event_auth_cache", + size_callback=len, ) # Flag used by unit tests to disable fallback when there is no chain cover diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 3f69b698737..fa132837d0c 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -286,6 +286,7 @@ def __init__( self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( AsyncLruCache( + server_name=self.server_name, cache_name="*getEvent*", max_size=hs.config.caches.event_cache_size, # `extra_index_cb` Returns a tuple as that is the key type diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index c1a66dcba02..9b3b7e086f9 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -92,6 +92,7 @@ def __init__( ): super().__init__(database, db_conn, hs) self._state_deletion_store = state_deletion_store + self.server_name = hs.hostname # Originally the state store used a single DictionaryCache to cache the # event IDs for the state types in a given state group to avoid hammering @@ -123,14 +124,16 @@ def __init__( # vast majority of state in Matrix (today) is member events. 
self._state_group_cache: DictionaryCache[int, StateKey, str] = DictionaryCache( - "*stateGroupCache*", + name="*stateGroupCache*", + server_name=self.server_name, # TODO: this hasn't been tuned yet - 50000, + max_entries=50000, ) self._state_group_members_cache: DictionaryCache[int, StateKey, str] = ( DictionaryCache( - "*stateGroupMembersCache*", - 500000, + name="*stateGroupMembersCache*", + server_name=self.server_name, + max_entries=500000, ) ) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index b054b47b0f3..6e66a8d8219 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -219,7 +219,7 @@ def register_cache( cache_name: name of the cache cache: cache itself, which must implement __len__(), and may optionally implement a max_size property - server_name: server_name: The homeserver name that this cache is associated with + server_name: The homeserver name that this cache is associated with (used to label the metric) (`hs.hostname`). collect_callback: If given, a function which is called during metric collection to update additional metrics. diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index 14868fa4d39..0c6c912918a 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -79,7 +79,9 @@ class DeferredCache(Generic[KT, VT]): def __init__( self, + *, name: str, + server_name: str, max_entries: int = 1000, tree: bool = False, iterable: bool = False, @@ -89,6 +91,8 @@ def __init__( """ Args: name: The name of the cache + server_name: server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). max_entries: Maximum amount of entries that the cache will hold tree: Use a TreeCache instead of a dict as the underlying cache type iterable: If True, count each item in the cached object as an entry, @@ -113,6 +117,7 @@ def metrics_cb() -> None: # a Deferred. 
self.cache: LruCache[KT, VT] = LruCache( max_size=max_entries, + server_name=server_name, cache_name=name, cache_type=cache_type, size_callback=( diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 29a95867104..714740912b3 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -200,6 +200,8 @@ def foo(self, key, cache_context): def __init__( self, + *, + server_name: str, orig: Callable[..., Any], max_entries: int = 1000, num_args: Optional[int] = None, @@ -217,6 +219,7 @@ def __init__( cache_context=cache_context, name=name, ) + self.server_name = server_name if tree and self.num_args < 2: raise RuntimeError( @@ -233,6 +236,7 @@ def __get__( ) -> Callable[..., "defer.Deferred[Any]"]: cache: DeferredCache[CacheKey, Any] = DeferredCache( name=self.name, + server_name=self.server_name, max_entries=self.max_entries, tree=self.tree, iterable=self.iterable, @@ -479,6 +483,7 @@ class _CachedFunctionDescriptor: """Helper for `@cached`, we name it so that we can hook into it with mypy plugin.""" + server_name: str max_entries: int num_args: Optional[int] uncached_args: Optional[Collection[str]] @@ -490,7 +495,8 @@ class _CachedFunctionDescriptor: def __call__(self, orig: F) -> CachedFunction[F]: d = DeferredCacheDescriptor( - orig, + server_name=self.server_name, + orig=orig, max_entries=self.max_entries, num_args=self.num_args, uncached_args=self.uncached_args, diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index 14bd3ba3b04..168ddc51cd5 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -127,7 +127,15 @@ class DictionaryCache(Generic[KT, DKT, DV]): for the '2' dict key. 
""" - def __init__(self, name: str, max_entries: int = 1000): + def __init__(self, *, name: str, server_name: str, max_entries: int = 1000): + """ + Args: + name + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). + max_entries + """ + # We use a single LruCache to store two different types of entries: # 1. Map from (key, dict_key) -> dict value (or sentinel, indicating # the key doesn't exist in the dict); and @@ -152,6 +160,7 @@ def __init__(self, name: str, max_entries: int = 1000): Union[_PerKeyValue, Dict[DKT, DV]], ] = LruCache( max_size=max_entries, + server_name=server_name, cache_name=name, cache_type=TreeCache, size_callback=len, diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 6bd4995c26d..4be4c6f01b1 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -58,7 +58,7 @@ def __init__( """ Args: cache_name: Name of this cache, used for logging. - server_name: server_name: The homeserver name that this cache is associated + server_name: The homeserver name that this cache is associated with (used to label the metric) (`hs.hostname`). clock max_len: Max size of dict. If the dict grows larger than this diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index fc7333e9784..466362e79ce 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -376,9 +376,43 @@ class LruCache(Generic[KT, VT]): If cache_type=TreeCache, all keys must be tuples. 
""" + @overload def __init__( self, + *, max_size: int, + server_name: str, + cache_name: str, + cache_type: Type[Union[dict, TreeCache]] = dict, + size_callback: Optional[Callable[[VT], int]] = None, + metrics_collection_callback: Optional[Callable[[], None]] = None, + apply_cache_factor_from_config: bool = True, + clock: Optional[Clock] = None, + prune_unread_entries: bool = True, + extra_index_cb: Optional[Callable[[KT, VT], KT]] = None, + ): ... + + @overload + def __init__( + self, + *, + max_size: int, + server_name: Literal[None] = None, + cache_name: Literal[None] = None, + cache_type: Type[Union[dict, TreeCache]] = dict, + size_callback: Optional[Callable[[VT], int]] = None, + metrics_collection_callback: Optional[Callable[[], None]] = None, + apply_cache_factor_from_config: bool = True, + clock: Optional[Clock] = None, + prune_unread_entries: bool = True, + extra_index_cb: Optional[Callable[[KT, VT], KT]] = None, + ): ... + + def __init__( + self, + *, + max_size: int, + server_name: Optional[str] = None, cache_name: Optional[str] = None, cache_type: Type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, @@ -392,8 +426,13 @@ def __init__( Args: max_size: The maximum amount of entries the cache can hold - cache_name: The name of this cache, for the prometheus metrics. If unset, - no metrics will be reported on this cache. + server_name: The homeserver name that this cache is associated with + (used to label the metric) (`hs.hostname`). Must be set if `cache_name` is + set. If unset, no metrics will be reported on this cache. + + cache_name: The name of this cache, for the prometheus metrics. Must be set + if `server_name` is set. If unset, no metrics will be reported on this + cache. cache_type: type of underlying cache to be used. Typically one of dict @@ -457,11 +496,12 @@ def __init__( # do yet when we get resized. 
self._on_resize: Optional[Callable[[], None]] = None - if cache_name is not None: + if cache_name is not None and server_name is not None: metrics: Optional[CacheMetric] = register_cache( cache_type="lru_cache", cache_name=cache_name, cache=self, + server_name=server_name, collect_callback=metrics_collection_callback, ) else: diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 6f1dace5847..49a9151916e 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -114,7 +114,7 @@ def __init__( Args: clock name - server_name: server_name: The homeserver name that this cache is associated + server_name: The homeserver name that this cache is associated with (used to label the metric) (`hs.hostname`). timeout_ms enable_logging diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 1a160675958..2cffd352d82 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -83,7 +83,7 @@ def __init__( """ Args: name - server_name: server_name: The homeserver name that this cache is associated with + server_name: The homeserver name that this cache is associated with (used to label the metric) (`hs.hostname`). current_stream_pos max_size diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 19cc3593b85..18c3a1e51c5 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -50,7 +50,7 @@ def __init__( """ Args: cache_name - server_name: server_name: The homeserver name that this cache is associated with + server_name: The homeserver name that this cache is associated with (used to label the metric) (`hs.hostname`). timer: Function used to get the current time in seconds since the epoch. 
""" diff --git a/synmark/suites/lrucache.py b/synmark/suites/lrucache.py index 49d200c43b5..d109441e551 100644 --- a/synmark/suites/lrucache.py +++ b/synmark/suites/lrucache.py @@ -29,7 +29,7 @@ async def main(reactor: ISynapseReactor, loops: int) -> float: """ Benchmark `loops` number of insertions into LruCache without eviction. """ - cache: LruCache[int, bool] = LruCache(loops) + cache: LruCache[int, bool] = LruCache(max_size=loops) start = perf_counter() diff --git a/synmark/suites/lrucache_evict.py b/synmark/suites/lrucache_evict.py index 77061625a90..00cfdd04471 100644 --- a/synmark/suites/lrucache_evict.py +++ b/synmark/suites/lrucache_evict.py @@ -30,7 +30,7 @@ async def main(reactor: ISynapseReactor, loops: int) -> float: Benchmark `loops` number of insertions into LruCache where half of them are evicted. """ - cache: LruCache[int, bool] = LruCache(loops // 2) + cache: LruCache[int, bool] = LruCache(max_size=loops // 2) start = perf_counter() diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py index aead73e0594..deb6bade461 100644 --- a/tests/config/test_cache.py +++ b/tests/config/test_cache.py @@ -75,7 +75,7 @@ def test_individual_instantiated_before_config_load(self) -> None: the default cache size in the interim, and then resized once the config is loaded. 
""" - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) @@ -96,7 +96,7 @@ def test_individual_instantiated_after_config_load(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 200) @@ -106,7 +106,7 @@ def test_global_instantiated_before_config_load(self) -> None: the default cache size in the interim, and then resized to the new default cache size once the config is loaded. """ - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 50) @@ -126,7 +126,7 @@ def test_global_instantiated_after_config_load(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache: LruCache = LruCache(100) + cache: LruCache = LruCache(max_size=100) add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor) self.assertEqual(cache.max_size, 150) @@ -145,15 +145,15 @@ def test_cache_with_asterisk_in_name(self) -> None: self.config.read_config(config, config_dir_path="", data_dir_path="") self.config.resize_all_caches() - cache_a: LruCache = LruCache(100) + cache_a: LruCache = LruCache(max_size=100) add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor) self.assertEqual(cache_a.max_size, 200) - cache_b: LruCache = LruCache(100) + cache_b: LruCache = LruCache(max_size=100) add_resizable_cache("*Cache_b*", cache_resize_callback=cache_b.set_cache_factor) self.assertEqual(cache_b.max_size, 300) - cache_c: LruCache = LruCache(100) + cache_c: LruCache = LruCache(max_size=100) 
add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor) self.assertEqual(cache_c.max_size, 200) diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index 5055e4aead2..246e18fd155 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -28,7 +28,7 @@ class DictCacheTestCase(unittest.TestCase): def setUp(self) -> None: self.cache: DictionaryCache[str, str, str] = DictionaryCache( - "foobar", max_entries=10 + name="foobar", server_name="test_server", max_entries=10 ) def test_simple_cache_hit_full(self) -> None: diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 3f0d8139f8e..0e919cad37f 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -34,13 +34,13 @@ class LruCacheTestCase(unittest.HomeserverTestCase): def test_get_set(self) -> None: - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache["key"] = "value" self.assertEqual(cache.get("key"), "value") self.assertEqual(cache["key"], "value") def test_eviction(self) -> None: - cache: LruCache[int, int] = LruCache(2) + cache: LruCache[int, int] = LruCache(max_size=2) cache[1] = 1 cache[2] = 2 @@ -54,7 +54,7 @@ def test_eviction(self) -> None: self.assertEqual(cache.get(3), 3) def test_setdefault(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) self.assertEqual(cache.setdefault("key", 1), 1) self.assertEqual(cache.get("key"), 1) self.assertEqual(cache.setdefault("key", 2), 1) @@ -63,14 +63,14 @@ def test_setdefault(self) -> None: self.assertEqual(cache.get("key"), 2) def test_pop(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) cache["key"] = 1 self.assertEqual(cache.pop("key"), 1) self.assertEqual(cache.pop("key"), None) def test_del_multi(self) -> None: # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache(max_size=4, cache_type=TreeCache) cache[("animal", "cat")] = "mew" cache[("animal", "dog")] = "woof" cache[("vehicles", "car")] = "vroom" @@ -89,21 +89,21 @@ def test_del_multi(self) -> None: # Man from del_multi say "Yes". def test_clear(self) -> None: - cache: LruCache[str, int] = LruCache(1) + cache: LruCache[str, int] = LruCache(max_size=1) cache["key"] = 1 cache.clear() self.assertEqual(len(cache), 0) @override_config({"caches": {"per_cache_factors": {"mycache": 10}}}) def test_special_size(self) -> None: - cache: LruCache = LruCache(10, "mycache") + cache: LruCache = LruCache(max_size=10, "mycache") self.assertEqual(cache.max_size, 100) class LruCacheCallbacksTestCase(unittest.HomeserverTestCase): def test_get(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value") self.assertFalse(m.called) @@ -122,7 +122,7 @@ def test_get(self) -> None: def test_multi_get(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value") self.assertFalse(m.called) @@ -141,7 +141,7 @@ def test_multi_get(self) -> None: def test_set(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value", callbacks=[m]) self.assertFalse(m.called) @@ -157,7 +157,7 @@ def test_set(self) -> None: def test_pop(self) -> None: m = Mock() - cache: LruCache[str, str] = LruCache(1) + cache: LruCache[str, str] = LruCache(max_size=1) cache.set("key", "value", callbacks=[m]) self.assertFalse(m.called) @@ -177,7 +177,7 @@ def test_del_multi(self) -> None: m3 = Mock() m4 = Mock() # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache(4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache(max_size=4, cache_type=TreeCache) cache.set(("a", "1"), "value", callbacks=[m1]) cache.set(("a", "2"), "value", callbacks=[m2]) @@ -199,7 +199,7 @@ def test_del_multi(self) -> None: def test_clear(self) -> None: m1 = Mock() m2 = Mock() - cache: LruCache[str, str] = LruCache(5) + cache: LruCache[str, str] = LruCache(max_size=5) cache.set("key1", "value", callbacks=[m1]) cache.set("key2", "value", callbacks=[m2]) @@ -216,7 +216,7 @@ def test_eviction(self) -> None: m1 = Mock(name="m1") m2 = Mock(name="m2") m3 = Mock(name="m3") - cache: LruCache[str, str] = LruCache(2) + cache: LruCache[str, str] = LruCache(max_size=2) cache.set("key1", "value", callbacks=[m1]) cache.set("key2", "value", callbacks=[m2]) @@ -252,7 +252,7 @@ def test_eviction(self) -> None: class LruCacheSizedTestCase(unittest.HomeserverTestCase): def test_evict(self) -> None: - cache: LruCache[str, List[int]] = LruCache(5, size_callback=len) + cache: LruCache[str, List[int]] = LruCache(max_size=5, size_callback=len) cache["key1"] = [0] cache["key2"] = [1, 2] cache["key3"] = [3] @@ -275,7 +275,7 @@ def test_evict(self) -> None: def test_zero_size_drop_from_cache(self) -> None: """Test that `drop_from_cache` works correctly with 0-sized entries.""" - cache: LruCache[str, List[int]] = LruCache(5, size_callback=lambda x: 0) + cache: LruCache[str, List[int]] = LruCache(max_size=5, size_callback=lambda x: 0) cache["key1"] = [] self.assertEqual(len(cache), 0) @@ -299,7 +299,7 @@ def default_config(self) -> JsonDict: def test_evict(self) -> None: setup_expire_lru_cache_entries(self.hs) - cache: LruCache[str, int] = LruCache(5, clock=self.hs.get_clock()) + cache: LruCache[str, int] = LruCache(max_size=5, clock=self.hs.get_clock()) # Check that we evict entries we haven't accessed for 30 minutes. 
cache["key1"] = 1 @@ -351,7 +351,7 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: mock_jemalloc_class.get_stat.return_value = 924288000 setup_expire_lru_cache_entries(self.hs) - cache: LruCache[str, int] = LruCache(4, clock=self.hs.get_clock()) + cache: LruCache[str, int] = LruCache(max_size=4, clock=self.hs.get_clock()) cache["key1"] = 1 cache["key2"] = 2 @@ -387,7 +387,7 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: class ExtraIndexLruCacheTestCase(unittest.HomeserverTestCase): def test_invalidate_simple(self) -> None: - cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache(max_size=10, extra_index_cb=lambda k, v: str(v)) cache["key1"] = 1 cache["key2"] = 2 @@ -400,7 +400,7 @@ def test_invalidate_simple(self) -> None: self.assertEqual(cache.get("key2"), 2) def test_invalidate_multi(self) -> None: - cache: LruCache[str, int] = LruCache(10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache(max_size=10, extra_index_cb=lambda k, v: str(v)) cache["key1"] = 1 cache["key2"] = 1 cache["key3"] = 2 From 1e57b57e29dd0d2de7b160787fba5b3673ace1c4 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 17:16:33 -0500 Subject: [PATCH 07/24] Fix `LruCache` positional argument lint --- tests/util/test_lrucache.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 0e919cad37f..b7acf586904 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -70,7 +70,9 @@ def test_pop(self) -> None: def test_del_multi(self) -> None: # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache(max_size=4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache( + max_size=4, cache_type=TreeCache + ) cache[("animal", "cat")] = "mew" cache[("animal", "dog")] = "woof" cache[("vehicles", "car")] = "vroom" @@ -96,7 +98,9 @@ def test_clear(self) -> None: @override_config({"caches": {"per_cache_factors": {"mycache": 10}}}) def test_special_size(self) -> None: - cache: LruCache = LruCache(max_size=10, "mycache") + cache: LruCache = LruCache( + max_size=10, server_name="test_server", cache_name="mycache" + ) self.assertEqual(cache.max_size, 100) @@ -177,7 +181,9 @@ def test_del_multi(self) -> None: m3 = Mock() m4 = Mock() # The type here isn't quite correct as they don't handle TreeCache well. - cache: LruCache[Tuple[str, str], str] = LruCache(max_size=4, cache_type=TreeCache) + cache: LruCache[Tuple[str, str], str] = LruCache( + max_size=4, cache_type=TreeCache + ) cache.set(("a", "1"), "value", callbacks=[m1]) cache.set(("a", "2"), "value", callbacks=[m2]) @@ -275,7 +281,9 @@ def test_evict(self) -> None: def test_zero_size_drop_from_cache(self) -> None: """Test that `drop_from_cache` works correctly with 0-sized entries.""" - cache: LruCache[str, List[int]] = LruCache(max_size=5, size_callback=lambda x: 0) + cache: LruCache[str, List[int]] = LruCache( + max_size=5, size_callback=lambda x: 0 + ) cache["key1"] = [] self.assertEqual(len(cache), 0) @@ -387,7 +395,9 @@ def test_evict_memory(self, jemalloc_interface: Mock) -> None: class ExtraIndexLruCacheTestCase(unittest.HomeserverTestCase): def test_invalidate_simple(self) -> None: - cache: LruCache[str, int] = LruCache(max_size=10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache( + max_size=10, extra_index_cb=lambda k, v: str(v) + ) cache["key1"] = 1 cache["key2"] = 2 @@ -400,7 +410,9 @@ def test_invalidate_simple(self) -> None: self.assertEqual(cache.get("key2"), 2) def test_invalidate_multi(self) -> None: - cache: 
LruCache[str, int] = LruCache(max_size=10, extra_index_cb=lambda k, v: str(v)) + cache: LruCache[str, int] = LruCache( + max_size=10, extra_index_cb=lambda k, v: str(v) + ) cache["key1"] = 1 cache["key2"] = 1 cache["key3"] = 2 From 19c917cacee85ce8df41afbbe23b3dce7eb19ae1 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 17:41:59 -0500 Subject: [PATCH 08/24] Attempt `@cached` solution v1 --- synapse/util/caches/descriptors.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 714740912b3..97e20166c93 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -33,6 +33,7 @@ List, Mapping, Optional, + Protocol, Sequence, Tuple, Type, @@ -153,6 +154,14 @@ def __init__( ) +class HasServerName(Protocol): + server_name: str + """ + The homeserver name that this cache is associated with (used to label the metric) + (`hs.hostname`). + """ + + class DeferredCacheDescriptor(_CacheDescriptorBase): """A method decorator that applies a memoizing cache around the function. 
@@ -201,7 +210,6 @@ def foo(self, key, cache_context): def __init__( self, *, - server_name: str, orig: Callable[..., Any], max_entries: int = 1000, num_args: Optional[int] = None, @@ -219,7 +227,6 @@ def __init__( cache_context=cache_context, name=name, ) - self.server_name = server_name if tree and self.num_args < 2: raise RuntimeError( @@ -232,11 +239,19 @@ def __init__( self.prune_unread_entries = prune_unread_entries def __get__( - self, obj: Optional[Any], owner: Optional[Type] + self, obj: Optional[HasServerName], owner: Optional[Type] ) -> Callable[..., "defer.Deferred[Any]"]: + # We need access to instance-level `obj.server_name` attribute + assert obj is not None, ( + "Cannot call cached method from class (❌ `MyClass.cached_method()`)" + ) + assert obj.server_name is not None, ( + "The `server_name` attribute must be set on the object where `@cached` decorator is used." + ) + cache: DeferredCache[CacheKey, Any] = DeferredCache( name=self.name, - server_name=self.server_name, + server_name=obj.server_name, max_entries=self.max_entries, tree=self.tree, iterable=self.iterable, @@ -483,7 +498,6 @@ class _CachedFunctionDescriptor: """Helper for `@cached`, we name it so that we can hook into it with mypy plugin.""" - server_name: str max_entries: int num_args: Optional[int] uncached_args: Optional[Collection[str]] @@ -495,7 +509,6 @@ class _CachedFunctionDescriptor: def __call__(self, orig: F) -> CachedFunction[F]: d = DeferredCacheDescriptor( - server_name=self.server_name, orig=orig, max_entries=self.max_entries, num_args=self.num_args, From 045366644870ce6ec36c8dda3481dc3c3c31f9a4 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 17:42:41 -0500 Subject: [PATCH 09/24] Fix missing `server_name` on `ExpiringCache` usage --- synapse/federation/federation_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 36fefe67a47..35c5ac63119 100644 --- 
a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -144,6 +144,7 @@ def __init__(self, hs: "HomeServer"): # (which server we pulled the event from) self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache( cache_name="get_pdu_cache", + server_name=self.server_name, clock=self._clock, max_len=1000, expiry_ms=120 * 1000, From a17206f5646ae62d20c8bccc466076fc4e2d566c Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 27 Jun 2025 17:46:01 -0500 Subject: [PATCH 10/24] Fix arguments in `DeferredCache` usage --- tests/metrics/test_metrics.py | 4 ++- tests/util/caches/test_deferred_cache.py | 45 ++++++++++++++++++------ 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index 2e7004df3a9..dca8dd79b18 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -159,7 +159,9 @@ def test_cache_metric(self) -> None: Caches produce metrics reflecting their state when scraped. 
""" CACHE_NAME = "cache_metrics_test_fgjkbdfg" - cache: DeferredCache[str, str] = DeferredCache(CACHE_NAME, max_entries=777) + cache: DeferredCache[str, str] = DeferredCache( + name=CACHE_NAME, server_name=self.hs.hostname, max_entries=777 + ) items = { x.split(b"{")[0].decode("ascii"): x.split(b" ")[1].decode("ascii") diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py index f99f99237ef..7017d6d70ac 100644 --- a/tests/util/caches/test_deferred_cache.py +++ b/tests/util/caches/test_deferred_cache.py @@ -31,18 +31,24 @@ class DeferredCacheTestCase(TestCase): def test_empty(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) with self.assertRaises(KeyError): cache.get("foo") def test_hit(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) cache.prefill("foo", 123) self.assertEqual(self.successResultOf(cache.get("foo")), 123) def test_hit_deferred(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) origin_d: "defer.Deferred[int]" = defer.Deferred() set_d = cache.set("k1", origin_d) @@ -65,7 +71,9 @@ def check1(r: str) -> str: def test_callbacks(self) -> None: """Invalidation callbacks are called at the right time""" - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) callbacks = set() # start with an entry, with a callback @@ -98,7 +106,9 @@ def test_callbacks(self) -> None: self.assertEqual(callbacks, {"set", "get"}) def test_set_fail(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", 
server_name="test_server" + ) callbacks = set() # start with an entry, with a callback @@ -135,7 +145,9 @@ def test_set_fail(self) -> None: self.assertEqual(callbacks, {"prefill", "get2"}) def test_get_immediate(self) -> None: - cache: DeferredCache[str, int] = DeferredCache("test") + cache: DeferredCache[str, int] = DeferredCache( + name="test", server_name="test_server" + ) d1: "defer.Deferred[int]" = defer.Deferred() cache.set("key1", d1) @@ -151,7 +163,9 @@ def test_get_immediate(self) -> None: self.assertEqual(v, 2) def test_invalidate(self) -> None: - cache: DeferredCache[Tuple[str], int] = DeferredCache("test") + cache: DeferredCache[Tuple[str], int] = DeferredCache( + name="test", server_name="test_server" + ) cache.prefill(("foo",), 123) cache.invalidate(("foo",)) @@ -159,7 +173,9 @@ def test_invalidate(self) -> None: cache.get(("foo",)) def test_invalidate_all(self) -> None: - cache: DeferredCache[str, str] = DeferredCache("testcache") + cache: DeferredCache[str, str] = DeferredCache( + name="testcache", server_name="test_server" + ) callback_record = [False, False] @@ -203,7 +219,10 @@ def record_callback(idx: int) -> None: def test_eviction(self) -> None: cache: DeferredCache[int, str] = DeferredCache( - "test", max_entries=2, apply_cache_factor_from_config=False + name="test", + server_name="test_server", + max_entries=2, + apply_cache_factor_from_config=False, ) cache.prefill(1, "one") @@ -218,7 +237,10 @@ def test_eviction(self) -> None: def test_eviction_lru(self) -> None: cache: DeferredCache[int, str] = DeferredCache( - "test", max_entries=2, apply_cache_factor_from_config=False + name="test", + server_name="test_server", + max_entries=2, + apply_cache_factor_from_config=False, ) cache.prefill(1, "one") @@ -237,7 +259,8 @@ def test_eviction_lru(self) -> None: def test_eviction_iterable(self) -> None: cache: DeferredCache[int, List[str]] = DeferredCache( - "test", + name="test", + server_name="test_server", max_entries=3, 
apply_cache_factor_from_config=False, iterable=True, From d10d862ae689ff0b1ebb6050321daeb8d138c425 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 14:55:10 -0500 Subject: [PATCH 11/24] Add changelog --- changelog.d/18604.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/18604.misc diff --git a/changelog.d/18604.misc b/changelog.d/18604.misc new file mode 100644 index 00000000000..c06fb23af5c --- /dev/null +++ b/changelog.d/18604.misc @@ -0,0 +1 @@ +Refactor cache metrics to be homeserver-scoped. From 9895b3b726d5be13de34e1f00f291c58660d0b48 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 15:53:00 -0500 Subject: [PATCH 12/24] Fill in missing `LruCache` usage --- synapse/storage/databases/main/client_ips.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index 69008804bda..cf7bc4ac693 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -421,6 +421,7 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) + self.server_name = hs.hostname if hs.config.redis.redis_enabled: # If we're using Redis, we can shift this update process off to @@ -434,7 +435,9 @@ def __init__( # (user_id, access_token, ip,) -> last_seen self.client_ip_last_seen = LruCache[Tuple[str, str, str], int]( - cache_name="client_ip_last_seen", max_size=50000 + cache_name="client_ip_last_seen", + server_name=self.server_name, + max_size=50000, ) if hs.config.worker.run_background_tasks and self.user_ips_max_age: From 9eae037c1bb8205d0649e3c6644c8b687f6ec4d0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 16:42:16 -0500 Subject: [PATCH 13/24] Fix mypy complaining about unknown types ``` synapse/replication/tcp/streams/_base.py:568: error: Cannot determine type of "_device_list_id_gen" [has-type] 
synapse/storage/databases/main/event_push_actions.py:256: error: Cannot determine type of "server_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/event_push_actions.py:256: error: Cannot determine type of "server_name" in base class "EventsWorkerStore" [misc] synapse/storage/databases/main/event_push_actions.py:256: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/metrics.py:64: error: Cannot determine type of "server_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/metrics.py:64: error: Cannot determine type of "server_name" in base class "EventsWorkerStore" [misc] synapse/storage/databases/main/metrics.py:64: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/push_rule.py:118: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/push_rule.py:118: error: Cannot determine type of "server_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/push_rule.py:118: error: Cannot determine type of "server_name" in base class "EventsWorkerStore" [misc] synapse/storage/databases/main/account_data.py:60: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/account_data.py:60: error: Cannot determine type of "server_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/account_data.py:60: error: Cannot determine type of "server_name" in base class "EventsWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "server_name" in base class "PresenceStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "server_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: 
error: Cannot determine type of "server_name" in base class "ClientIpWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "server_name" in base class "DeviceInboxWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "server_name" in base class "EventsWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] synapse/storage/databases/main/__init__.py:114: error: Cannot determine type of "_instance_name" in base class "DeviceInboxWorkerStore" [misc] synapse/app/generic_worker.py:117: error: Cannot determine type of "_instance_name" in base class "DeviceInboxWorkerStore" [misc] synapse/app/generic_worker.py:117: error: Cannot determine type of "_instance_name" in base class "ReceiptsWorkerStore" [misc] Found 22 errors in 7 files (checked 937 source files) ``` --- synapse/storage/_base.py | 1 + synapse/storage/databases/main/account_data.py | 2 -- synapse/storage/databases/main/deviceinbox.py | 1 - synapse/storage/databases/main/devices.py | 1 - synapse/storage/databases/main/event_federation.py | 1 - synapse/storage/databases/main/events_worker.py | 2 -- synapse/storage/databases/main/presence.py | 1 - synapse/storage/databases/main/receipts.py | 1 - 8 files changed, 1 insertion(+), 9 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 2309b1648e3..548e7df9305 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -55,6 +55,7 @@ def __init__( hs: "HomeServer", ): self.hs = hs + self.server_name = hs.hostname self._clock = hs.get_clock() self.database_engine = database.engine self.db_pool = database diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index ee2b1f6c5dd..93f061ae51a 100644 --- a/synapse/storage/databases/main/account_data.py +++ 
b/synapse/storage/databases/main/account_data.py @@ -66,8 +66,6 @@ def __init__( ): super().__init__(database, db_conn, hs) - self.server_name = hs.hostname - self._can_write_to_account_data = ( self._instance_name in hs.config.worker.writers.account_data ) diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index b51db856a10..da10afbebec 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -86,7 +86,6 @@ def __init__( ): super().__init__(database, db_conn, hs) - self.server_name = hs.hostname self._instance_name = hs.get_instance_name() # Map of (user_id, device_id) to the last stream_id that has been diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 01e6a521ea0..f054c661025 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -93,7 +93,6 @@ def __init__( hs: "HomeServer", ): super().__init__(database, db_conn, hs) - self.server_name = hs.hostname # In the worker store this is an ID tracker which we overwrite in the non-worker # class below that is used on the main process. 
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 32ff51cc372..b7e4a1eb936 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -137,7 +137,6 @@ def __init__( super().__init__(database, db_conn, hs) self.hs = hs - self.server_name = hs.hostname if hs.config.worker.run_background_tasks: hs.get_clock().looping_call( diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index aa3aaa7721a..41e44a24160 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -227,8 +227,6 @@ def __init__( ): super().__init__(database, db_conn, hs) - self.server_name = hs.hostname - self._stream_id_gen: MultiWriterIdGenerator self._backfill_id_gen: MultiWriterIdGenerator diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index d01153497a2..12cff1d3522 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -79,7 +79,6 @@ def __init__( ) -> None: super().__init__(database, db_conn, hs) - self.server_name = hs.hostname self._instance_name = hs.get_instance_name() self._presence_id_gen: MultiWriterIdGenerator diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 5af8e28fb36..81f50467e7d 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -125,7 +125,6 @@ def __init__( db_conn: LoggingDatabaseConnection, hs: "HomeServer", ): - self.server_name = hs.hostname self._instance_name = hs.get_instance_name() # In the worker store this is an ID tracker which we overwrite in the non-worker From ee91f6b00d10c98a68a3cef12925c5d8e22c5a4e Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 16:46:23 -0500 Subject: [PATCH 
14/24] Better explain usage --- synapse/util/caches/descriptors.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 97e20166c93..9630cd6d26b 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -243,7 +243,8 @@ def __get__( ) -> Callable[..., "defer.Deferred[Any]"]: # We need access to instance-level `obj.server_name` attribute assert obj is not None, ( - "Cannot call cached method from class (❌ `MyClass.cached_method()`)" + "Cannot call cached method from class (❌ `MyClass.cached_method()`) " + "and must be called from an instance (✅ `MyClass().cached_method()`). " ) assert obj.server_name is not None, ( "The `server_name` attribute must be set on the object where `@cached` decorator is used." From 1917a0bc93f5fabfa8a1af71b716fb64b94a6f86 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 17:19:35 -0500 Subject: [PATCH 15/24] Fill in `server_name` attribute for `ApplicationService` (for `@cached`) --- synapse/api/auth/base.py | 2 +- synapse/appservice/__init__.py | 8 +++++--- synapse/appservice/scheduler.py | 2 +- synapse/config/appservice.py | 3 +-- synapse/handlers/appservice.py | 2 +- synapse/handlers/directory.py | 2 +- synapse/rest/client/login.py | 2 +- synapse/storage/databases/main/appservice.py | 2 +- tests/api/test_auth.py | 4 ++-- tests/api/test_ratelimiting.py | 6 +++--- tests/appservice/test_api.py | 4 ++-- tests/appservice/test_appservice.py | 5 +++-- tests/handlers/test_appservice.py | 11 ++++++----- tests/handlers/test_device.py | 4 ++-- tests/handlers/test_e2e_keys.py | 6 +++--- tests/handlers/test_oauth_delegation.py | 2 +- tests/handlers/test_user_directory.py | 14 ++++++++------ tests/push/test_push_rule_evaluator.py | 6 +++--- tests/rest/client/test_account.py | 4 ++-- tests/rest/client/test_devices.py | 4 ++-- tests/rest/client/test_directory.py | 4 ++-- tests/rest/client/test_login.py | 6 
+++--- tests/rest/client/test_register.py | 8 ++++---- tests/rest/client/test_rooms.py | 2 +- tests/storage/test_user_directory.py | 5 +++-- tests/test_mau.py | 8 ++++---- 26 files changed, 66 insertions(+), 60 deletions(-) diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py index 3126bc9f27a..f97a71caf7c 100644 --- a/synapse/api/auth/base.py +++ b/synapse/api/auth/base.py @@ -172,7 +172,7 @@ async def validate_appservice_can_control_user_id( """ # It's ok if the app service is trying to use the sender from their registration - if app_service.sender == user_id: + if app_service.sender.to_string() == user_id: pass # Check to make sure the app service is allowed to control the user elif not app_service.is_interested_in_user(user_id): diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 6ee5240c4ee..26b20951533 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -78,7 +78,7 @@ def __init__( self, token: str, id: str, - sender: str, + sender: UserID, url: Optional[str] = None, namespaces: Optional[JsonDict] = None, hs_token: Optional[str] = None, @@ -96,6 +96,8 @@ def __init__( self.hs_token = hs_token # The full Matrix ID for this application service's sender. self.sender = sender + # The application service user should be part of the server's domain. 
+ self.server_name = sender.domain self.namespaces = self._check_namespaces(namespaces) self.id = id self.ip_range_whitelist = ip_range_whitelist @@ -223,7 +225,7 @@ def is_interested_in_user( """ return ( # User is the appservice's configured sender_localpart user - user_id == self.sender + user_id == self.sender.to_string() # User is in the appservice's user namespace or self.is_user_in_namespace(user_id) ) @@ -347,7 +349,7 @@ def is_room_id_in_namespace(self, room_id: str) -> bool: def is_exclusive_user(self, user_id: str) -> bool: return ( self._is_exclusive(ApplicationService.NS_USERS, user_id) - or user_id == self.sender + or user_id == self.sender.to_string() ) def is_interested_in_protocol(self, protocol: str) -> bool: diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index ab3f4e15fe9..9d7fc0995a9 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -319,7 +319,7 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( users: Set[str] = set() # The sender is always included - users.add(service.sender) + users.add(service.sender.to_string()) # All AS users that would receive the PDUs or EDUs sent to these rooms # are classed as 'interesting'. 
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index dda6bcd1b79..81dbd330cc4 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -122,8 +122,7 @@ def _load_appservice( localpart = as_info["sender_localpart"] if urlparse.quote(localpart) != localpart: raise ValueError("sender_localpart needs characters which are not URL encoded.") - user = UserID(localpart, hostname) - user_id = user.to_string() + user_id = UserID(localpart, hostname) # Rate limiting for users of this AS is on by default (excludes sender) rate_limited = as_info.get("rate_limited") diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index f3bbdb5a05b..0a88910861d 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -839,7 +839,7 @@ async def _is_unknown_user(self, user_id: str) -> bool: # user not found; could be the AS though, so check. services = self.store.get_app_services() - service_list = [s for s in services if s.sender == user_id] + service_list = [s for s in services if s.sender.to_string() == user_id] return len(service_list) == 0 async def _check_user_exists(self, user_id: str) -> bool: diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 74c697960fe..11284ccd0bc 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -406,7 +406,7 @@ def can_modify_alias(self, alias: RoomAlias, user_id: Optional[str] = None) -> b ] for service in interested_services: - if user_id == service.sender: + if user_id == service.sender.to_string(): # this user IS the app service so they can do whatever they like return True elif service.is_exclusive_alias(alias.to_string()): diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 72b219447b0..418b5e72bee 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -313,7 +313,7 @@ async def _do_appservice_login( 
should_issue_refresh_token=should_issue_refresh_token, # The user represented by an appservice's configured sender_localpart # is not actually created in Synapse. - should_check_deactivated=qualified_user_id != appservice.sender, + should_check_deactivated=qualified_user_id != appservice.sender.to_string(), request_info=request_info, ) diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 766c94fc14e..9862e574fd1 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -126,7 +126,7 @@ def get_app_service_by_user_id(self, user_id: str) -> Optional[ApplicationServic The application service or None. """ for service in self.services_cache: - if service.sender == user_id: + if service.sender.to_string() == user_id: return service return None diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index bd229cf7e9c..24c0291d81e 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -436,7 +436,7 @@ def test_blocking_mau__appservice_requester_allowed_when_not_tracking_ips( namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] }, - sender="@appservice:sender", + sender=UserID.from_string("@appservice:server"), ) requester = Requester( user=UserID.from_string("@appservice:server"), @@ -467,7 +467,7 @@ def test_blocking_mau__appservice_requester_disallowed_when_tracking_ips( namespaces={ "users": [{"regex": "@_appservice.*:sender", "exclusive": True}] }, - sender="@appservice:sender", + sender=UserID.from_string("@appservice:server"), ) requester = Requester( user=UserID.from_string("@appservice:server"), diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index 93f4f989162..2e45d4e4d23 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -5,7 +5,7 @@ from synapse.config.ratelimiting import RatelimitSettings from synapse.module_api import RatelimitOverride from 
synapse.module_api.callbacks.ratelimit_callbacks import RatelimitModuleApiCallbacks -from synapse.types import create_requester +from synapse.types import UserID, create_requester from tests import unittest @@ -40,7 +40,7 @@ def test_allowed_appservice_ratelimited_via_can_requester_do_action(self) -> Non token="fake_token", id="foo", rate_limited=True, - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), ) as_requester = create_requester("@user:example.com", app_service=appservice) @@ -76,7 +76,7 @@ def test_allowed_appservice_via_can_requester_do_action(self) -> None: token="fake_token", id="foo", rate_limited=False, - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), ) as_requester = create_requester("@user:example.com", app_service=appservice) diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 0f197365409..8fcd928d316 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -25,7 +25,7 @@ from synapse.appservice import ApplicationService from synapse.server import HomeServer -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -41,7 +41,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.api = hs.get_application_service_api() self.service = ApplicationService( id="unique_identifier", - sender="@as:test", + sender=UserID.from_string("@as:test"), url=URL, token="unused", hs_token=TOKEN, diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 3fa44266387..b6c8fb0e83d 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -25,6 +25,7 @@ from twisted.internet import defer from synapse.appservice import ApplicationService, Namespace +from synapse.types import UserID from tests import unittest @@ -37,7 +38,7 @@ class ApplicationServiceTestCase(unittest.TestCase): def 
setUp(self) -> None: self.service = ApplicationService( id="unique_identifier", - sender="@as:test", + sender=UserID.from_string("@as:test"), url="some_url", token="some_token", ) @@ -226,7 +227,7 @@ def test_regex_multiple_matches( @defer.inlineCallbacks def test_interested_in_self(self) -> Generator["defer.Deferred[Any]", object, None]: # make sure invites get through - self.service.sender = "@appservice:name" + self.service.sender = UserID.from_string("@appservice:name") self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.type = "m.room.member" self.event.content = {"membership": "invite"} diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 1db630e9e47..25cf5269b8e 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -43,6 +43,7 @@ MultiWriterStreamToken, RoomStreamToken, StreamKeyType, + UserID, ) from synapse.util import Clock from synapse.util.stringutils import random_string @@ -1009,7 +1010,7 @@ def _register_application_service( appservice = ApplicationService( token=random_string(10), id=random_string(10), - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), rate_limited=False, namespaces=namespaces, supports_ephemeral=True, @@ -1087,7 +1088,7 @@ def test_application_service_receives_device_list_updates( appservice = ApplicationService( token=random_string(10), id=random_string(10), - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), rate_limited=False, namespaces={ ApplicationService.NS_USERS: [ @@ -1151,9 +1152,9 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # Define an application service for the tests self._service_token = "VERYSECRET" self._service = ApplicationService( - self._service_token, - "as1", - "@as.sender:test", + token=self._service_token, + id="as1", + sender=UserID.from_string("@as.sender:test"), namespaces={ "users": [ {"regex": 
"@_as_.*:test", "exclusive": True}, diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 080e6a70286..99a0c502111 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -34,7 +34,7 @@ from synapse.rest.client import devices, login, register from synapse.server import HomeServer from synapse.storage.databases.main.appservice import _make_exclusive_regex -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from synapse.util.task_scheduler import TaskScheduler @@ -419,7 +419,7 @@ def test_on_federation_query_user_devices_appservice(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 70fc4263e7a..323950c2f4a 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -1457,7 +1457,7 @@ def test_query_appservice(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( @@ -1525,7 +1525,7 @@ def test_query_appservice_with_fallback(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] 
self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( @@ -1751,7 +1751,7 @@ def test_query_local_devices_appservice(self) -> None: id="1234", namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index fefa2f1135c..20f2306d4c5 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -726,7 +726,7 @@ def test_registration_endpoints_removed(self) -> None: token="i_am_an_app_service", id="1234", namespaces={"users": [{"regex": r"@alice:.+", "exclusive": True}]}, - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) self.hs.get_datastores().main.services_cache = [appservice] diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index b12ffc3665b..0da423142ca 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -31,7 +31,7 @@ from synapse.rest.client import login, register, room, user_directory from synapse.server import HomeServer from synapse.storage.roommember import ProfileInfo -from synapse.types import JsonDict, UserProfile, create_requester +from synapse.types import JsonDict, UserID, UserProfile, create_requester from synapse.util import Clock from tests import unittest @@ -78,7 +78,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, # Note: this user does not match the regex above, so that tests # can distinguish the sender from the AS user. 
- sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) @@ -196,7 +196,9 @@ def test_excludes_appservice_sender(self) -> None: user = self.register_user("user", "pass") token = self.login(user, "pass") room = self.helper.create_room_as(user, is_public=True, tok=token) - self.helper.join(room, self.appservice.sender, tok=self.appservice.token) + self.helper.join( + room, self.appservice.sender.to_string(), tok=self.appservice.token + ) self._check_only_one_user_in_directory(user, room) def test_search_term_with_colon_in_it_does_not_raise(self) -> None: @@ -433,7 +435,7 @@ def test_handle_local_profile_change_with_appservice_user(self) -> None: def test_handle_local_profile_change_with_appservice_sender(self) -> None: # profile is not in directory profile = self.get_success( - self.store._get_user_in_directory(self.appservice.sender) + self.store._get_user_in_directory(self.appservice.sender.to_string()) ) self.assertIsNone(profile) @@ -441,13 +443,13 @@ def test_handle_local_profile_change_with_appservice_sender(self) -> None: profile_info = ProfileInfo(avatar_url="avatar_url", display_name="4L1c3") self.get_success( self.handler.handle_local_profile_change( - self.appservice.sender, profile_info + self.appservice.sender.to_string(), profile_info ) ) # profile is still not in directory profile = self.get_success( - self.store._get_user_in_directory(self.appservice.sender) + self.store._get_user_in_directory(self.appservice.sender.to_string()) ) self.assertIsNone(profile) diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 3898532acff..98a3a22154f 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -823,9 +823,9 @@ def prepare( # Define an application service so that we can register appservice users self._service_token = "some_token" self._service = ApplicationService( - self._service_token, - 
"as1", - "@as.sender:test", + token=self._service_token, + id="as1", + sender=UserID.from_string("@as.sender:test"), namespaces={ "users": [ {"regex": "@_as_.*:test", "exclusive": True}, diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index a85ea994dec..5343b10e921 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -764,10 +764,10 @@ def test_GET_whoami_appservices(self) -> None: as_token = "i_am_an_app_service" appservice = ApplicationService( - as_token, + token=as_token, id="1234", namespaces={"users": [{"regex": user_id, "exclusive": True}]}, - sender=user_id, + sender=UserID.from_string(user_id), ) self.hs.get_datastores().main.services_cache.append(appservice) diff --git a/tests/rest/client/test_devices.py b/tests/rest/client/test_devices.py index dd3abdebac0..b7230488e4e 100644 --- a/tests/rest/client/test_devices.py +++ b/tests/rest/client/test_devices.py @@ -472,7 +472,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="msc4190", token="some_token", hs_token="some_token", - sender="@as:example.com", + sender=UserID.from_string("@as:example.com"), namespaces={ ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] }, @@ -483,7 +483,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="regular", token="other_token", hs_token="other_token", - sender="@as2:example.com", + sender=UserID.from_string("@as2:example.com"), namespaces={ ApplicationService.NS_USERS: [{"regex": "@.*", "exclusive": False}] }, diff --git a/tests/rest/client/test_directory.py b/tests/rest/client/test_directory.py index ecf38493c3f..6e499093cfa 100644 --- a/tests/rest/client/test_directory.py +++ b/tests/rest/client/test_directory.py @@ -25,7 +25,7 @@ from synapse.rest import admin from synapse.rest.client import directory, login, room from synapse.server import HomeServer -from synapse.types import RoomAlias +from synapse.types 
import RoomAlias, UserID from synapse.util import Clock from synapse.util.stringutils import random_string @@ -140,7 +140,7 @@ def test_deleting_alias_via_directory_appservice(self) -> None: as_token, id="1234", namespaces={"aliases": [{"regex": "#asns-*", "exclusive": True}]}, - sender=user_id, + sender=UserID.from_string(user_id), ) self.hs.get_datastores().main.services_cache.append(appservice) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index c5c6604667b..fa34f4fa5e6 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -51,7 +51,7 @@ from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer -from synapse.types import JsonDict, create_requester +from synapse.types import JsonDict, UserID, create_requester from synapse.util import Clock from tests import unittest @@ -1484,7 +1484,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.service = ApplicationService( id="unique_identifier", token="some_token", - sender="@asbot:example.com", + sender=UserID.from_string("@asbot:example.com"), namespaces={ ApplicationService.NS_USERS: [ {"regex": r"@as_user.*", "exclusive": False} @@ -1496,7 +1496,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.another_service = ApplicationService( id="another__identifier", token="another_token", - sender="@as2bot:example.com", + sender=UserID.from_string("@as2bot:example.com"), namespaces={ ApplicationService.NS_USERS: [ {"regex": r"@as2_user.*", "exclusive": False} diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index b697bf6f675..638fbf00620 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -39,7 +39,7 @@ from synapse.rest.client import account, account_validity, login, logout, register, sync from 
synapse.server import HomeServer from synapse.storage._base import db_to_json -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -75,7 +75,7 @@ def test_POST_appservice_registration_valid(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), ) self.hs.get_datastores().main.services_cache.append(appservice) @@ -99,7 +99,7 @@ def test_POST_appservice_registration_no_type(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), ) self.hs.get_datastores().main.services_cache.append(appservice) @@ -129,7 +129,7 @@ def test_POST_appservice_msc4190_enabled(self) -> None: as_token, id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), msc4190_device_management=True, ) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 6c93ead3b89..7e92c2de1b9 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1426,7 +1426,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, # Note: this user does not have to match the regex above - sender="@as_main:test", + sender=UserID.from_string("@as_main:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index c26932069f8..ecb208421dc 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -38,6 +38,7 @@ _parse_words_with_regex, ) from synapse.storage.roommember import ProfileInfo +from synapse.types import UserID from synapse.util 
import Clock from tests.server import ThreadedMemoryReactorClock @@ -161,7 +162,7 @@ def make_homeserver( token="i_am_an_app_service", id="1234", namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, - sender="@as:test", + sender=UserID.from_string("@as:test"), ) mock_load_appservices = Mock(return_value=[self.appservice]) @@ -386,7 +387,7 @@ def test_population_excludes_appservice_sender(self) -> None: # Join the AS sender to rooms owned by the normal user. public, private = self._create_rooms_and_inject_memberships( - user, token, self.appservice.sender + user, token, self.appservice.sender.to_string() ) # Rebuild the directory. diff --git a/tests/test_mau.py b/tests/test_mau.py index 714854cdf29..472965e022a 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -29,7 +29,7 @@ from synapse.appservice import ApplicationService from synapse.rest.client import register, sync from synapse.server import HomeServer -from synapse.types import JsonDict +from synapse.types import JsonDict, UserID from synapse.util import Clock from tests import unittest @@ -118,7 +118,7 @@ def test_as_ignores_mau(self) -> None: ApplicationService( token=as_token, id="SomeASID", - sender="@as_sender:test", + sender=UserID.from_string("@as_sender:test"), namespaces={"users": [{"regex": "@as_*", "exclusive": True}]}, ) ) @@ -263,7 +263,7 @@ def advance_time_and_sync() -> None: ApplicationService( token=as_token_1, id="SomeASID", - sender="@as_sender_1:test", + sender=UserID.from_string("@as_sender_1:test"), namespaces={"users": [{"regex": "@as_1.*", "exclusive": True}]}, ) ) @@ -273,7 +273,7 @@ def advance_time_and_sync() -> None: ApplicationService( token=as_token_2, id="AnotherASID", - sender="@as_sender_2:test", + sender=UserID.from_string("@as_sender_2:test"), namespaces={"users": [{"regex": "@as_2.*", "exclusive": True}]}, ) ) From 64ed156532202725bfcefec19b384e3e21c8d450 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 17:32:20 -0500 Subject: 
[PATCH 16/24] Fill in `server_name` attribute for `@cached` --- synapse/handlers/profile.py | 1 + .../server_notices/server_notices_manager.py | 10 ++-- synapse/storage/controllers/state.py | 1 + .../test_module_cache_invalidation.py | 1 + tests/util/caches/test_descriptors.py | 46 +++++++++++++++++++ 5 files changed, 54 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index cdc388b4ab1..944c3c0afd4 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -55,6 +55,7 @@ class ProfileHandler: """ def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self.store = hs.get_datastores().main self.clock = hs.get_clock() self.hs = hs diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index 001a290e87e..f19995b520f 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -35,6 +35,7 @@ class ServerNoticesManager: def __init__(self, hs: "HomeServer"): + self.server_name = hs.hostname self._store = hs.get_datastores().main self._config = hs.config self._account_data_handler = hs.get_account_data_handler() @@ -44,7 +45,6 @@ def __init__(self, hs: "HomeServer"): self._message_handler = hs.get_message_handler() self._storage_controllers = hs.get_storage_controllers() self._is_mine_id = hs.is_mine_id - self._server_name = hs.hostname self._notifier = hs.get_notifier() self.server_notices_mxid = self._config.servernotices.server_notices_mxid @@ -77,7 +77,7 @@ async def send_notice( assert self.server_notices_mxid is not None requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) logger.info("Sending server notice to %s", user_id) @@ -151,7 +151,7 @@ async def get_or_create_notice_room_for_user(self, user_id: str) -> str: assert self._is_mine_id(user_id), "Cannot 
send server notices to remote users" requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) room_id = await self.maybe_get_notice_room_for_user(user_id) @@ -256,7 +256,7 @@ async def maybe_invite_user_to_room(self, user_id: str, room_id: str) -> None: """ assert self.server_notices_mxid is not None requester = create_requester( - self.server_notices_mxid, authenticated_entity=self._server_name + self.server_notices_mxid, authenticated_entity=self.server_name ) # Check whether the user has already joined or been invited to this room. If @@ -279,7 +279,7 @@ async def maybe_invite_user_to_room(self, user_id: str, room_id: str) -> None: if self._config.servernotices.server_notices_auto_join: user_requester = create_requester( - user_id, authenticated_entity=self._server_name + user_id, authenticated_entity=self.server_name ) await self._room_member_handler.update_membership( requester=user_requester, diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index f28f5d7e039..7c2946c15d9 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -68,6 +68,7 @@ class StateStorageController: """ def __init__(self, hs: "HomeServer", stores: "Databases"): + self.server_name = hs.hostname self._is_mine_id = hs.is_mine_id self._clock = hs.get_clock() self.stores = stores diff --git a/tests/replication/test_module_cache_invalidation.py b/tests/replication/test_module_cache_invalidation.py index 1e7183edaa0..1ad4468e5d9 100644 --- a/tests/replication/test_module_cache_invalidation.py +++ b/tests/replication/test_module_cache_invalidation.py @@ -35,6 +35,7 @@ class TestCache: current_value = FIRST_VALUE + server_name = "test_server" @cached() async def cached_function(self, user_id: str) -> str: diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 
6af9dfaf56a..21c190a12d2 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -66,6 +66,7 @@ def test_cache(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> str: @@ -100,6 +101,7 @@ def test_cache_num_args(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached(num_args=1) def fn(self, arg1: int, arg2: int) -> str: @@ -145,6 +147,7 @@ def fn(self, arg1: int, arg2: int, arg3: int) -> str: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" obj = Cls() obj.mock.return_value = "fish" @@ -175,6 +178,7 @@ def test_cache_kwargs(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, kwarg1: int = 2) -> str: @@ -209,6 +213,8 @@ def test_cache_with_sync_exception(self) -> None: """If the wrapped function throws synchronously, things should continue to work""" class Cls: + server_name = "test_server" + @cached() def fn(self, arg1: int) -> NoReturn: raise SynapseError(100, "mai spoon iz too big!!1") @@ -232,6 +238,7 @@ def test_cache_with_async_exception(self) -> None: class Cls: result: Optional[Deferred] = None call_count = 0 + server_name = "test_server" @cached() def fn(self, arg1: int) -> Deferred: @@ -285,6 +292,8 @@ def test_cache_logcontexts(self) -> Deferred: complete_lookup: Deferred = Deferred() class Cls: + server_name = "test_server" + @descriptors.cached() def fn(self, arg1: int) -> "Deferred[int]": @defer.inlineCallbacks @@ -327,6 +336,8 @@ def test_cache_logcontexts_with_exception(self) -> "Deferred[None]": the lookup function throws an exception""" class Cls: + server_name = 
"test_server" + @descriptors.cached() def fn(self, arg1: int) -> Deferred: @defer.inlineCallbacks @@ -369,6 +380,7 @@ def test_cache_default_args(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int = 2, arg3: int = 3) -> str: @@ -406,6 +418,7 @@ def test_cache_iterable(self) -> None: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached(iterable=True) def fn(self, arg1: int, arg2: int) -> Tuple[str, ...]: @@ -439,6 +452,8 @@ def test_cache_iterable_with_sync_exception(self) -> None: """If the wrapped function throws synchronously, things should continue to work""" class Cls: + server_name = "test_server" + @descriptors.cached(iterable=True) def fn(self, arg1: int) -> NoReturn: raise SynapseError(100, "mai spoon iz too big!!1") @@ -460,6 +475,8 @@ def test_invalidate_cascade(self) -> None: """Invalidations should cascade up through cache contexts""" class Cls: + server_name = "test_server" + @cached(cache_context=True) async def func1(self, key: str, cache_context: _CacheContext) -> int: return await self.func2(key, on_invalidate=cache_context.invalidate) @@ -486,6 +503,8 @@ def test_cancel(self) -> None: complete_lookup: "Deferred[None]" = Deferred() class Cls: + server_name = "test_server" + @cached() async def fn(self, arg1: int) -> str: await complete_lookup @@ -517,6 +536,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False + server_name = "test_server" @cached() async def fn(self, arg1: int) -> str: @@ -562,6 +582,8 @@ class CacheDecoratorTestCase(unittest.HomeserverTestCase): @defer.inlineCallbacks def test_passthrough(self) -> Generator["Deferred[Any]", object, None]: class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: return key @@ -576,6 +598,8 @@ def test_hit(self) -> 
Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -594,6 +618,8 @@ def test_invalidate(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -612,6 +638,8 @@ def func(self, key: str) -> str: def test_invalidate_missing(self) -> None: class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: return key @@ -623,6 +651,8 @@ def test_max_entries(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: + server_name = "test_server" + @cached(max_entries=10) def func(self, key: int) -> int: callcount[0] += 1 @@ -650,6 +680,8 @@ def test_prefill(self) -> None: d = defer.succeed(123) class A: + server_name = "test_server" + @cached() def func(self, key: str) -> "Deferred[int]": callcount[0] += 1 @@ -668,6 +700,8 @@ def test_invalidate_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -701,6 +735,8 @@ def test_eviction_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" + @cached(max_entries=2) def func(self, key: str) -> str: callcount[0] += 1 @@ -738,6 +774,8 @@ def test_double_get(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: + server_name = "test_server" + @cached() def func(self, key: str) -> str: callcount[0] += 1 @@ -785,6 +823,7 @@ def test_cache(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> None: @@ -850,6 +889,7 @@ def test_concurrent_lookups(self) -> None: class Cls: def __init__(self) -> None: self.mock = 
mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int) -> None: @@ -893,6 +933,7 @@ def test_invalidate(self) -> Generator["Deferred[Any]", object, None]: class Cls: def __init__(self) -> None: self.mock = mock.Mock() + self.server_name = "test_server" @descriptors.cached() def fn(self, arg1: int, arg2: int) -> None: @@ -933,6 +974,8 @@ def test_cancel(self) -> None: complete_lookup: "Deferred[None]" = Deferred() class Cls: + server_name = "test_server" + @cached() def fn(self, arg1: int) -> None: pass @@ -967,6 +1010,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False + server_name = "test_server" @cached() def fn(self, arg1: int) -> None: @@ -1010,6 +1054,8 @@ def test_num_args_mismatch(self) -> None: """ class Cls: + server_name = "test_server" + @descriptors.cached(tree=True) def fn(self, room_id: str, event_id: str) -> None: pass From e943bb12fe024b71a6f31d494d115409bad74095 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 17:58:56 -0500 Subject: [PATCH 17/24] Fix more `ApplicationService` usage/mocks --- tests/api/test_auth.py | 43 ++++++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 24c0291d81e..95a4683d039 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -60,7 +60,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # modify its config instead of the hs' self.auth_blocking = AuthBlocking(hs) - self.test_user = "@foo:bar" + self.test_user_id = UserID.from_string("@foo:bar") self.test_token = b"_test_token_" # this is overridden for the appservice tests @@ -71,7 +71,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def test_get_user_by_req_user_valid_token(self) -> None: user_info = TokenLookupResult( - user_id=self.test_user, token_id=5, device_id="device" + 
user_id=self.test_user_id.to_string(), token_id=5, device_id="device" ) self.store.get_user_by_access_token = AsyncMock(return_value=user_info) self.store.mark_access_token_as_used = AsyncMock(return_value=None) @@ -81,7 +81,7 @@ def test_get_user_by_req_user_valid_token(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user.to_string(), self.test_user) + self.assertEqual(requester.user, self.test_user_id) def test_get_user_by_req_user_bad_token(self) -> None: self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -96,7 +96,7 @@ def test_get_user_by_req_user_bad_token(self) -> None: self.assertEqual(f.errcode, "M_UNKNOWN_TOKEN") def test_get_user_by_req_user_missing_token(self) -> None: - user_info = TokenLookupResult(user_id=self.test_user, token_id=5) + user_info = TokenLookupResult(user_id=self.test_user_id.to_string(), token_id=5) self.store.get_user_by_access_token = AsyncMock(return_value=user_info) request = Mock(args={}) @@ -109,7 +109,10 @@ def test_get_user_by_req_user_missing_token(self) -> None: def test_get_user_by_req_appservice_valid_token(self) -> None: app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -119,7 +122,7 @@ def test_get_user_by_req_appservice_valid_token(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user.to_string(), self.test_user) + self.assertEqual(requester.user, self.test_user_id) def 
test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: from netaddr import IPSet @@ -127,7 +130,7 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: app_service = Mock( token="foobar", url="a_url", - sender=self.test_user, + sender=self.test_user_id.to_string(), ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -138,7 +141,7 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self) -> None: request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = self.get_success(self.auth.get_user_by_req(request)) - self.assertEqual(requester.user.to_string(), self.test_user) + self.assertEqual(requester.user, self.test_user_id) def test_get_user_by_req_appservice_valid_token_bad_ip(self) -> None: from netaddr import IPSet @@ -146,7 +149,7 @@ def test_get_user_by_req_appservice_valid_token_bad_ip(self) -> None: app_service = Mock( token="foobar", url="a_url", - sender=self.test_user, + sender=self.test_user_id, ip_range_whitelist=IPSet(["192.168.0.0/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -176,7 +179,7 @@ def test_get_user_by_req_appservice_bad_token(self) -> None: self.assertEqual(f.errcode, "M_UNKNOWN_TOKEN") def test_get_user_by_req_appservice_missing_token(self) -> None: - app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service = Mock(token="foobar", url="a_url", sender=self.test_user_id) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = AsyncMock(return_value=None) @@ -191,7 +194,10 @@ def test_get_user_by_req_appservice_missing_token(self) -> None: def test_get_user_by_req_appservice_valid_token_valid_user_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + 
token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -215,7 +221,10 @@ class FakeUserInfo: def test_get_user_by_req_appservice_valid_token_bad_user_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=False) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -238,7 +247,10 @@ def test_get_user_by_req_appservice_valid_token_valid_device_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" masquerading_device_id = b"DOPPELDEVICE" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -270,7 +282,10 @@ def test_get_user_by_req_appservice_valid_token_invalid_device_id(self) -> None: masquerading_user_id = b"@doppelganger:matrix.org" masquerading_device_id = b"NOT_A_REAL_DEVICE_ID" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None + token="foobar", + url="a_url", + sender=self.test_user_id, + ip_range_whitelist=None, ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) From b3ecd5cd888d2975fa152f037b73a5e5706e6702 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 18:03:03 -0500 Subject: [PATCH 18/24] Fix `ApplicationService` `sender` usage in test --- tests/appservice/test_appservice.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index b6c8fb0e83d..620c2b907b2 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -231,7 +231,7 @@ def test_interested_in_self(self) -> Generator["defer.Deferred[Any]", object, No self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.type = "m.room.member" self.event.content = {"membership": "invite"} - self.event.state_key = self.service.sender + self.event.state_key = self.service.sender.to_string() self.assertTrue( ( yield self.service.is_interested_in_event( From f7e6f0967fc7cb2875f3bc3bf88b22b941a52a25 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 30 Jun 2025 18:05:07 -0500 Subject: [PATCH 19/24] Fix `CacheMetric` splatting label objects as arguments --- synapse/util/caches/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 6e66a8d8219..3d4cf68d848 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -175,14 +175,14 @@ def collect(self) -> None: response_cache_hits.labels(**labels_base).set(self.hits) for reason in EvictionReason: response_cache_evicted.labels( - {**labels_base, "reason": reason.name} + **{**labels_base, "reason": reason.name} ).set(self.eviction_size_by_reason[reason]) response_cache_total.labels(**labels_base).set(self.hits + self.misses) else: cache_size.labels(**labels_base).set(len(self._cache)) cache_hits.labels(**labels_base).set(self.hits) for reason in EvictionReason: - cache_evicted.labels({**labels_base, "reason": reason.name}).set( + cache_evicted.labels(**{**labels_base, "reason": reason.name}).set( self.eviction_size_by_reason[reason] ) cache_total.labels(**labels_base).set(self.hits + self.misses) From 90780763c874f9ce47dca68e03a8f38a236fc515 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: 
Mon, 30 Jun 2025 18:06:14 -0500 Subject: [PATCH 20/24] Fix `ApplicationService` `sender` usage in test2 --- tests/rest/client/test_login.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index fa34f4fa5e6..b8bcc235e91 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -1530,7 +1530,10 @@ def test_login_appservice_user_bot(self) -> None: params = { "type": login.LoginRestServlet.APPSERVICE_TYPE, - "identifier": {"type": "m.id.user", "user": self.service.sender}, + "identifier": { + "type": "m.id.user", + "user": self.service.sender.to_string(), + }, } channel = self.make_request( b"POST", LOGIN_URL, params, access_token=self.service.token From 42699c4e70456293020319f9f4d5d5a77f6d2ecd Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 3 Jul 2025 16:30:45 -0500 Subject: [PATCH 21/24] `instance` -> `server_name` label See https://github.com/element-hq/synapse/pull/18604#discussion_r2181726951 --- synapse/metrics/__init__.py | 16 ++++++++++------ synapse/util/caches/__init__.py | 24 ++++++++++++------------ 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 2d6bb07b8c3..da58df6b968 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -66,15 +66,19 @@ HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") -INSTANCE_LABEL_NAME = "instance" +SERVER_NAME_LABEL = "server_name" """ -The standard Prometheus label name used to identify which server instance the metrics -came from. +The `server_name` label is used to identify the homeserver that the metrics correspond +to. Because we support multiple instances of Synapse running in the same process and all +metrics are in a single global `REGISTRY`, we need to manually label any metrics. + In the case of a Synapse homeserver, this should be set to the homeserver name (`hs.hostname`). 
-Normally, this would be set automatically by the Prometheus server scraping the data but -since we support multiple instances of Synapse running in the same process and all -metrics are in a single global `REGISTRY`, we need to manually label any metrics. + +We're purposely not using the `instance` label for this purpose as that should be "The +: part of the target's URL that was scraped.". Also: "In Prometheus +terms, an endpoint you can scrape is called an *instance*, usually corresponding to a +single process." (source: https://prometheus.io/docs/concepts/jobs_instances/) """ diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 3d4cf68d848..873103bc7f0 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -31,7 +31,7 @@ from prometheus_client.core import Gauge from synapse.config.cache import add_resizable_cache -from synapse.metrics import INSTANCE_LABEL_NAME +from synapse.metrics import SERVER_NAME_LABEL from synapse.util.metrics import DynamicCollectorRegistry logger = logging.getLogger(__name__) @@ -49,62 +49,62 @@ cache_size = Gauge( "synapse_util_caches_cache_size", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) cache_hits = Gauge( "synapse_util_caches_cache_hits", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) cache_evicted = Gauge( "synapse_util_caches_cache_evicted_size", "", - labelnames=["name", "reason", INSTANCE_LABEL_NAME], + labelnames=["name", "reason", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) cache_total = Gauge( "synapse_util_caches_cache", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) cache_max_size = Gauge( "synapse_util_caches_cache_max_size", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], 
registry=CACHE_METRIC_REGISTRY, ) cache_memory_usage = Gauge( "synapse_util_caches_cache_size_bytes", "Estimated memory usage of the caches", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_size = Gauge( "synapse_util_caches_response_cache_size", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_hits = Gauge( "synapse_util_caches_response_cache_hits", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_evicted = Gauge( "synapse_util_caches_response_cache_evicted_size", "", - labelnames=["name", "reason", INSTANCE_LABEL_NAME], + labelnames=["name", "reason", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) response_cache_total = Gauge( "synapse_util_caches_response_cache", "", - labelnames=["name", INSTANCE_LABEL_NAME], + labelnames=["name", SERVER_NAME_LABEL], registry=CACHE_METRIC_REGISTRY, ) @@ -168,7 +168,7 @@ def collect(self) -> None: try: labels_base = { "name": self._cache_name, - INSTANCE_LABEL_NAME: self._server_name, + SERVER_NAME_LABEL: self._server_name, } if self._cache_type == "response_cache": response_cache_size.labels(**labels_base).set(len(self._cache)) From 22cee6f38d36f49701e6d9a3b2fec6271a2f5dd9 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Thu, 3 Jul 2025 16:41:31 -0500 Subject: [PATCH 22/24] Add comment about why `self.server_name` necessary for `@cached` Add https://github.com/element-hq/synapse/pull/18604#discussion_r2181729951 --- synapse/appservice/__init__.py | 2 +- synapse/handlers/profile.py | 2 +- synapse/handlers/room_list.py | 2 +- .../server_notices/server_notices_manager.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/controllers/state.py | 2 +- .../test_module_cache_invalidation.py | 2 +- tests/util/caches/test_descriptors.py | 30 +++++++++---------- 8 
files changed, 22 insertions(+), 22 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 26b20951533..2d8d382e68c 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -97,7 +97,7 @@ def __init__( # The full Matrix ID for this application service's sender. self.sender = sender # The application service user should be part of the server's domain. - self.server_name = sender.domain + self.server_name = sender.domain # nb must be called this for @cached self.namespaces = self._check_namespaces(namespaces) self.id = id self.ip_range_whitelist = ip_range_whitelist diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 944c3c0afd4..284d3362ff8 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -55,7 +55,7 @@ class ProfileHandler: """ def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self.store = hs.get_datastores().main self.clock = hs.get_clock() self.hs = hs diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 258b015f0ff..9d4307fb078 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -61,7 +61,7 @@ class RoomListHandler: def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self.hs = hs diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index f19995b520f..19f86b5a563 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -35,7 +35,7 @@ class ServerNoticesManager: def __init__(self, hs: "HomeServer"): - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for 
@cached self._store = hs.get_datastores().main self._config = hs.config self._account_data_handler = hs.get_account_data_handler() diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 548e7df9305..d55c9e18ed6 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -55,7 +55,7 @@ def __init__( hs: "HomeServer", ): self.hs = hs - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self._clock = hs.get_clock() self.database_engine = database.engine self.db_pool = database diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 7c2946c15d9..601fa81ab20 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -68,7 +68,7 @@ class StateStorageController: """ def __init__(self, hs: "HomeServer", stores: "Databases"): - self.server_name = hs.hostname + self.server_name = hs.hostname # nb must be called this for @cached self._is_mine_id = hs.is_mine_id self._clock = hs.get_clock() self.stores = stores diff --git a/tests/replication/test_module_cache_invalidation.py b/tests/replication/test_module_cache_invalidation.py index 1ad4468e5d9..8d5d0cce9a6 100644 --- a/tests/replication/test_module_cache_invalidation.py +++ b/tests/replication/test_module_cache_invalidation.py @@ -35,7 +35,7 @@ class TestCache: current_value = FIRST_VALUE - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() async def cached_function(self, user_id: str) -> str: diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 21c190a12d2..7865a677093 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -213,7 +213,7 @@ def test_cache_with_sync_exception(self) -> None: """If the wrapped function throws synchronously, things should continue to work""" class Cls: - server_name = "test_server" + server_name = 
"test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> NoReturn: @@ -238,7 +238,7 @@ def test_cache_with_async_exception(self) -> None: class Cls: result: Optional[Deferred] = None call_count = 0 - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> Deferred: @@ -475,7 +475,7 @@ def test_invalidate_cascade(self) -> None: """Invalidations should cascade up through cache contexts""" class Cls: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached(cache_context=True) async def func1(self, key: str, cache_context: _CacheContext) -> int: @@ -536,7 +536,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() async def fn(self, arg1: int) -> str: @@ -582,7 +582,7 @@ class CacheDecoratorTestCase(unittest.HomeserverTestCase): @defer.inlineCallbacks def test_passthrough(self) -> Generator["Deferred[Any]", object, None]: class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> str: @@ -598,7 +598,7 @@ def test_hit(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> str: @@ -618,7 +618,7 @@ def test_invalidate(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> str: @@ -638,7 +638,7 @@ def func(self, key: str) -> str: def test_invalidate_missing(self) -> None: class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() 
def func(self, key: str) -> str: @@ -651,7 +651,7 @@ def test_max_entries(self) -> Generator["Deferred[Any]", object, None]: callcount = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached(max_entries=10) def func(self, key: int) -> int: @@ -680,7 +680,7 @@ def test_prefill(self) -> None: d = defer.succeed(123) class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> "Deferred[int]": @@ -700,7 +700,7 @@ def test_invalidate_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> str: @@ -735,7 +735,7 @@ def test_eviction_context(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached(max_entries=2) def func(self, key: str) -> str: @@ -774,7 +774,7 @@ def test_double_get(self) -> Generator["Deferred[Any]", object, None]: callcount2 = [0] class A: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def func(self, key: str) -> str: @@ -974,7 +974,7 @@ def test_cancel(self) -> None: complete_lookup: "Deferred[None]" = Deferred() class Cls: - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> None: @@ -1010,7 +1010,7 @@ def test_cancel_logcontexts(self) -> None: class Cls: inner_context_was_finished = False - server_name = "test_server" + server_name = "test_server" # nb must be called this for @cached @cached() def fn(self, arg1: int) -> None: From 0bb7eaee93922bdb982226bfaf356271258783e7 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 15 Jul 2025 16:21:57 -0500 Subject: [PATCH 23/24] Move 
comment to match arg order --- synapse/http/federation/well_known_resolver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index e67ae333a0d..70242ad0aef 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -96,10 +96,10 @@ def __init__( ): """ Args: + server_name: Our homeserver name (used to label metrics) (`hs.hostname`). reactor agent user_agent - server_name: Our homeserver name (used to label metrics) (`hs.hostname`). well_known_cache had_well_known_cache """ From b5f00c8377c7f4de7e56bfd2f2aa1b6f20cf19f3 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 16 Jul 2025 09:05:07 -0500 Subject: [PATCH 24/24] Remove Cargo.lock changes --- Cargo.lock | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 671eec05799..abb3c6bed36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,12 +65,6 @@ dependencies = [ "windows-targets", ] -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -380,7 +374,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" dependencies = [ - "base64 0.22.1", + "base64", "bytes", "headers-core", "http", @@ -500,7 +494,7 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" dependencies = [ - "base64 0.22.1", + "base64", "bytes", "futures-channel", "futures-core", @@ -1190,7 +1184,7 @@ version = "0.12.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" dependencies = [ - "base64 0.22.1", + "base64", "bytes", "futures-core", "futures-util", @@ -1464,7 +1458,7 @@ name = "synapse" version = "0.1.0" dependencies = [ "anyhow", - "base64 0.21.7", + "base64", "blake2", "bytes", "futures",