From a36327fa5a92a4b509472d038560241e72fd2108 Mon Sep 17 00:00:00 2001
From: Jason Little
Date: Fri, 22 Sep 2023 18:26:27 -0500
Subject: [PATCH] [REVERT THIS] Track some metrics to see how big this gets

---
 synapse/storage/databases/main/events_worker.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index e41a807b1299..c8e7b0301959 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -108,6 +108,10 @@
     "synapse_event_fetch_ongoing",
     "The number of event fetchers that are running",
 )
+event_metadata_cache_size_gauge = Gauge(
+    "synapse_event_metadata_cache_size",
+    "The size of the event metadata cache, in number of entries",
+)


 class InvalidEventError(Exception):
@@ -326,6 +330,11 @@ def get_chain_id_txn(txn: Cursor) -> int:

         self._un_partial_stated_events_stream_id_gen: AbstractStreamIdGenerator

+        # This is a crap way to track metrics, but it will do for this experiment.
+        self.track_metadata_size_loop = self._clock.looping_call(
+            self.track_metadata_size, 1000
+        )
+
         if isinstance(database.engine, PostgresEngine):
             self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator(
                 db_conn=db_conn,
@@ -348,6 +357,13 @@ def get_chain_id_txn(txn: Cursor) -> int:
             "stream_id",
         )

+    def track_metadata_size(self) -> None:
+        # Total entries: one per room, plus one per cached event in each room.
+        size = len(self._event_metadata)
+        for room_id in self._event_metadata:
+            size += len(self._event_metadata[room_id].events)
+        event_metadata_cache_size_gauge.set(size)
+
     def get_un_partial_stated_events_token(self, instance_name: str) -> int:
         return (
             self._un_partial_stated_events_stream_id_gen.get_current_token_for_writer(
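
Not part of the patch: a minimal sketch of how the new gauge's value could be read back, for example in a quick test, assuming the gauge is registered with prometheus_client's default REGISTRY (which is what a plain Gauge(...) constructor does, and events_worker.py imports Gauge from prometheus_client).

    from prometheus_client import REGISTRY

    # Read the gauge's current value from the default registry.
    # Returns None if the metric has not been registered yet.
    size = REGISTRY.get_sample_value("synapse_event_metadata_cache_size")
    print(f"event metadata cache entries: {size}")

In a running homeserver the same number would simply appear as synapse_event_metadata_cache_size on the Prometheus /metrics endpoint, refreshed once per second by the looping call above.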