Move event fetch vars to EventWorkStore

This commit is contained in:
Erik Johnston 2019-12-03 14:08:48 +00:00
parent ddd48b6851
commit 6b2867096b
4 changed files with 15 additions and 14 deletions

View file

@@ -18,7 +18,6 @@ import itertools
import logging
import random
import sys
import threading
import time
from typing import Iterable, Tuple
@@ -36,7 +35,6 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
from synapse.types import get_domain_from_id
from synapse.util import batch_iter
from synapse.util.caches.descriptors import Cache
from synapse.util.stringutils import exception_to_unicode

# import a function which will return a monotonic time, in seconds
@@ -237,16 +235,6 @@ class SQLBaseStore(object):
        # to watch it
        self._txn_perf_counters = PerformanceCounters()
self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
)
self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
self._event_fetch_ongoing = 0
self._pending_ds = []
        self.database_engine = hs.database_engine

        # A set of tables that are not safe to use native upserts in.

View file

@@ -21,7 +21,7 @@ from twisted.internet import defer
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage import background_updates
-from synapse.storage._base import Cache
+from synapse.util.caches.descriptors import Cache
from synapse.util.caches import CACHE_SIZE_FACTOR

logger = logging.getLogger(__name__)

View file

@@ -31,11 +31,11 @@ from synapse.logging.opentracing import (
)
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage._base import (
-    Cache,
    SQLBaseStore,
    db_to_json,
    make_in_list_sql_clause,
)
+from synapse.util.caches.descriptors import Cache
from synapse.storage.background_updates import BackgroundUpdateStore
from synapse.types import get_verify_key_from_cross_signing_key
from synapse.util import batch_iter

View file

@@ -17,6 +17,7 @@ from __future__ import division
import itertools
import logging
import threading
from collections import namedtuple

from canonicaljson import json
@@ -34,6 +35,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
from synapse.types import get_domain_from_id
from synapse.util import batch_iter
from synapse.util.caches.descriptors import Cache
from synapse.util.metrics import Measure

logger = logging.getLogger(__name__)
@@ -53,6 +55,17 @@ _EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))

class EventsWorkerStore(SQLBaseStore):
def __init__(self, db_conn, hs):
super(EventsWorkerStore, self).__init__(db_conn, hs)
self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
)
self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
self._event_fetch_ongoing = 0
    def get_received_ts(self, event_id):
        """Get received_ts (when it was persisted) for the event.