feat: implement caching for duty-related data and enhance performance
All checks were successful
CI / lint-and-test (push) Successful in 24s
Docker Build and Release / build-and-push (push) Successful in 49s
Docker Build and Release / release (push) Successful in 8s

- Added a TTLCache class for in-memory caching of duty-related data, improving performance by reducing database queries.
- Integrated caching into the group duty pin functionality, allowing for efficient retrieval of message text and next shift end times.
- Introduced new methods to invalidate caches when relevant data changes, ensuring data consistency.
- Created a new Alembic migration to add indexes on the duties table for improved query performance.
- Updated tests to cover the new caching behavior and ensure proper functionality.
This commit is contained in:
2026-02-25 13:25:34 +03:00
parent 5334a4aeac
commit 0e8d1453e2
14 changed files with 413 additions and 113 deletions

View File

@@ -7,12 +7,15 @@ from urllib.error import URLError
from icalendar import Calendar
from duty_teller.cache import TTLCache
from duty_teller.utils.http_client import safe_urlopen
log = logging.getLogger(__name__)
# In-memory cache: url -> (cached_at_timestamp, raw_ics_bytes)
# Raw ICS bytes cache: url -> (cached_at_timestamp, raw_ics_bytes)
_ics_cache: dict[str, tuple[float, bytes]] = {}
# Parsed events cache: url -> list of {date, summary}. TTL 7 days.
_parsed_events_cache = TTLCache(ttl_seconds=7 * 24 * 3600, max_size=100)
CACHE_TTL_SECONDS = 7 * 24 * 3600 # 1 week
FETCH_TIMEOUT_SECONDS = 15
@@ -68,8 +71,8 @@ def _event_date_range(component) -> tuple[date | None, date | None]:
return (start_d, last_d)
def _get_events_from_ics(raw: bytes, from_date: str, to_date: str) -> list[dict]:
"""Parse ICS bytes and return list of {date, summary} in [from_date, to_date]. One-time events only."""
def _parse_ics_to_events(raw: bytes) -> list[dict]:
"""Parse ICS bytes and return all events as list of {date, summary}. One-time events only."""
result: list[dict] = []
try:
cal = Calendar.from_ical(raw)
@@ -79,9 +82,6 @@ def _get_events_from_ics(raw: bytes, from_date: str, to_date: str) -> list[dict]
log.warning("Failed to parse ICS: %s", e)
return result
from_d = date.fromisoformat(from_date)
to_d = date.fromisoformat(to_date)
for component in cal.walk():
if component.name != "VEVENT":
continue
@@ -95,13 +95,27 @@ def _get_events_from_ics(raw: bytes, from_date: str, to_date: str) -> list[dict]
d = start_d
while d <= end_d:
if from_d <= d <= to_d:
result.append({"date": d.strftime("%Y-%m-%d"), "summary": summary_str})
result.append({"date": d.strftime("%Y-%m-%d"), "summary": summary_str})
d += timedelta(days=1)
return result
def _filter_events_by_range(
events: list[dict], from_date: str, to_date: str
) -> list[dict]:
"""Filter events list to [from_date, to_date] range."""
from_d = date.fromisoformat(from_date)
to_d = date.fromisoformat(to_date)
return [e for e in events if from_d <= date.fromisoformat(e["date"]) <= to_d]
def _get_events_from_ics(raw: bytes, from_date: str, to_date: str) -> list[dict]:
    """Parse ICS bytes and return events in [from_date, to_date]. Wrapper for tests."""
    # Compose parse + range-filter in a single expression; kept as a
    # convenience entry point so existing tests can exercise both steps.
    return _filter_events_by_range(_parse_ics_to_events(raw), from_date, to_date)
def get_calendar_events(
url: str,
from_date: str,
@@ -135,4 +149,10 @@ def get_calendar_events(
return []
_ics_cache[url] = (now, raw)
return _get_events_from_ics(raw, from_date, to_date)
# Use parsed events cache to avoid repeated Calendar.from_ical() + walk()
cache_key = (url,)
events, found = _parsed_events_cache.get(cache_key)
if not found:
events = _parse_ics_to_events(raw)
_parsed_events_cache.set(cache_key, events)
return _filter_events_by_range(events, from_date, to_date)