feat: implement caching for duty-related data and enhance performance
- Added a TTLCache class for in-memory caching of duty-related data, improving performance by reducing database queries.
- Integrated caching into the group duty pin functionality, allowing efficient retrieval of message text and next shift end times.
- Introduced new methods to invalidate caches when relevant data changes, ensuring data consistency.
- Created a new Alembic migration that adds indexes on the duties table for improved query performance.
- Updated tests to cover the new caching behavior and verify cache invalidation works as expected.
This commit is contained in:
@@ -19,7 +19,7 @@ from duty_teller.db.repository import (
|
||||
ROLE_USER,
|
||||
ROLE_ADMIN,
|
||||
)
|
||||
from duty_teller.handlers.common import is_admin_async
|
||||
from duty_teller.handlers.common import invalidate_is_admin_cache, is_admin_async
|
||||
from duty_teller.i18n import get_lang, t
|
||||
from duty_teller.utils.user import build_full_name
|
||||
|
||||
@@ -230,6 +230,7 @@ async def set_role(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
|
||||
|
||||
ok = await asyncio.get_running_loop().run_in_executor(None, do_set_role)
|
||||
if ok:
|
||||
invalidate_is_admin_cache(target_user.telegram_user_id)
|
||||
await update.message.reply_text(
|
||||
t(lang, "set_role.done", name=target_user.full_name, role=role_name)
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user