From 8ce7cce01a01b454fe4565b61c142920419204e8 Mon Sep 17 00:00:00 2001
From: Bulat Kurbanov
Date: Fri, 30 Dec 2022 23:46:51 +0100
Subject: [PATCH] Remove data field

---
 src/app/alembic/versions/62d57916ec53_.py | 32 +++++++++++++++++
 src/app/models.py                         | 15 +++++---
 src/app/services/cache_updater.py         |  5 ++-
 src/app/utils.py                          |  3 --
 src/app/views.py                          | 44 ++++++++---------------
 5 files changed, 61 insertions(+), 38 deletions(-)
 create mode 100644 src/app/alembic/versions/62d57916ec53_.py
 delete mode 100644 src/app/utils.py

diff --git a/src/app/alembic/versions/62d57916ec53_.py b/src/app/alembic/versions/62d57916ec53_.py
new file mode 100644
index 0000000..677e0a0
--- /dev/null
+++ b/src/app/alembic/versions/62d57916ec53_.py
@@ -0,0 +1,32 @@
+"""empty message
+
+Revision ID: 62d57916ec53
+Revises: f77b0b14f9eb
+Create Date: 2022-12-30 23:30:50.867163
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "62d57916ec53"
+down_revision = "f77b0b14f9eb"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.drop_column("cached_files", "data")
+    op.create_unique_constraint(
+        "uc_cached_files_message_id_chat_id", "cached_files", ["message_id", "chat_id"]
+    )
+    op.create_index(
+        op.f("ix_cached_files_message_id"), "cached_files", ["message_id"], unique=True
+    )
+
+
+def downgrade():
+    op.add_column("cached_files", sa.Column("data", sa.JSON(), nullable=False))
+    op.drop_constraint("uc_cached_files_message_id_chat_id", "cached_files")
+    op.drop_index("ix_cached_files_message_id", "cached_files")
diff --git a/src/app/models.py b/src/app/models.py
index db0e2e3..9b677f9 100644
--- a/src/app/models.py
+++ b/src/app/models.py
@@ -11,13 +11,20 @@ class BaseMeta(ormar.ModelMeta):
 class CachedFile(ormar.Model):
     class Meta(BaseMeta):
         tablename = "cached_files"
-        constraints = [ormar.UniqueColumns("object_id", "object_type")]
+        constraints = [
+            ormar.UniqueColumns("object_id", "object_type"),
+            ormar.UniqueColumns("message_id", "chat_id"),
+        ]
 
     id: int = ormar.Integer(primary_key=True)  # type: ignore
     object_id: int = ormar.Integer(index=True)  # type: ignore
-    object_type: str = ormar.String(max_length=8, index=True)  # type: ignore
+    object_type: str = ormar.String(
+        max_length=8, index=True, unique=True
+    )  # type: ignore
 
-    message_id: int = ormar.BigInteger()  # type: ignore
+    message_id: int = ormar.BigInteger(index=True)  # type: ignore
     chat_id: int = ormar.BigInteger()  # type: ignore
 
-    data: dict = ormar.JSON()  # type: ignore
+    @property
+    def data(self) -> dict:
+        return {"message_id": self.message_id, "chat_id": self.chat_id}
diff --git a/src/app/services/cache_updater.py b/src/app/services/cache_updater.py
index 0698d07..c56d69b 100644
--- a/src/app/services/cache_updater.py
+++ b/src/app/services/cache_updater.py
@@ -107,7 +107,10 @@ async def cache_file(book: Book, file_type: str) -> Optional[CachedFile]:
         return None
 
     return await CachedFile.objects.create(
-        object_id=book.id, object_type=file_type, data=upload_data.data
+        object_id=book.id,
+        object_type=file_type,
+        message_id=upload_data.data["message_id"],
+        chat_id=upload_data.data["chat_id"],
     )
 
 
diff --git a/src/app/utils.py b/src/app/utils.py
deleted file mode 100644
index 1e6ed57..0000000
--- a/src/app/utils.py
+++ /dev/null
@@ -1,3 +0,0 @@
-class DummyWriter:
-    def write(self, line):
-        return line
diff --git a/src/app/views.py b/src/app/views.py
index 64104b6..034de9e 100644
--- a/src/app/views.py
+++ b/src/app/views.py
@@ -1,7 +1,5 @@
 import asyncio
 import base64
-import csv
-from typing import AsyncIterator
 
 from fastapi import APIRouter, Depends, HTTPException, status, Request
 from fastapi.responses import StreamingResponse
@@ -9,7 +7,6 @@ from fastapi.responses import StreamingResponse
 from starlette.background import BackgroundTask
 
 from arq.connections import ArqRedis
-from asyncpg import exceptions
 
 from app.depends import check_token
 from app.models import CachedFile as CachedFileDB
@@ -19,7 +16,6 @@ from app.services.caption_getter import get_caption
 from app.services.downloader import get_filename
 from app.services.files_client import download_file as download_file_from_cache
 from app.services.library_client import get_book
-from app.utils import DummyWriter
 
 
 router = APIRouter(
@@ -112,18 +108,22 @@ async def delete_cached_file(object_id: int, object_type: str):
 
 
 @router.post("/", response_model=CachedFile)
 async def create_or_update_cached_file(data: CreateCachedFile):
-    try:
-        return await CachedFileDB.objects.create(**data.dict())
-    except exceptions.UniqueViolationError:
-        data_dict = data.dict()
-        object_id = data_dict.pop("object_id")
-        object_type = data_dict.pop("object_type")
-        cached_file = await CachedFileDB.objects.get(
-            object_id=object_id, object_type=object_type
-        )
-        cached_file.update_from_dict(data_dict)
+    cached_file = await CachedFileDB.objects.get_or_none(
+        object_id=data.data["object_id"], object_type=data.data["object_type"]
+    )
+
+    if cached_file is not None:
+        cached_file.message_id = data.data["message_id"]
+        cached_file.chat_id = data.data["chat_id"]
         return await cached_file.update()
 
+    return await CachedFileDB.objects.create(
+        object_id=data.object_id,
+        object_type=data.object_type,
+        message_id=data.data["message_id"],
+        chat_id=data.data["chat_id"],
+    )
+
 
 @router.post("/update_cache")
@@ -133,22 +133,6 @@ async def update_cache(request: Request):
     return "Ok!"
 
 
-@router.get("/download_dump")
-async def download_dump():
-    async def get_data() -> AsyncIterator[str]:
-        writer = csv.writer(DummyWriter())
-
-        async for c_file in CachedFileDB.objects.iterate():
-            yield writer.writerow([c_file.object_id, c_file.object_type, c_file.data])
-
-    return StreamingResponse(
-        get_data(),
-        headers={
-            "Content-Disposition": "attachment; filename=dump.csv",
-        },
-    )
-
-
 healthcheck_router = APIRouter(
     tags=["healthcheck"],
 )