Mirror of https://github.com/flibusta-apps/telegram_files_cache_server.git
Remove data field

Replaces the JSON "data" column on the cached_files table with the dedicated
message_id and chat_id columns (exposed through a backwards-compatible "data"
property on the model), reworks the cache endpoints to match, and removes the
CSV dump endpoint along with its DummyWriter helper.
src/app/alembic/versions/62d57916ec53_.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""empty message
+
+Revision ID: 62d57916ec53
+Revises: f77b0b14f9eb
+Create Date: 2022-12-30 23:30:50.867163
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "62d57916ec53"
+down_revision = "f77b0b14f9eb"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.drop_column("cached_files", "data")
+    op.create_unique_constraint(
+        "uc_cached_files_message_id_chat_id", "cached_files", ["message_id", "chat_id"]
+    )
+    op.create_index(
+        op.f("ix_cached_files_message_id"), "cached_files", ["message_id"], unique=True
+    )
+
+
+def downgrade():
+    op.add_column("cached_files", sa.Column("data", sa.JSON(), nullable=False))
+    op.drop_constraint("uc_cached_files_message_id_chat_id", "cached_files")
+    op.drop_index("ix_cached_files_message_id", "cached_files")
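Note that upgrade() drops the JSON column outright, so any message_id/chat_id
values still present only inside "data" would be lost. A backfill step along
these lines could run beforehand; this is a sketch assuming a PostgreSQL
backend (the service imports asyncpg), and it is not part of this commit:

from alembic import op
import sqlalchemy as sa


def upgrade() -> None:
    # Copy values out of the old JSON column into the BIGINT columns
    # before revision 62d57916ec53 drops it. ->> is PostgreSQL's JSON
    # text-extraction operator.
    bind = op.get_bind()
    bind.execute(
        sa.text(
            "UPDATE cached_files "
            "SET message_id = (data->>'message_id')::bigint, "
            "    chat_id = (data->>'chat_id')::bigint"
        )
    )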
@@ -11,13 +11,20 @@ class BaseMeta(ormar.ModelMeta):
 class CachedFile(ormar.Model):
     class Meta(BaseMeta):
         tablename = "cached_files"
-        constraints = [ormar.UniqueColumns("object_id", "object_type")]
+        constraints = [
+            ormar.UniqueColumns("object_id", "object_type"),
+            ormar.UniqueColumns("message_id", "chat_id"),
+        ]
 
     id: int = ormar.Integer(primary_key=True)  # type: ignore
     object_id: int = ormar.Integer(index=True)  # type: ignore
-    object_type: str = ormar.String(max_length=8, index=True)  # type: ignore
+    object_type: str = ormar.String(
+        max_length=8, index=True, unique=True
+    )  # type: ignore
 
-    message_id: int = ormar.BigInteger()  # type: ignore
+    message_id: int = ormar.BigInteger(index=True)  # type: ignore
     chat_id: int = ormar.BigInteger()  # type: ignore
 
-    data: dict = ormar.JSON()  # type: ignore
+    @property
+    def data(self) -> dict:
+        return {"message_id": self.message_id, "chat_id": self.chat_id}
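The new "data" property keeps the old read API intact: callers that accessed
cached_file.data still get a {"message_id": ..., "chat_id": ...} dict, now
derived from the two columns instead of a stored JSON blob. A minimal usage
sketch (lookup values are illustrative, and a configured database connection
is assumed):

import asyncio

from app.models import CachedFile


async def main() -> None:
    cached_file = await CachedFile.objects.get(object_id=1, object_type="fb2")
    # The property rebuilds the old dict shape from the two new columns.
    assert cached_file.data == {
        "message_id": cached_file.message_id,
        "chat_id": cached_file.chat_id,
    }


asyncio.run(main())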
@@ -107,7 +107,10 @@ async def cache_file(book: Book, file_type: str) -> Optional[CachedFile]:
         return None
 
     return await CachedFile.objects.create(
-        object_id=book.id, object_type=file_type, data=upload_data.data
+        object_id=book.id,
+        object_type=file_type,
+        message_id=upload_data.data["message_id"],
+        chat_id=upload_data.data["chat_id"],
     )
 
 
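cache_file() now pulls message_id and chat_id straight out of the upload
response instead of persisting the whole payload. The shape this implies for
upload_data.data, written out as a hypothetical annotation (not in the repo):

from typing import TypedDict


class UploadResultData(TypedDict):
    # The two keys the new code reads from upload_data.data
    message_id: int
    chat_id: int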
@@ -1,3 +0,0 @@
-class DummyWriter:
-    def write(self, line):
-        return line
@@ -1,7 +1,5 @@
 import asyncio
 import base64
-import csv
-from typing import AsyncIterator
 
 from fastapi import APIRouter, Depends, HTTPException, status, Request
 from fastapi.responses import StreamingResponse
@@ -9,7 +7,6 @@ from fastapi.responses import StreamingResponse
 from starlette.background import BackgroundTask
 
 from arq.connections import ArqRedis
-from asyncpg import exceptions
 
 from app.depends import check_token
 from app.models import CachedFile as CachedFileDB
@@ -19,7 +16,6 @@ from app.services.caption_getter import get_caption
 from app.services.downloader import get_filename
 from app.services.files_client import download_file as download_file_from_cache
 from app.services.library_client import get_book
-from app.utils import DummyWriter
 
 
 router = APIRouter(
@@ -112,18 +108,22 @@ async def delete_cached_file(object_id: int, object_type: str):
 
 @router.post("/", response_model=CachedFile)
 async def create_or_update_cached_file(data: CreateCachedFile):
-    try:
-        return await CachedFileDB.objects.create(**data.dict())
-    except exceptions.UniqueViolationError:
-        data_dict = data.dict()
-        object_id = data_dict.pop("object_id")
-        object_type = data_dict.pop("object_type")
-        cached_file = await CachedFileDB.objects.get(
-            object_id=object_id, object_type=object_type
-        )
-        cached_file.update_from_dict(data_dict)
-        return await cached_file.update()
+    cached_file = await CachedFileDB.objects.get_or_none(
+        object_id=data.data["object_id"], object_type=data.data["object_type"]
+    )
+
+    if cached_file is not None:
+        cached_file.message_id = data.data["message_id"]
+        cached_file.chat_id = data.data["chat_id"]
+
+        return await cached_file.update()
+
+    return await CachedFileDB.objects.create(
+        object_id=data.object_id,
+        object_type=data.object_type,
+        message_id=data.data["message_id"],
+        chat_id=data.data["chat_id"],
+    )
 
 
 @router.post("/update_cache")
 async def update_cache(request: Request):
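The upsert no longer relies on catching UniqueViolationError: it looks the row
up first, updates it when found, and creates it otherwise. Note that the
lookup reads object_id/object_type from the nested data dict while the create
reads the top-level fields, so the request payload evidently carries both. A
hypothetical client call (the base URL, route prefix, and auth header are
placeholders; the exact CreateCachedFile schema is not shown in this commit):

import httpx

payload = {
    "object_id": 42,
    "object_type": "fb2",
    "data": {
        "object_id": 42,  # read by the get_or_none() lookup
        "object_type": "fb2",
        "message_id": 123456,  # stored on the row
        "chat_id": -1001234567,
    },
}

response = httpx.post(
    "http://localhost:8080/files/",  # placeholder URL
    json=payload,
    headers={"Authorization": "<token>"},  # check_token dependency assumed
)
response.raise_for_status()
print(response.json())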
@@ -133,22 +133,6 @@ async def update_cache(request: Request):
     return "Ok!"
 
 
-@router.get("/download_dump")
-async def download_dump():
-    async def get_data() -> AsyncIterator[str]:
-        writer = csv.writer(DummyWriter())
-
-        async for c_file in CachedFileDB.objects.iterate():
-            yield writer.writerow([c_file.object_id, c_file.object_type, c_file.data])
-
-    return StreamingResponse(
-        get_data(),
-        headers={
-            "Content-Disposition": "attachment; filename=dump.csv",
-        },
-    )
-
-
 healthcheck_router = APIRouter(
     tags=["healthcheck"],
 )