Move to sqlx
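Replaces prisma-client-rust with sqlx for Postgres access: the generated Prisma client, the prisma-cli helper crate, and the cargo prisma alias are removed; db::get_prisma_client becomes db::get_pg_pool and returns a sqlx PgPool; the repository, services, and views now issue sqlx::query!/query_as! calls against the cached_files table; and a hand-written CachedFile row struct is added in src/serializers.rs. Dependency versions are bumped alongside the switch.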

2024-12-24 19:43:38 +01:00
parent 83cd82b9ba
commit 1a6ef5537f
15 changed files with 671 additions and 6462 deletions

View File

@@ -1,2 +0,0 @@
-[alias]
-prisma = "run -p prisma-cli --"

Cargo.lock (generated, 2349 changed lines)

File diff suppressed because it is too large.

View File

@@ -4,43 +4,40 @@ version = "0.1.0"
 edition = "2021"
-[workspace]
-members = [
-    "prisma-cli"
-]
 [dependencies]
-once_cell = "1.20.1"
+once_cell = "1.20.2"
 dotenvy = "0.15.0"
-prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.11", default-features = false, features = ["postgresql"] }
-serde = { version = "1.0.210", features = ["derive"] }
-serde_json = "1.0.128"
-reqwest = { version = "0.12.8", features = ["json", "stream", "multipart"] }
-tokio = { version = "1.40.0", features = ["full"] }
-tokio-util = { version = "0.7.12", features = ["compat"] }
-axum = { version = "0.7.7", features = ["json"] }
+tokio = { version = "1.42.0", features = ["full"] }
+tokio-util = { version = "0.7.13", features = ["compat"] }
+axum = { version = "0.7.9", features = ["json"] }
 axum-prometheus = "0.7.0"
-chrono = "0.4.38"
-sentry = { version = "0.34.0", features = ["debug-images"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"]}
-sentry-tracing = "0.34.0"
-tower-http = { version = "0.6.1", features = ["trace"] }
+serde = { version = "1.0.216", features = ["derive"] }
+serde_json = "1.0.134"
+tracing = "0.1.41"
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
+sentry-tracing = "0.35.0"
+tower-http = { version = "0.6.2", features = ["trace"] }
+reqwest = { version = "0.12.9", features = ["json", "stream", "multipart"] }
+chrono = "0.4.39"
+sentry = { version = "0.35.0", features = ["debug-images"] }
 base64 = "0.22.1"
-futures = "0.3.30"
-futures-core = "0.3.30"
+futures = "0.3.31"
+futures-core = "0.3.31"
 async-stream = "0.3.6"
-tempfile = "3.13.0"
-bytes = "1.7.2"
+tempfile = "3.14.0"
+bytes = "1.9.0"
 teloxide = { version = "0.13.0", features = ["macros", "webhooks-axum", "cache-me", "throttle"] }
 moka = { version = "0.12.8", features = ["future"] }
+sqlx = { version = "0.8.2", features = ["runtime-tokio", "postgres", "macros"] }
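The feature selection on the new sqlx dependency matches how it is used in the rest of the diff: runtime-tokio drives the pool from the existing Tokio runtime, postgres provides the PgPool and Postgres driver used by the new get_pg_pool helper, and macros enables the compile-time checked query! and query_as! macros used in the repository, services, and views below (these macros need a DATABASE_URL or prepared sqlx metadata available at build time).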

View File

@@ -1,3 +0,0 @@
-node_modules
-# Keep environment variables out of version control
-.env

prisma-cli/Cargo.lock (generated, 3195 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,9 +0,0 @@
-[package]
-name = "prisma-cli"
-version = "0.1.0"
-edition = "2021"
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[dependencies]
-prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.10", default-features = false, features = ["postgresql"] }

View File

@@ -1,3 +0,0 @@
-fn main() {
-    prisma_client_rust_cli::run();
-}

src/.DS_Store (binary, vendored)

Binary file not shown.

View File

@@ -1,6 +1,8 @@
-use crate::{config::CONFIG, prisma::PrismaClient};
+use crate::config::CONFIG;
+use sqlx::{postgres::PgPoolOptions, PgPool};

-pub async fn get_prisma_client() -> PrismaClient {
+pub async fn get_pg_pool() -> PgPool {
    let database_url: String = format!(
        "postgresql://{}:{}@{}:{}/{}?connection_limit=10&pool_timeout=300",
        CONFIG.postgres_user,
@@ -10,9 +12,9 @@ pub async fn get_prisma_client() -> PrismaClient {
        CONFIG.postgres_db
    );

-    PrismaClient::_builder()
-        .with_url(database_url)
-        .build()
+    PgPoolOptions::new()
+        .max_connections(5)
+        .connect(&database_url)
        .await
        .unwrap()
}
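A quick way to sanity-check the new helper is a throwaway test along these lines (hypothetical, not part of this commit; it assumes the CONFIG values point at a reachable Postgres instance):

// Hypothetical smoke test for the new pool helper; not part of this commit.
#[cfg(test)]
mod tests {
    #[tokio::test]
    async fn pg_pool_connects() {
        let pool = crate::db::get_pg_pool().await;
        // Run a trivial query to confirm the pool can reach the database.
        let one: i32 = sqlx::query_scalar("SELECT 1")
            .fetch_one(&pool)
            .await
            .unwrap();
        assert_eq!(one, 1);
    }
}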

View File

@@ -1,7 +1,7 @@
pub mod config;
pub mod db;
-pub mod prisma;
pub mod repository;
+pub mod serializers;
pub mod services;
pub mod views;

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,4 @@
-use prisma_client_rust::QueryError;
-use crate::{prisma::cached_file, views::Database};
+use crate::{serializers::CachedFile, views::Database};

pub struct CachedFileRepository {
    db: Database,
@@ -15,11 +13,18 @@ impl CachedFileRepository {
        &self,
        object_id: i32,
        object_type: String,
-    ) -> Result<cached_file::Data, QueryError> {
-        self.db
-            .cached_file()
-            .delete(cached_file::object_id_object_type(object_id, object_type))
-            .exec()
-            .await
+    ) -> Result<CachedFile, sqlx::Error> {
+        sqlx::query_as!(
+            CachedFile,
+            r#"
+            DELETE FROM cached_files
+            WHERE object_id = $1 AND object_type = $2
+            RETURNING *
+            "#,
+            object_id,
+            object_type
+        )
+        .fetch_one(&self.db)
+        .await
    }
}

src/serializers.rs (new file, 8 lines added)

@@ -0,0 +1,8 @@
+#[derive(sqlx::FromRow, serde::Serialize)]
+pub struct CachedFile {
+    pub id: i32,
+    pub object_id: i32,
+    pub object_type: String,
+    pub message_id: i64,
+    pub chat_id: i64,
+}
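For context, the query_as! calls in this commit map rows of a cached_files table onto this struct. Below is a minimal sketch of a schema that would satisfy them, with column types inferred from the Rust field types and a uniqueness constraint on (object_id, object_type) implied by the lookups elsewhere in the diff; the create_cached_files_table helper is hypothetical and the real migration is not part of this commit.

use sqlx::PgPool;

// Hypothetical setup helper for local experiments; not part of this commit.
// Column types are inferred from the CachedFile struct above.
pub async fn create_cached_files_table(pool: &PgPool) -> Result<(), sqlx::Error> {
    sqlx::query(
        r#"
        CREATE TABLE IF NOT EXISTS cached_files (
            id          SERIAL PRIMARY KEY,     -- CachedFile.id: i32
            object_id   INTEGER NOT NULL,       -- CachedFile.object_id: i32
            object_type TEXT NOT NULL,          -- CachedFile.object_type: String
            message_id  BIGINT NOT NULL,        -- CachedFile.message_id: i64
            chat_id     BIGINT NOT NULL,        -- CachedFile.chat_id: i64
            UNIQUE (object_id, object_type)     -- queries look rows up by this pair
        )
        "#,
    )
    .execute(pool)
    .await?;
    Ok(())
}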

View File

@@ -14,7 +14,7 @@ use teloxide::{
};
use tracing::log;

-use crate::{config, prisma::cached_file, repository::CachedFileRepository, views::Database};
+use crate::{config, repository::CachedFileRepository, serializers::CachedFile, views::Database};

use self::{
    book_library::{get_book, get_books, types::BaseBook},
@@ -55,16 +55,18 @@ pub async fn get_cached_file_or_cache(
    object_id: i32,
    object_type: String,
    db: Database,
-) -> Option<cached_file::Data> {
-    let cached_file = db
-        .cached_file()
-        .find_unique(cached_file::object_id_object_type(
-            object_id,
-            object_type.clone(),
-        ))
-        .exec()
-        .await
-        .unwrap();
+) -> Option<CachedFile> {
+    let cached_file = sqlx::query_as!(
+        CachedFile,
+        r#"
+        SELECT * FROM cached_files
+        WHERE object_id = $1 AND object_type = $2"#,
+        object_id,
+        object_type
+    )
+    .fetch_optional(&db)
+    .await
+    .unwrap();

    match cached_file {
        Some(cached_file) => Some(cached_file),
@@ -72,7 +74,7 @@ pub async fn get_cached_file_or_cache(
    }
}

-pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) -> CacheData {
+pub async fn get_cached_file_copy(original: CachedFile, db: Database) -> CacheData {
    let bot = ROUND_ROBIN_BOT.get_bot();

    let message_id = match bot
@@ -85,11 +87,16 @@ pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) ->
    {
        Ok(v) => v,
        Err(_) => {
-            let _ = db
-                .cached_file()
-                .delete(cached_file::id::equals(original.id))
-                .exec()
-                .await;
+            sqlx::query!(
+                r#"
+                DELETE FROM cached_files
+                WHERE id = $1
+                "#,
+                original.id
+            )
+            .execute(&db)
+            .await
+            .unwrap();

            let new_original =
                get_cached_file_or_cache(original.object_id, original.object_type.clone(), db)
@@ -117,11 +124,7 @@ pub async fn get_cached_file_copy(original: cached_file::Data, db: Database) ->
    }
}

-pub async fn cache_file(
-    object_id: i32,
-    object_type: String,
-    db: Database,
-) -> Option<cached_file::Data> {
+pub async fn cache_file(object_id: i32, object_type: String, db: Database) -> Option<CachedFile> {
    let book = match get_book(object_id).await {
        Ok(v) => v,
        Err(err) => {
@@ -154,18 +157,23 @@ pub async fn cache_file(
    };

    Some(
-        db.cached_file()
-            .create(object_id, object_type, message_id, chat_id, vec![])
-            .exec()
-            .await
-            .unwrap(),
+        sqlx::query_as!(
+            CachedFile,
+            r#"INSERT INTO cached_files (object_id, object_type, message_id, chat_id)
+            VALUES ($1, $2, $3, $4)
+            RETURNING *"#,
+            object_id,
+            object_type,
+            message_id,
+            chat_id
+        )
+        .fetch_one(&db)
+        .await
+        .unwrap(),
    )
}

-pub async fn download_from_cache(
-    cached_data: cached_file::Data,
-    db: Database,
-) -> Option<DownloadResult> {
+pub async fn download_from_cache(cached_data: CachedFile, db: Database) -> Option<DownloadResult> {
    let response_task = tokio::task::spawn(download_from_telegram_files(
        cached_data.message_id,
        cached_data.chat_id,
@@ -300,14 +308,14 @@ pub async fn start_update_cache(db: Database) {
    for book in books {
        'types: for available_type in book.available_types {
-            let cached_file = match db
-                .cached_file()
-                .find_unique(cached_file::object_id_object_type(
-                    book.id,
-                    available_type.clone(),
-                ))
-                .exec()
-                .await
+            let cached_file = match sqlx::query_as!(
+                CachedFile,
+                r#"SELECT * FROM cached_files WHERE object_id = $1 AND object_type = $2"#,
+                book.id,
+                available_type.clone()
+            )
+            .fetch_optional(&db)
+            .await
            {
                Ok(v) => v,
                Err(err) => {

View File

@@ -9,27 +9,26 @@ use axum::{
};
use axum_prometheus::PrometheusMetricLayer;
use base64::{engine::general_purpose, Engine};
-use serde::Deserialize;
-use std::sync::Arc;
+use sqlx::PgPool;
use tokio_util::io::ReaderStream;
use tower_http::trace::{self, TraceLayer};
use tracing::Level;

use crate::{
    config::CONFIG,
-    db::get_prisma_client,
-    prisma::{cached_file, PrismaClient},
+    db::get_pg_pool,
+    serializers::CachedFile,
    services::{
        download_from_cache, download_utils::get_response_async_read, get_cached_file_copy,
        get_cached_file_or_cache, start_update_cache, CacheData,
    },
};

-pub type Database = Arc<PrismaClient>;
+pub type Database = PgPool;

//
-#[derive(Deserialize)]
+#[derive(serde::Deserialize)]
pub struct GetCachedFileQuery {
    pub copy: bool,
}
@@ -111,26 +110,20 @@ async fn delete_cached_file(
    Path((object_id, object_type)): Path<(i32, String)>,
    Extension(Ext { db, .. }): Extension<Ext>,
) -> impl IntoResponse {
-    let cached_file = db
-        .cached_file()
-        .find_unique(cached_file::object_id_object_type(
-            object_id,
-            object_type.clone(),
-        ))
-        .exec()
-        .await
-        .unwrap();
+    let cached_file: Option<CachedFile> = sqlx::query_as!(
+        CachedFile,
+        r#"DELETE FROM cached_files
+        WHERE object_id = $1 AND object_type = $2
+        RETURNING *"#,
+        object_id,
+        object_type
+    )
+    .fetch_optional(&db)
+    .await
+    .unwrap();

    match cached_file {
-        Some(v) => {
-            db.cached_file()
-                .delete(cached_file::object_id_object_type(object_id, object_type))
-                .exec()
-                .await
-                .unwrap();
-            Json(v).into_response()
-        }
+        Some(v) => Json::<CachedFile>(v).into_response(),
        None => StatusCode::NO_CONTENT.into_response(),
    }
}
@@ -164,11 +157,11 @@ async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, St
#[derive(Clone)]
struct Ext {
-    pub db: Arc<PrismaClient>,
+    pub db: PgPool,
}

pub async fn get_router() -> Router {
-    let db = Arc::new(get_prisma_client().await);
+    let db = get_pg_pool().await;
    let ext = Ext { db };