1 Commits

Author SHA1 Message Date
95d5c8f511 Fix building 2023-08-11 22:23:58 +02:00
66 changed files with 25889 additions and 5245 deletions

2
.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[alias]
prisma = "run -p prisma-cli --"

View File

@@ -11,20 +11,20 @@ jobs:
steps: steps:
- -
name: Checkout name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v3
- -
name: Set up Docker Buildx name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v2
- id: repository_name - id: repository_name
uses: ASzc/change-string-case-action@v6 uses: ASzc/change-string-case-action@v5
with: with:
string: ${{ github.repository }} string: ${{ github.repository }}
- -
name: Login to ghcr.io name: Login to ghcr.io
uses: docker/login-action@v3 uses: docker/login-action@v2
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.actor }} username: ${{ github.actor }}
@@ -33,7 +33,7 @@ jobs:
- -
name: Build and push name: Build and push
id: docker_build id: docker_build
uses: docker/build-push-action@v6 uses: docker/build-push-action@v4
env: env:
IMAGE: ${{ steps.repository_name.outputs.lowercase }} IMAGE: ${{ steps.repository_name.outputs.lowercase }}
with: with:

1
.gitignore vendored
View File

@@ -2,4 +2,3 @@
.env .env
.vscode .vscode
.idea

View File

@@ -1,7 +0,0 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: fmt
- id: cargo-check
- id: clippy

View File

@@ -1,23 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM book_sequences bs\n JOIN books b ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "078bb62c5139d159bc17d98480846591fe42a466b788e7c27e1a64a6549bfda3"
}

View File

@@ -1,40 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n aa.id,\n aa.title,\n aa.text,\n aa.file\n FROM author_annotations aa\n WHERE aa.author = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "file",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "1875c11e55b61fd58e916f7663e2649e0c09ae604e620274718dd465e7958f64"
}

View File

@@ -1,29 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "191a5f0ddc1e4631b594f07710157cfdb3e002821f1ebb112fe772b274d08b1c"
}

View File

@@ -1,66 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n ORDER BY genres.id ASC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "code",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "meta",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false,
false,
null
]
},
"hash": "1f78b5cbdae5f9732e3637fcfd1605477bafc12a443900276c46df644a7f6d26"
}

View File

@@ -1,175 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n WHERE b.id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null
]
},
"hash": "2d44679efadfba5a350a1612b9cf1ba241c4ddb38babf0cac5bdd8049e894ac8"
}

View File

@@ -1,29 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "4144af136af6f9a148a87030ce0f7c7625f4296bbee0ffdd24a81571e2afd54e"
}

View File

@@ -1,28 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, name FROM sequences WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false
]
},
"hash": "57f37e885a05ace86e5768a8ad7ac04f5d48784885db7d04d04e277d8c51970c"
}

View File

@@ -1,23 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "600cfd73a3a1c465c19d98dc4ba6381872d82f954b0733aa9518df2ee7701b6e"
}

View File

@@ -1,47 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n ORDER BY a.id ASC\n OFFSET $1\n LIMIT $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "6e1d93e7773059ec3cb4fd29259f4f0250868c3f56e7b017ba8e5c20ccffb57d"
}

View File

@@ -1,137 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n OFFSET $3\n LIMIT $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null
]
},
"hash": "71ddfa47ccbd71543a0ff402f9b077d7035ad35fb5e714f5d88357169b46b0fe"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
null
]
},
"hash": "78e41ab1e7ca7b6acaf21aec5015a75c2962f6085a5774773fba8acb5e166e2e"
}

View File

@@ -1,276 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n genres.id,\n ROW(\n sources.id,\n sources.name\n )::source_type,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta\n )::genre_type\n )\n FROM book_genres\n JOIN genres ON genres.id = book_genres.genre\n JOIN sources ON sources.id = genres.source\n WHERE book_genres.book = b.id\n ),\n ARRAY[]::genre_type[]\n ) AS \"genres!: Vec<Genre>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id,\n b.is_deleted,\n 
b.pages\n FROM books b\n WHERE b.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "genres!: Vec<Genre>",
"type_info": {
"Custom": {
"name": "genre_type[]",
"kind": {
"Array": {
"Custom": {
"name": "genre_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"source",
{
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
],
[
"remote_id",
"Int4"
],
[
"code",
"Varchar"
],
[
"description",
"Varchar"
],
[
"meta",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 11,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 12,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 13,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 14,
"name": "is_deleted",
"type_info": "Bool"
},
{
"ordinal": 15,
"name": "pages",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
null,
false,
false,
true
]
},
"hash": "981703669c9152946a541f70a84ec5dbf481e7a28f3d5949fbc34588561104e5"
}

View File

@@ -1,35 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4",
"Int8",
"Int8"
]
},
"nullable": [
false,
null
]
},
"hash": "9bb82eaa3dcf8ead767d5f9ac9dbe8d70f8e68b12f6b004a9e495aa8ebc6d250"
}

View File

@@ -1,23 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "9be35f43d7faa0c65c88ced8ee10347ae67e6a906461fb4858fc003824f4b260"
}

View File

@@ -1,70 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ORDER BY genres.id ASC\n LIMIT $2 OFFSET $3\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "code",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "meta",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
],
"parameters": {
"Left": [
"Text",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null
]
},
"hash": "a22bfa2e92bf4a3b0710388c6c5bbfa50f24864b183bb304d35cea18babd8ce3"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM authors",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "b4733c7414c62520fb74e3302f9c01bc351153930117c58832981990db038e74"
}

View File

@@ -1,29 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "b4c8511c5b3c157a64e4783ff6acd469abb21c5fda9ed9728e36b5b1d02d9aba"
}

View File

@@ -1,155 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n bs.position\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ORDER BY bs.position\n LIMIT $3 OFFSET $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 10,
"name": "position",
"type_info": "Int2"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
false
]
},
"hash": "b6556c3bf60306517850e476d764c01e1e4538d6cf937096ad4a8e42a9657b9c"
}

View File

@@ -1,40 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id,\n title,\n text,\n file\n FROM book_annotations\n WHERE book = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "file",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "b83f6df4dea9bad87d0423ad307da8c72e2c343181afa0f5bce3e1a43dee7c8c"
}

View File

@@ -1,210 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 12,
"name": "remote_id",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
false
]
},
"hash": "bb036838069e57b6f88ec4dd3b53d6b44b1d9a4e01c5f80343e33e116e422bb5"
}

View File

@@ -1,27 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) FROM books\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4"
]
},
"nullable": [
null
]
},
"hash": "c0be89ba0ef10d97bb82401fed4196ffd2be48ce4e5586ba6da63c78793bb1db"
}

View File

@@ -1,28 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, name FROM sequences WHERE id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false
]
},
"hash": "d14c08d4d25201d30178c2313650db1aaef355968970f7f0a75b88bba209dc20"
}

View File

@@ -1,46 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "d6584aea52bc3abcbb4d9f491ef357845b562cf83d2e135b7542ebca2024a3f3"
}

View File

@@ -1,46 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "eaeeab7481036b78b9323b5d9e99e9a14a39e4f6c1489fe564045e937c38769c"
}

View File

@@ -1,277 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n genres.id,\n ROW(\n sources.id,\n sources.name\n )::source_type,\n remote_id,\n genres.code,\n genres.description,\n genres.meta\n )::genre_type\n )\n FROM book_genres\n JOIN genres ON genres.id = book_genres.genre\n JOIN sources ON sources.id = genres.source\n WHERE book_genres.book = b.id\n ),\n ARRAY[]::genre_type[]\n ) AS \"genres!: Vec<Genre>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id,\n b.is_deleted,\n b.pages\n FROM 
books b\n WHERE b.source = $1 AND b.remote_id = $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "genres!: Vec<Genre>",
"type_info": {
"Custom": {
"name": "genre_type[]",
"kind": {
"Array": {
"Custom": {
"name": "genre_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"source",
{
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
],
[
"remote_id",
"Int4"
],
[
"code",
"Varchar"
],
[
"description",
"Varchar"
],
[
"meta",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 11,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 12,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 13,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 14,
"name": "is_deleted",
"type_info": "Bool"
},
{
"ordinal": 15,
"name": "pages",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int2",
"Int4"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
null,
false,
false,
true
]
},
"hash": "fb0d1b13928611d566514fd103df973ad1c81fd60efada560e89a2b40a6d3fc1"
}

View File

@@ -1,137 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ORDER BY b.title ASC\n OFFSET $3\n LIMIT $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null
]
},
"hash": "ff9694275aad3c0cbb3bddb87a45550615d1996328ffba98a6d01aaa2b17ec2b"
}

5131
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -3,29 +3,33 @@ name = "book_library_server"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
[workspace]
members = [
"prisma-cli"
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
once_cell = "1.21.1" once_cell = "1.18.0"
tokio = { version = "1.44.2", features = ["full"] } prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
tracing = "0.1.41" tokio = { version = "1.28.2", features = ["full"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
sentry-tracing = "0.36.0"
tower-http = { version = "0.6.2", features = ["trace"] }
axum = { version = "0.8.1", features = ["json"] } tracing = "0.1.37"
axum-extra = { version ="0.10.0", features = ["query"] } tracing-subscriber = { version = "0.3.17", features = ["env-filter"]}
axum-prometheus = "0.8.0" tower-http = { version = "0.4.3", features = ["trace"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["raw_value"] }
sentry = { version = "0.36.0", features = ["debug-images"] } axum = { version = "0.6.18", features = ["json"] }
axum-extra = { version ="0.7.7", features = ["query"] }
axum-prometheus = "0.4.0"
serde = { version = "1.0.163", features = ["derive"] }
meilisearch-sdk = "0.28.0" sentry = { version = "0.31.3", features = ["debug-images"] }
rand = "0.9.0" meilisearch-sdk = "0.24.1"
chrono = { version = "0.4.40", features = ["serde"] } rand = "0.8.5"
sqlx = { version = "0.8.3", features = ["runtime-tokio", "postgres", "macros", "chrono", "json"] } chrono = "0.4.26"

View File

@@ -1,4 +1,4 @@
FROM rust:bookworm AS builder FROM rust:bullseye AS builder
WORKDIR /app WORKDIR /app
@@ -7,18 +7,15 @@ COPY . .
RUN cargo build --release --bin book_library_server RUN cargo build --release --bin book_library_server
FROM debian:bookworm-slim FROM debian:bullseye-slim
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y openssl ca-certificates curl jq \ && apt-get install -y openssl ca-certificates \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
RUN update-ca-certificates RUN update-ca-certificates
COPY ./scripts/*.sh /
RUN chmod +x /*.sh
WORKDIR /app WORKDIR /app
COPY --from=builder /app/target/release/book_library_server /usr/local/bin COPY --from=builder /app/target/release/book_library_server /usr/local/bin
CMD ["/start.sh"] ENTRYPOINT ["/usr/local/bin/book_library_server"]

3
prisma-cli/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
node_modules
# Keep environment variables out of version control
.env

4622
prisma-cli/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

9
prisma-cli/Cargo.toml Normal file
View File

@@ -0,0 +1,9 @@
# Helper crate that exposes the prisma-client-rust code generator as a
# runnable binary, wired up as the `cargo prisma` alias in .cargo/config.toml.
[package]
name = "prisma-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Keep this tag in sync with the main crate's `prisma-client-rust` dependency
# (both pin 0.6.8) so the generated client matches the runtime library.
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }

3
prisma-cli/src/main.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Entry point for the `cargo prisma` alias: delegates directly to the
/// prisma-client-rust CLI (e.g. `cargo prisma generate`), which reads
/// prisma/schema.prisma and writes the generated client to src/prisma.rs.
fn main() {
prisma_client_rust_cli::run();
}

165
prisma/schema.prisma Normal file
View File

@@ -0,0 +1,165 @@
// Prisma schema for the book-library PostgreSQL database.
// Running the generator (`cargo prisma generate`) emits the Rust client
// into ../src/prisma.rs.
generator client {
provider = "cargo prisma"
output = "../src/prisma.rs"
}
// Connection string is taken from the DATABASE_URL environment variable.
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
// Free-text annotation attached 1:1 to an author (unique on author_id).
model AuthorAnnotation {
id Int @id @default(autoincrement())
author_id Int @unique @map("author")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
author Author @relation(fields: [author_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_author_annotations_authors_id_author")
@@map("author_annotations")
}
// An author imported from an external source; (source_id, remote_id) is the
// upstream identity, enforced unique. Trigram GIN index on last_name
// supports fuzzy name search.
model Author {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
first_name String @db.VarChar(256)
last_name String @db.VarChar(256)
middle_name String? @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_authors_sources_id_source")
author_annotation AuthorAnnotation?
book_authors BookAuthor[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_authors_source_remote_id")
@@index([last_name(ops: raw("gin_trgm_ops"))], map: "tgrm_authors_l", type: Gin)
@@map("authors")
}
// Free-text annotation attached 1:1 to a book (unique on book_id).
model BookAnnotation {
id Int @id @default(autoincrement())
book_id Int @unique @map("book")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
book Book @relation(fields: [book_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_book_annotations_books_id_book")
@@map("book_annotations")
}
// Join table: book <-> author (authorship). Cascades on delete from either side.
model BookAuthor {
id Int @id @default(autoincrement())
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_book_authors_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_authors_books_book_id")
@@unique([book_id, author_id], map: "uc_book_authors_book_author")
@@index([author_id], map: "book_authors_author")
@@index([book_id], map: "book_authors_book")
@@map("book_authors")
}
// Join table: book <-> genre. Cascades on delete from either side.
model BookGenre {
id Int @id @default(autoincrement())
genre_id Int @map("genre")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_genres_books_book_id")
genre Genre @relation(fields: [genre_id], references: [id], onDelete: Cascade, map: "fk_book_genres_genres_genre_id")
@@unique([book_id, genre_id], map: "uc_book_genres_book_genre")
@@index([book_id], map: "book_genres_book")
@@index([genre_id], map: "book_genres_genre")
@@map("book_genres")
}
// Join table: book <-> sequence (series), with the book's position in the series.
model BookSequence {
id Int @id @default(autoincrement())
position Int @db.SmallInt
sequence_id Int @map("sequence")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_books_book_id")
sequence Sequence @relation(fields: [sequence_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_sequences_sequence_id")
@@unique([book_id, sequence_id], map: "uc_book_sequences_book_sequence")
@@index([book_id], map: "book_sequences_book")
@@index([sequence_id], map: "book_sequences_sequence")
@@map("book_sequences")
}
// A book record; identified upstream by (source_id, remote_id). Soft-deleted
// via is_deleted rather than removed. Trigram GIN index on title supports
// fuzzy title search.
model Book {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
title String @db.VarChar(256)
lang String @db.VarChar(3)
file_type String @db.VarChar(4)
uploaded DateTime @db.Date
is_deleted Boolean @default(false)
pages Int?
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_books_sources_id_source")
book_annotation BookAnnotation?
book_authors BookAuthor[]
book_genres BookGenre[]
book_sequences BookSequence[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_books_source_remote_id")
@@index([file_type], map: "ix_books_file_type")
@@index([title], map: "ix_books_title")
@@index([title(ops: raw("gin_trgm_ops"))], map: "trgm_books_title", type: Gin)
@@map("books")
}
// A genre imported from an external source; unique per (source_id, remote_id).
model Genre {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
code String @db.VarChar(45)
description String @db.VarChar(99)
meta String @db.VarChar(45)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_genres_sources_id_source")
book_genres BookGenre[]
@@unique([source_id, remote_id], map: "uc_genres_source_remote_id")
@@map("genres")
}
// A book series; unique per (source_id, remote_id). Trigram GIN index on
// name supports fuzzy series search.
model Sequence {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
name String @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_sequences_sources_id_source")
book_sequences BookSequence[]
@@unique([source_id, remote_id], map: "uc_sequences_source_remote_id")
@@index([name], map: "ix_sequences_name")
@@index([name(ops: raw("gin_trgm_ops"))], map: "tgrm_sequences_name", type: Gin)
@@map("sequences")
}
// An upstream data source (library/catalog) that books, authors, genres and
// sequences were imported from.
model Source {
id Int @id @default(autoincrement()) @db.SmallInt
name String @unique @db.VarChar(32)
authors Author[]
books Book[]
genres Genre[]
sequences Sequence[]
@@map("sources")
}
// Join table: book <-> author acting as translator, with ordering position.
model Translator {
id Int @id @default(autoincrement())
position Int @db.SmallInt
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_translations_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_translations_books_book_id")
@@unique([book_id, author_id], map: "uc_translations_book_author")
@@index([author_id], map: "translations_author")
@@index([book_id], map: "translations_book")
@@map("translations")
}

View File

@@ -1,12 +0,0 @@
#! /usr/bin/env sh
# Fetch a secret bundle from HashiCorp Vault (KV v2 HTTP API) and print each
# entry as a KEY=value line, so a caller can do `export $(/env.sh)`.
# Requires: curl, jq; env vars VAULT_HOST, VAULT_SECRET_PATH, VAULT_TOKEN.
response=$(curl -X 'GET' "https://$VAULT_HOST/v1/$VAULT_SECRET_PATH" -s \
    -H 'accept: application/json' \
    -H "X-Vault-Token: $VAULT_TOKEN")

# KV v2 nests the actual key/value payload under .data.data.
# Quote $response: unquoted expansion word-splits and glob-expands the JSON,
# which corrupts it before jq ever sees it.
data=$(echo "$response" | jq -r '.data.data')

for key in $(echo "$data" | jq -r 'keys[]'); do
    value=$(echo "$data" | jq -r ".\"$key\"")
    echo "$key"="$value"
done

View File

@@ -1,5 +0,0 @@
#! /usr/bin/env sh
# Container entrypoint: load environment variables printed by /env.sh
# (Vault secrets as KEY=value lines), then exec the server binary so it
# becomes PID 1 and receives signals directly.
# NOTE(review): `export $(...)` word-splits on whitespace, so any secret
# value containing spaces or shell metacharacters will break here —
# confirm all secrets are simple single-token values.
export $(/env.sh)
exec /usr/local/bin/book_library_server

View File

@@ -16,7 +16,7 @@ pub struct Config {
pub meili_host: String, pub meili_host: String,
pub meili_master_key: String, pub meili_master_key: String,
pub sentry_dsn: String, pub sentry_dsn: String
} }
impl Config { impl Config {
@@ -33,9 +33,11 @@ impl Config {
meili_host: get_env("MEILI_HOST"), meili_host: get_env("MEILI_HOST"),
meili_master_key: get_env("MEILI_MASTER_KEY"), meili_master_key: get_env("MEILI_MASTER_KEY"),
sentry_dsn: get_env("SENTRY_DSN"), sentry_dsn: get_env("SENTRY_DSN")
} }
} }
} }
pub static CONFIG: Lazy<Config> = Lazy::new(Config::load); pub static CONFIG: Lazy<Config> = Lazy::new(|| {
Config::load()
});

View File

@@ -1,10 +1,9 @@
use crate::config::CONFIG; use crate::{prisma::PrismaClient, config::CONFIG};
use sqlx::{postgres::PgPoolOptions, PgPool};
pub async fn get_postgres_pool() -> PgPool { pub async fn get_prisma_client() -> PrismaClient {
let database_url: String = format!( let database_url: String = format!(
"postgresql://{}:{}@{}:{}/{}", "postgresql://{}:{}@{}:{}/{}?connection_limit=4",
CONFIG.postgres_user, CONFIG.postgres_user,
CONFIG.postgres_password, CONFIG.postgres_password,
CONFIG.postgres_host, CONFIG.postgres_host,
@@ -12,10 +11,9 @@ pub async fn get_postgres_pool() -> PgPool {
CONFIG.postgres_db CONFIG.postgres_db
); );
PgPoolOptions::new() PrismaClient::_builder()
.max_connections(10) .with_url(database_url)
.acquire_timeout(std::time::Duration::from_secs(300)) .build()
.connect(&database_url)
.await .await
.unwrap() .unwrap()
} }

View File

@@ -1,17 +1,17 @@
pub mod config; pub mod config;
pub mod db;
pub mod meilisearch;
pub mod serializers;
pub mod views; pub mod views;
pub mod prisma;
pub mod db;
pub mod serializers;
pub mod meilisearch;
use sentry::{integrations::debug_images::DebugImagesIntegration, types::Dsn, ClientOptions};
use sentry_tracing::EventFilter;
use std::{net::SocketAddr, str::FromStr}; use std::{net::SocketAddr, str::FromStr};
use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
use tracing::info; use tracing::info;
use tracing_subscriber::{filter, layer::SubscriberExt, util::SubscriberInitExt};
use crate::views::get_router; use crate::views::get_router;
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let options = ClientOptions { let options = ClientOptions {
@@ -23,15 +23,9 @@ async fn main() {
let _guard = sentry::init(options); let _guard = sentry::init(options);
let sentry_layer = sentry_tracing::layer().event_filter(|md| match md.level() { tracing_subscriber::fmt()
&tracing::Level::ERROR => EventFilter::Event, .with_target(false)
_ => EventFilter::Ignore, .compact()
});
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer().with_target(false))
.with(filter::LevelFilter::INFO)
.with(sentry_layer)
.init(); .init();
let addr = SocketAddr::from(([0, 0, 0, 0], 8080)); let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
@@ -39,7 +33,9 @@ async fn main() {
let app = get_router().await; let app = get_router().await;
info!("Start webserver..."); info!("Start webserver...");
let listener = tokio::net::TcpListener::bind(&addr).await.unwrap(); axum::Server::bind(&addr)
axum::serve(listener, app).await.unwrap(); .serve(app.into_make_service())
.await
.unwrap();
info!("Webserver shutdown...") info!("Webserver shutdown...")
} }

View File

@@ -1,14 +1,14 @@
use meilisearch_sdk::client::Client; use meilisearch_sdk::Client;
use serde::Deserialize; use serde::Deserialize;
use crate::config::CONFIG; use crate::config::CONFIG;
pub fn get_meili_client() -> Client {
Client::new(&CONFIG.meili_host, Some(CONFIG.meili_master_key.clone())).unwrap()
}
pub trait GetId { pub fn get_meili_client() -> Client {
fn get_id(&self) -> i32; Client::new(
&CONFIG.meili_host,
Some(CONFIG.meili_master_key.clone())
)
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@@ -19,13 +19,7 @@ pub struct AuthorMeili {
pub middle_name: String, pub middle_name: String,
pub author_langs: Vec<String>, pub author_langs: Vec<String>,
pub translator_langs: Vec<String>, pub translator_langs: Vec<String>,
pub books_count: i32, pub books_count: i32
}
impl GetId for AuthorMeili {
fn get_id(&self) -> i32 {
self.id
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@@ -33,13 +27,7 @@ pub struct BookMeili {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub genres: Vec<i32>, pub genres: Vec<i32>
}
impl GetId for BookMeili {
fn get_id(&self) -> i32 {
self.id
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@@ -48,13 +36,7 @@ pub struct GenreMeili {
pub description: String, pub description: String,
pub meta: String, pub meta: String,
pub langs: Vec<String>, pub langs: Vec<String>,
pub books_count: i32, pub books_count: i32
}
impl GetId for GenreMeili {
fn get_id(&self) -> i32 {
self.id
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@@ -62,11 +44,5 @@ pub struct SequenceMeili {
pub id: i32, pub id: i32,
pub name: String, pub name: String,
pub langs: Vec<String>, pub langs: Vec<String>,
pub books_count: i32, pub books_count: i32
}
impl GetId for SequenceMeili {
fn get_id(&self) -> i32 {
self.id
}
} }

16099
src/prisma.rs Normal file

File diff suppressed because one or more lines are too long

View File

@@ -1,11 +1,6 @@
use serde::Deserialize; use serde::Deserialize;
fn default_langs() -> Vec<String> {
vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct AllowedLangs { pub struct AllowedLangs {
#[serde(default = "default_langs")] pub allowed_langs: Vec<String>
pub allowed_langs: Vec<String>,
} }

View File

@@ -1,11 +1,10 @@
use chrono::NaiveDate;
use serde::Serialize; use serde::Serialize;
use super::date::naive_date_serializer; use crate::prisma::{author, book};
use super::sequence::Sequence;
#[derive(sqlx::FromRow, sqlx::Type, Serialize)] use super::{sequence::Sequence, utils::{get_available_types, get_translators, get_sequences}};
#[sqlx(type_name = "author_type")]
#[derive(Serialize)]
pub struct Author { pub struct Author {
pub id: i32, pub id: i32,
pub first_name: String, pub first_name: String,
@@ -14,17 +13,66 @@ pub struct Author {
pub annotation_exists: bool, pub annotation_exists: bool,
} }
#[derive(sqlx::FromRow, Serialize)] impl From<author::Data> for Author {
fn from(val: author::Data) -> Self {
let author::Data {
id,
first_name,
last_name,
middle_name,
author_annotation,
..
} = val;
Author {
id,
first_name,
last_name,
middle_name: middle_name.unwrap_or("".to_string()),
annotation_exists: author_annotation.unwrap().is_some(),
}
}
}
#[derive(Serialize)]
pub struct AuthorBook { pub struct AuthorBook {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub translators: Vec<Author>, pub translators: Vec<Author>,
pub sequences: Vec<Sequence>, pub sequences: Vec<Sequence>,
pub annotation_exists: bool, pub annotation_exists: bool,
} }
impl From<book::Data> for AuthorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
translations,
book_sequences,
book_annotation,
source,
..
} = val;
AuthorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,9 +1,24 @@
use serde::Serialize; use serde::Serialize;
#[derive(sqlx::FromRow, Serialize)] use crate::prisma::author_annotation;
#[derive(Serialize)]
pub struct AuthorAnnotation { pub struct AuthorAnnotation {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub text: String, pub text: String,
pub file: Option<String>, pub file: Option<String>
}
impl From<author_annotation::Data> for AuthorAnnotation {
fn from(val: author_annotation::Data) -> Self {
let author_annotation::Data { id, title, text, file, .. } = val;
AuthorAnnotation {
id,
title,
text,
file
}
}
} }

View File

@@ -1,35 +1,78 @@
use chrono::NaiveDate; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Serialize, Deserialize};
use super::date::naive_date_serializer; use crate::prisma::book::{self};
use super::{author::Author, genre::Genre, sequence::Sequence, source::Source}; use super::{source::Source, utils::{get_available_types, get_translators, get_sequences, get_authors, get_genres}, author::Author, sequence::Sequence, genre::Genre};
fn default_langs() -> Vec<String> {
vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct BookFilter { pub struct BookFilter {
#[serde(default = "default_langs")]
pub allowed_langs: Vec<String>, pub allowed_langs: Vec<String>,
pub is_deleted: Option<bool>, pub is_deleted: Option<bool>,
pub uploaded_gte: Option<NaiveDate>, pub uploaded_gte: Option<DateTime<Utc>>,
pub uploaded_lte: Option<NaiveDate>, pub uploaded_lte: Option<DateTime<Utc>>,
pub id_gte: Option<i32>, pub id_gte: Option<i32>,
pub id_lte: Option<i32>, pub id_lte: Option<i32>,
} }
impl BookFilter {
pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
let mut result = vec![];
result.push(
book::lang::in_vec(self.allowed_langs)
);
match self.is_deleted {
Some(v) => {
result.push(
book::is_deleted::equals(v)
);
},
None => {
result.push(
book::is_deleted::equals(false)
);
},
};
if let Some(uploaded_gte) = self.uploaded_gte {
result.push(
book::uploaded::gte(uploaded_gte.into())
);
};
if let Some(uploaded_lte) = self.uploaded_lte {
result.push(
book::uploaded::lte(uploaded_lte.into())
);
};
if let Some(id_gte) = self.id_gte {
result.push(
book::id::gte(id_gte)
);
};
if let Some(id_lte) = self.id_lte {
result.push(
book::id::lte(id_lte)
);
};
result
}
}
#[derive(Serialize)] #[derive(Serialize)]
pub struct RemoteBook { pub struct RemoteBook {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub authors: Vec<Author>, pub authors: Vec<Author>,
pub translators: Vec<Author>, pub translators: Vec<Author>,
pub sequences: Vec<Sequence>, pub sequences: Vec<Sequence>,
@@ -38,22 +81,70 @@ pub struct RemoteBook {
pub remote_id: i32, pub remote_id: i32,
} }
impl From<book::Data> for RemoteBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id
}
}
}
#[derive(Serialize)] #[derive(Serialize)]
pub struct BaseBook { pub struct BaseBook {
pub id: i32, pub id: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
} }
impl From<book::Data> for BaseBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
file_type,
source,
..
} = value;
Self {
id,
available_types: get_available_types(file_type, source.clone().unwrap().name),
}
}
}
#[derive(Serialize)] #[derive(Serialize)]
pub struct DetailBook { pub struct DetailBook {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub authors: Vec<Author>, pub authors: Vec<Author>,
pub translators: Vec<Author>, pub translators: Vec<Author>,
pub sequences: Vec<Sequence>, pub sequences: Vec<Sequence>,
@@ -62,13 +153,53 @@ pub struct DetailBook {
pub remote_id: i32, pub remote_id: i32,
pub genres: Vec<Genre>, pub genres: Vec<Genre>,
pub is_deleted: bool, pub is_deleted: bool,
pub pages: Option<i32>, pub pages: Option<i32>
}
impl From<book::Data> for DetailBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
book_genres,
is_deleted,
pages,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id,
genres: get_genres(book_genres),
is_deleted,
pages,
}
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct RandomBookFilter { pub struct RandomBookFilter {
pub allowed_langs: Vec<String>, pub allowed_langs: Vec<String>,
pub genre: Option<i32>, pub genre: Option<i32>
} }
#[derive(Serialize)] #[derive(Serialize)]
@@ -77,12 +208,41 @@ pub struct Book {
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub authors: Vec<Author>, pub authors: Vec<Author>,
pub translators: Vec<Author>, pub translators: Vec<Author>,
pub sequences: Vec<Sequence>, pub sequences: Vec<Sequence>,
pub annotation_exists: bool, pub annotation_exists: bool,
} }
impl From<book::Data> for Book {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,9 +1,31 @@
use serde::Serialize; use serde::Serialize;
#[derive(sqlx::FromRow, Serialize)] use crate::prisma::book_annotation;
#[derive(Serialize)]
pub struct BookAnnotation { pub struct BookAnnotation {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub text: String, pub text: String,
pub file: Option<String>, pub file: Option<String>
}
impl From<book_annotation::Data> for BookAnnotation {
fn from(value: book_annotation::Data) -> Self {
let book_annotation::Data {
id,
title,
text,
file,
..
} = value;
Self {
id,
title,
text,
file
}
}
} }

View File

@@ -1,16 +0,0 @@
use chrono::NaiveDate;
use serde::Serializer;
const FORMAT: &str = "%Y-%m-%d";
pub mod naive_date_serializer {
use super::*;
pub fn serialize<S>(date: &NaiveDate, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let formatted_date = date.format(FORMAT).to_string();
serializer.serialize_str(&formatted_date)
}
}

View File

@@ -1,18 +1,43 @@
use serde::{Deserialize, Serialize}; use serde::{Serialize, Deserialize};
use crate::prisma::genre;
use super::source::Source; use super::source::Source;
#[derive(sqlx::FromRow, sqlx::Type, Serialize)] #[derive(Serialize)]
#[sqlx(type_name = "genre_type")]
pub struct Genre { pub struct Genre {
pub id: i32, pub id: i32,
pub source: Source, pub source: Source,
pub remote_id: i32, pub remote_id: i32,
pub code: String, pub code: String,
pub description: String, pub description: String,
pub meta: String, pub meta: String
} }
impl From<genre::Data> for Genre {
fn from(val: genre::Data) -> Self {
let genre::Data {
id,
remote_id,
code,
description,
meta,
source,
..
} = val;
Genre {
id,
remote_id,
code,
description,
meta,
source: source.unwrap().as_ref().clone().into()
}
}
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct GenreFilter { pub struct GenreFilter {
pub meta: Option<String>, pub meta: Option<String>,

View File

@@ -1,12 +1,11 @@
pub mod allowed_langs; pub mod pagination;
pub mod author; pub mod author;
pub mod author_annotation; pub mod author_annotation;
pub mod book;
pub mod book_annotation;
pub mod date;
pub mod genre; pub mod genre;
pub mod pagination;
pub mod sequence;
pub mod source; pub mod source;
pub mod translator; pub mod book;
pub mod sequence;
pub mod utils; pub mod utils;
pub mod translator;
pub mod allowed_langs;
pub mod book_annotation;

View File

@@ -1,5 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
fn default_page() -> i64 { fn default_page() -> i64 {
1 1
} }
@@ -13,16 +14,17 @@ pub struct Pagination {
#[serde(default = "default_page")] #[serde(default = "default_page")]
pub page: i64, pub page: i64,
#[serde(default = "default_size")] #[serde(default = "default_size")]
pub size: i64, pub size: i64
} }
#[derive(Serialize)] #[derive(Serialize)]
pub struct Page<T> { pub struct Page<T> {
pub items: Vec<T>, pub items: Vec<T>,
pub total: i64, pub total: i64,
pub page: i64, pub page: i64,
pub size: i64, pub size: i64,
pub pages: i64, pub pages: i64
} }
#[derive(Serialize)] #[derive(Serialize)]
@@ -32,7 +34,7 @@ pub struct PageWithParent<T, P> {
pub page: i64, pub page: i64,
pub size: i64, pub size: i64,
pub pages: i64, pub pages: i64,
pub parent_item: P, pub parent_item: P
} }
impl<T> Page<T> { impl<T> Page<T> {
@@ -42,7 +44,7 @@ impl<T> Page<T> {
total, total,
page: pagination.page, page: pagination.page,
size: pagination.size, size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size, pages: (total + pagination.size - 1) / pagination.size
} }
} }
} }
@@ -55,7 +57,7 @@ impl<T, P> PageWithParent<T, P> {
page: pagination.page, page: pagination.page,
size: pagination.size, size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size, pages: (total + pagination.size - 1) / pagination.size,
parent_item, parent_item
} }
} }
} }

View File

@@ -1,28 +1,62 @@
use chrono::NaiveDate;
use serde::Serialize; use serde::Serialize;
use super::author::Author; use crate::prisma::{sequence, book};
use super::date::naive_date_serializer;
#[derive(sqlx::FromRow, sqlx::Type, Serialize)] use super::{author::Author, utils::{get_available_types, get_authors, get_translators}};
#[sqlx(type_name = "sequence_type")]
#[derive(Serialize)]
pub struct Sequence { pub struct Sequence {
pub id: i32, pub id: i32,
pub name: String, pub name: String,
} }
#[derive(sqlx::FromRow, Serialize)] impl From<sequence::Data> for Sequence {
fn from(val: sequence::Data) -> Self {
let sequence::Data { id, name, .. } = val;
Sequence { id, name }
}
}
#[derive(Serialize)]
pub struct SequenceBook { pub struct SequenceBook {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub authors: Vec<Author>, pub authors: Vec<Author>,
pub translators: Vec<Author>, pub translators: Vec<Author>,
pub annotation_exists: bool, pub annotation_exists: bool,
pub position: i32, }
impl From<book::Data> for SequenceBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
} }

View File

@@ -1,8 +1,25 @@
use serde::Serialize; use serde::Serialize;
#[derive(sqlx::FromRow, sqlx::Type, Serialize)] use crate::prisma::source;
#[sqlx(type_name = "source_type")]
#[derive(Serialize)]
pub struct Source { pub struct Source {
pub id: i32, pub id: i32,
pub name: String, pub name: String
}
impl From<source::Data> for Source
{
fn from(val: source::Data) -> Self {
let source::Data {
id,
name,
..
} = val;
Source {
id,
name
}
}
} }

View File

@@ -1,21 +1,47 @@
use chrono::NaiveDate;
use serde::Serialize; use serde::Serialize;
use super::date::naive_date_serializer; use crate::prisma::book;
use super::{author::Author, sequence::Sequence}; use super::{author::Author, sequence::Sequence, utils::{get_available_types, get_authors, get_sequences}};
#[derive(sqlx::FromRow, Serialize)] #[derive(Serialize)]
pub struct TranslatorBook { pub struct TranslatorBook {
pub id: i32, pub id: i32,
pub title: String, pub title: String,
pub lang: String, pub lang: String,
pub file_type: String, pub file_type: String,
pub year: i32,
pub available_types: Vec<String>, pub available_types: Vec<String>,
#[serde(serialize_with = "naive_date_serializer::serialize")] pub uploaded: String,
pub uploaded: NaiveDate,
pub authors: Vec<Author>, pub authors: Vec<Author>,
pub sequences: Vec<Sequence>, pub sequences: Vec<Sequence>,
pub annotation_exists: bool, pub annotation_exists: bool,
} }
impl From<book::Data> for TranslatorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
book_sequences,
book_annotation,
source,
..
} = val;
TranslatorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type.clone(), source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,3 +1,7 @@
use crate::prisma::{translator, book_sequence, book_author, book_genre};
use super::{author::Author, sequence::Sequence, genre::Genre};
pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> { pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
if file_type == "fb2" && source_name == "flibusta" { if file_type == "fb2" && source_name == "flibusta" {
vec![ vec![
@@ -10,3 +14,43 @@ pub fn get_available_types(file_type: String, source_name: String) -> Vec<String
vec![file_type] vec![file_type]
} }
} }
pub fn get_authors(
book_authors: Option<Vec<book_author::Data>>
) -> Vec<Author> {
book_authors
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_translators(
translations: Option<Vec<translator::Data>>
) -> Vec<Author> {
translations
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_sequences(
book_sequences: Option<Vec<book_sequence::Data>>
) -> Vec<Sequence> {
book_sequences
.unwrap()
.iter()
.map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_genres(
book_genres: Option<Vec<book_genre::Data>>
) -> Vec<Genre> {
book_genres
.unwrap()
.iter()
.map(|item| item.genre.clone().unwrap().as_ref().clone().into())
.collect()
}

View File

@@ -1,185 +1,159 @@
use std::collections::HashSet; use std::collections::HashSet;
use axum::{ use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::{ use rand::Rng;
meilisearch::{get_meili_client, AuthorMeili},
serializers::{
allowed_langs::AllowedLangs,
author::{Author, AuthorBook},
author_annotation::AuthorAnnotation,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
},
};
use super::{common::get_random_item::get_random_item, Database}; use crate::{prisma::{author, author_annotation::{self}, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};
async fn get_authors(db: Database, pagination: Query<Pagination>) -> impl IntoResponse { use super::Database;
let authors_count = sqlx::query_scalar!("SELECT COUNT(*) FROM authors",)
.fetch_one(&db.0)
async fn get_authors(
db: Database,
pagination: Query<Pagination>
) -> impl IntoResponse {
let authors_count = db
.author()
.count(vec![])
.exec()
.await .await
.unwrap()
.unwrap(); .unwrap();
let authors = sqlx::query_as!( let authors = db
Author, .author()
r#" .find_many(vec![])
SELECT .with(
a.id, author::author_annotation::fetch()
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
ORDER BY a.id ASC
OFFSET $1
LIMIT $2
"#,
(pagination.page - 1) * pagination.size,
pagination.size
) )
.fetch_all(&db.0) .order_by(author::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await .await
.unwrap(); .unwrap();
let page: Page<Author> = Page::new(authors, authors_count, &pagination); let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
authors_count,
&pagination
);
Json(page) Json(page)
} }
async fn get_random_author( async fn get_random_author(
db: Database, db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
AllowedLangs,
>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let author_id = {
let client = get_meili_client(); let client = get_meili_client();
let authors_index = client.index("authors"); let authors_index = client.index("authors");
let filter = format!("author_langs IN [{}]", allowed_langs.join(", ")); let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
get_random_item::<AuthorMeili>(authors_index, filter).await let result = authors_index
}; .search()
.with_filter(&filter)
let author = sqlx::query_as!( .execute::<AuthorMeili>()
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_one(&db.0)
.await .await
.unwrap(); .unwrap();
Json::<Author>(author) let author_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<AuthorMeili>()
.await
.unwrap();
let author = &result.hits.get(0).unwrap().result;
author.id
};
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap()
.unwrap();
Json::<Author>(author.into())
} }
async fn get_author(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author = sqlx::query_as!( async fn get_author(
Author, db: Database,
r#" Path(author_id): Path<i32>
SELECT ) -> impl IntoResponse {
a.id, let author = db
a.first_name, .author()
a.last_name, .find_unique(
COALESCE(a.middle_name, '') AS "middle_name!: String", author::id::equals(author_id)
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
) )
.fetch_optional(&db.0) .with(
author::author_annotation::fetch()
)
.exec()
.await .await
.unwrap(); .unwrap();
match author { match author {
Some(author) => Json::<Author>(author).into_response(), Some(author) => Json::<Author>(author.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(), None => StatusCode::NOT_FOUND.into_response(),
} }
} }
async fn get_author_annotation(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author_annotation = sqlx::query_as!( async fn get_author_annotation(
AuthorAnnotation, db: Database,
r#" Path(author_id): Path<i32>,
SELECT ) -> impl IntoResponse {
aa.id, let author_annotation = db
aa.title, .author_annotation()
aa.text, .find_unique(
aa.file author_annotation::author_id::equals(author_id)
FROM author_annotations aa
WHERE aa.author = $1
"#,
author_id
) )
.fetch_optional(&db.0) .exec()
.await .await
.unwrap(); .unwrap();
match author_annotation { match author_annotation {
Some(annotation) => Json::<AuthorAnnotation>(annotation).into_response(), Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(), None => StatusCode::NOT_FOUND.into_response(),
} }
} }
async fn get_author_books( async fn get_author_books(
db: Database, db: Database,
Path(author_id): Path<i32>, Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let author = sqlx::query_as!( let author = db
Author, .author()
r#" .find_unique(
SELECT author::id::equals(author_id)
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
) )
.fetch_optional(&db.0) .with(
author::author_annotation::fetch()
)
.exec()
.await .await
.unwrap(); .unwrap();
@@ -188,152 +162,118 @@ async fn get_author_books(
None => return StatusCode::NOT_FOUND.into_response(), None => return StatusCode::NOT_FOUND.into_response(),
}; };
let books_count = sqlx::query_scalar!( let books_count = db
r#" .book()
SELECT COUNT(*) .count(vec![
FROM books b book::book_authors::some(vec![
JOIN book_authors ba ON b.id = ba.book book_author::author_id::equals(author_id)
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2) ]),
"#, book::lang::in_vec(allowed_langs.clone())
author_id, ])
&allowed_langs .exec()
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = sqlx::query_as!(
AuthorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
),
ARRAY[]::author_type[]
) AS "translators!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
sequences.id,
sequences.name
)::sequence_type
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
),
ARRAY[]::sequence_type[]
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
ORDER BY b.title ASC
OFFSET $3
LIMIT $4
"#,
author_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let page: PageWithParent<AuthorBook, Author> = let books = db
PageWithParent::new(author, books, books_count, &pagination); .book()
.find_many(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
author.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response() Json(page).into_response()
} }
async fn get_author_books_available_types( async fn get_author_books_available_types(
db: Database, db: Database,
Path(author_id): Path<i32>, Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
AllowedLangs,
>,
) -> impl IntoResponse { ) -> impl IntoResponse {
// TODO: refactor let books = db
.book()
let books = sqlx::query_as!( .find_many(vec![
BaseBook, book::book_authors::some(vec![
r#" book_author::author_id::equals(author_id)
SELECT ]),
b.id, book::lang::in_vec(allowed_langs)
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>" ])
FROM books b .exec()
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
"#,
author_id,
&allowed_langs
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let mut file_types: HashSet<String> = HashSet::new(); let mut file_types: HashSet<String> = HashSet::new();
for book in books { for book in books {
for file_type in book.available_types { file_types.insert(book.file_type.clone());
file_types.insert(file_type);
} }
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
} }
Json::<Vec<String>>(file_types.into_iter().collect()) Json::<Vec<String>>(file_types.into_iter().collect())
} }
async fn search_authors( async fn search_authors(
db: Database, db: Database,
Path(query): Path<String>, Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let client = get_meili_client(); let client = get_meili_client();
let authors_index = client.index("authors"); let authors_index = client.index("authors");
let filter = format!("author_langs IN [{}]", allowed_langs.join(", ")); let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index let result = authors_index
.search() .search()
.with_query(&query) .with_query(&query)
.with_filter(&filter) .with_filter(&filter)
.with_offset( .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap()) .with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>() .execute::<AuthorMeili>()
.await .await
@@ -342,25 +282,16 @@ async fn search_authors(
let total = result.estimated_total_hits.unwrap(); let total = result.estimated_total_hits.unwrap();
let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect(); let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut authors = sqlx::query_as!( let mut authors = db
Author, .author()
r#" .find_many(vec![
SELECT author::id::in_vec(author_ids.clone())
a.id, ])
a.first_name, .with(
a.last_name, author::author_annotation::fetch()
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&author_ids
) )
.fetch_all(&db.0) .order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await .await
.unwrap(); .unwrap();
@@ -371,21 +302,23 @@ async fn search_authors(
a_pos.cmp(&b_pos) a_pos.cmp(&b_pos)
}); });
let page: Page<Author> = Page::new(authors, total.try_into().unwrap(), &pagination); let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page) Json(page)
} }
pub async fn get_authors_router() -> Router { pub async fn get_authors_router() -> Router {
Router::new() Router::new()
.route("/", get(get_authors)) .route("/", get(get_authors))
.route("/random", get(get_random_author)) .route("/random", get(get_random_author))
.route("/{author_id}", get(get_author)) .route("/:author_id", get(get_author))
.route("/{author_id}/annotation", get(get_author_annotation)) .route("/:author_id/annotation", get(get_author_annotation))
.route("/{author_id}/books", get(get_author_books)) .route("/:author_id/books", get(get_author_books))
.route( .route("/:author_id/available_types", get(get_author_books_available_types))
"/{author_id}/available_types", .route("/search/:query", get(search_authors))
get(get_author_books_available_types),
)
.route("/search/{query}", get(search_authors))
} }

File diff suppressed because it is too large Load Diff

View File

@@ -1,31 +0,0 @@
use meilisearch_sdk::indexes::Index;
use rand::Rng;
use serde::de::DeserializeOwned;
use crate::meilisearch::GetId;
pub async fn get_random_item<'a, T>(index: Index, filter: String) -> i32
where
T: DeserializeOwned + GetId + 'static + Send + Sync,
{
let result = index
.search()
.with_filter(&filter)
.execute::<T>()
.await
.unwrap();
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap());
let result = index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<T>()
.await
.unwrap();
let item = &result.hits.first().unwrap().result;
item.get_id()
}

View File

@@ -1 +0,0 @@
pub mod get_random_item;

View File

@@ -1,93 +1,65 @@
use std::collections::HashSet; use std::collections::HashSet;
use axum::{extract::Query, response::IntoResponse, routing::get, Json, Router}; use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;
use crate::serializers::{ use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};
genre::{Genre, GenreFilter},
pagination::{Page, Pagination},
};
use crate::serializers::source::Source;
use super::Database; use super::Database;
pub async fn get_genres( pub async fn get_genres(
db: Database, db: Database,
pagination: Query<Pagination>, pagination: Query<Pagination>,
Query(GenreFilter { meta }): Query<GenreFilter>, Query(GenreFilter { meta }): Query<GenreFilter>
) -> impl IntoResponse { ) -> impl IntoResponse {
let genres_count = sqlx::query_scalar!( let filter = {
r#" match meta {
SELECT COUNT(*) FROM genres Some(meta) => vec![
WHERE (meta = $1 OR $1 IS NULL) genre::meta::equals(meta)
"#, ],
meta None => vec![],
) }
.fetch_one(&db.0) };
.await
.unwrap()
.unwrap();
let genres = sqlx::query_as!( let genres_count = db
Genre, .genre()
r#" .count(filter.clone())
SELECT .exec()
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
WHERE (meta = $1 OR $1 IS NULL)
ORDER BY genres.id ASC
LIMIT $2 OFFSET $3
"#,
meta,
pagination.size,
(pagination.page - 1) * pagination.size
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let page: Page<Genre> = Page::new(genres, genres_count, &pagination); let genres = db
.genre()
.find_many(filter)
.with(
genre::source::fetch()
)
.order_by(genre::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<Genre> = Page::new(
genres.iter().map(|item| item.clone().into()).collect(),
genres_count,
&pagination
);
Json(page) Json(page)
} }
pub async fn get_genre_metas(db: Database) -> impl IntoResponse {
let genres = sqlx::query_as!( pub async fn get_genre_metas(
Genre, db: Database
r#" ) -> impl IntoResponse {
SELECT let genres = db
genres.id, .genre()
genres.remote_id, .find_many(vec![])
genres.code, .order_by(genre::id::order(Direction::Asc))
genres.description, .exec()
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
ORDER BY genres.id ASC
"#
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
@@ -97,12 +69,10 @@ pub async fn get_genre_metas(db: Database) -> impl IntoResponse {
metas.insert(genre.meta.clone()); metas.insert(genre.meta.clone());
} }
let mut metas: Vec<String> = metas.into_iter().collect(); Json::<Vec<String>>(metas.into_iter().collect())
metas.sort();
Json::<Vec<String>>(metas)
} }
pub async fn get_genres_router() -> Router { pub async fn get_genres_router() -> Router {
Router::new() Router::new()
.route("/", get(get_genres)) .route("/", get(get_genres))

View File

@@ -1,35 +1,27 @@
use axum::{ use std::sync::Arc;
http::{self, Request, StatusCode},
middleware::{self, Next}, use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{Request, StatusCode, self}, response::Response};
response::Response,
routing::get,
Extension, Router,
};
use axum_prometheus::PrometheusMetricLayer; use axum_prometheus::PrometheusMetricLayer;
use sqlx::PgPool; use tower_http::trace::{TraceLayer, self};
use tower_http::trace::{self, TraceLayer};
use tracing::Level; use tracing::Level;
use crate::{config::CONFIG, db::get_postgres_pool}; use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};
use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router; use self::translators::get_translators_router;
use self::{
authors::get_authors_router, books::get_books_router, genres::get_genres_router,
sequences::get_sequences_router,
};
pub mod authors; pub mod authors;
pub mod books; pub mod books;
pub mod common;
pub mod genres; pub mod genres;
pub mod sequences; pub mod sequences;
pub mod translators; pub mod translators;
pub type Database = Extension<PgPool>;
async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, StatusCode> { pub type Database = Extension<Arc<PrismaClient>>;
let auth_header = req
.headers()
async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
let auth_header = req.headers()
.get(http::header::AUTHORIZATION) .get(http::header::AUTHORIZATION)
.and_then(|header| header.to_str().ok()); .and_then(|header| header.to_str().ok());
@@ -46,27 +38,35 @@ async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, St
Ok(next.run(req).await) Ok(next.run(req).await)
} }
pub async fn get_router() -> Router { pub async fn get_router() -> Router {
let client = get_postgres_pool().await; let client = Arc::new(get_prisma_client().await);
let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair(); let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();
let app_router = Router::new() let app_router = Router::new()
.nest("/api/v1/authors", get_authors_router().await) .nest("/api/v1/authors", get_authors_router().await)
.nest("/api/v1/translators", get_translators_router().await) .nest("/api/v1/translators", get_translators_router().await)
.nest("/api/v1/genres", get_genres_router().await) .nest("/api/v1/genres", get_genres_router().await)
.nest("/api/v1/books", get_books_router().await) .nest("/api/v1/books", get_books_router().await)
.nest("/api/v1/sequences", get_sequences_router().await) .nest("/api/v1/sequences", get_sequences_router().await)
.layer(middleware::from_fn(auth)) .layer(middleware::from_fn(auth))
.layer(Extension(client)) .layer(Extension(client))
.layer(prometheus_layer); .layer(prometheus_layer);
let metric_router = let metric_router = Router::new()
Router::new().route("/metrics", get(|| async move { metric_handle.render() })); .route("/metrics", get(|| async move { metric_handle.render() }));
Router::new().merge(app_router).merge(metric_router).layer( Router::new()
.nest("/", app_router)
.nest("/", metric_router)
.layer(
TraceLayer::new_for_http() TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new().level(Level::INFO)) .make_span_with(trace::DefaultMakeSpan::new()
.on_response(trace::DefaultOnResponse::new().level(Level::INFO)), .level(Level::INFO))
.on_response(trace::DefaultOnResponse::new()
.level(Level::INFO)),
) )
} }

View File

@@ -1,79 +1,82 @@
use std::collections::HashSet; use std::collections::HashSet;
use axum::{ use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
extract::{Path, Query}, use rand::Rng;
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::{ use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};
meilisearch::{get_meili_client, SequenceMeili},
serializers::{ use super::Database;
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::{Sequence, SequenceBook},
},
};
use super::{common::get_random_item::get_random_item, Database};
async fn get_random_sequence( async fn get_random_sequence(
db: Database, db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
AllowedLangs,
>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let sequence_id = {
let client = get_meili_client(); let client = get_meili_client();
let authors_index = client.index("sequences"); let authors_index = client.index("sequences");
let filter = format!("langs IN [{}]", allowed_langs.join(", ")); let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
get_random_item::<SequenceMeili>(authors_index, filter).await let result = authors_index
}; .search()
.with_filter(&filter)
let sequence = sqlx::query_as!( .execute::<SequenceMeili>()
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_one(&db.0)
.await .await
.unwrap(); .unwrap();
Json::<Sequence>(sequence) let sequence_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<SequenceMeili>()
.await
.unwrap();
let sequence = &result.hits.get(0).unwrap().result;
sequence.id
};
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap()
.unwrap();
Json::<Sequence>(sequence.into())
} }
async fn search_sequence( async fn search_sequence(
db: Database, db: Database,
Path(query): Path<String>, Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let client = get_meili_client(); let client = get_meili_client();
let sequence_index = client.index("sequences"); let sequence_index = client.index("sequences");
let filter = format!("langs IN [{}]", allowed_langs.join(", ")); let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let result = sequence_index let result = sequence_index
.search() .search()
.with_query(&query) .with_query(&query)
.with_filter(&filter) .with_filter(&filter)
.with_offset( .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap()) .with_limit(pagination.size.try_into().unwrap())
.execute::<SequenceMeili>() .execute::<SequenceMeili>()
.await .await
@@ -82,14 +85,12 @@ async fn search_sequence(
let total = result.estimated_total_hits.unwrap(); let total = result.estimated_total_hits.unwrap();
let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect(); let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut sequences = sqlx::query_as!( let mut sequences = db
Sequence, .sequence()
r#" .find_many(vec![
SELECT id, name FROM sequences WHERE id = ANY($1) sequence::id::in_vec(sequence_ids.clone())
"#, ])
&sequence_ids .exec()
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
@@ -100,25 +101,30 @@ async fn search_sequence(
a_pos.cmp(&b_pos) a_pos.cmp(&b_pos)
}); });
let page: Page<Sequence> = Page::new(sequences, total.try_into().unwrap(), &pagination); let page: Page<Sequence> = Page::new(
sequences.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page) Json(page)
} }
async fn get_sequence(db: Database, Path(sequence_id): Path<i32>) -> impl IntoResponse { async fn get_sequence(
let sequence = sqlx::query_as!( db: Database,
Sequence, Path(sequence_id): Path<i32>
r#" ) -> impl IntoResponse {
SELECT id, name FROM sequences WHERE id = $1 let sequence = db
"#, .sequence()
sequence_id .find_unique(
sequence::id::equals(sequence_id)
) )
.fetch_optional(&db.0) .exec()
.await .await
.unwrap(); .unwrap();
match sequence { match sequence {
Some(sequence) => Json::<Sequence>(sequence).into_response(), Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
None => StatusCode::NOT_FOUND.into_response(), None => StatusCode::NOT_FOUND.into_response(),
} }
} }
@@ -126,38 +132,30 @@ async fn get_sequence(db: Database, Path(sequence_id): Path<i32>) -> impl IntoRe
async fn get_sequence_available_types( async fn get_sequence_available_types(
db: Database, db: Database,
Path(sequence_id): Path<i32>, Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
AllowedLangs,
>,
) -> impl IntoResponse { ) -> impl IntoResponse {
// TODO: refactor let books = db
.book()
let books = sqlx::query_as!( .find_many(vec![
BaseBook, book::book_sequences::some(vec![
r#" book_sequence::sequence_id::equals(sequence_id)
SELECT ]),
b.id, book::lang::in_vec(allowed_langs)
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>" ])
FROM books b .exec()
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
"#,
sequence_id,
&allowed_langs
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let mut file_types: HashSet<String> = HashSet::new(); let mut file_types: HashSet<String> = HashSet::new();
for book in books { for book in books {
for file_type in book.available_types { file_types.insert(book.file_type.clone());
file_types.insert(file_type);
} }
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
} }
Json::<Vec<String>>(file_types.into_iter().collect()) Json::<Vec<String>>(file_types.into_iter().collect())
@@ -166,19 +164,15 @@ async fn get_sequence_available_types(
async fn get_sequence_books( async fn get_sequence_books(
db: Database, db: Database,
Path(sequence_id): Path<i32>, Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let sequence = sqlx::query_as!( let sequence = db
Sequence, .sequence()
r#" .find_unique(
SELECT id, name FROM sequences WHERE id = $1 sequence::id::equals(sequence_id)
"#,
sequence_id
) )
.fetch_optional(&db.0) .exec()
.await .await
.unwrap(); .unwrap();
@@ -187,110 +181,73 @@ async fn get_sequence_books(
None => return StatusCode::NOT_FOUND.into_response(), None => return StatusCode::NOT_FOUND.into_response(),
}; };
let books_count = sqlx::query_scalar!( let books_count = db
"SELECT COUNT(*) FROM book_sequences bs .book()
JOIN books b ON b.id = bs.book .count(vec![
WHERE book::book_sequences::some(vec![
b.is_deleted = FALSE AND book_sequence::sequence_id::equals(sequence_id)
bs.sequence = $1 AND ]),
b.lang = ANY($2)", book::lang::in_vec(allowed_langs.clone())
sequence.id, ])
&allowed_langs .exec()
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let mut books = sqlx::query_as!(
SequenceBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
),
ARRAY[]::author_type[]
) AS "translators!: Vec<Author>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
bs.position
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
ORDER BY bs.position
LIMIT $3 OFFSET $4
"#,
sequence.id,
&allowed_langs,
pagination.size,
(pagination.page - 1) * pagination.size,
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
books.sort_by(|a, b| a.position.cmp(&b.position)); let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<SequenceBook, Sequence> = let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
PageWithParent::new(sequence, books, books_count, &pagination); sequence.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response() Json(page).into_response()
} }
pub async fn get_sequences_router() -> Router { pub async fn get_sequences_router() -> Router {
Router::new() Router::new()
.route("/random", get(get_random_sequence)) .route("/random", get(get_random_sequence))
.route("/search/{query}", get(search_sequence)) .route("/search/:query", get(search_sequence))
.route("/{sequence_id}", get(get_sequence)) .route("/:sequence_id", get(get_sequence))
.route( .route("/:sequence_id/available_types", get(get_sequence_available_types))
"/{sequence_id}/available_types", .route("/:sequence_id/books", get(get_sequence_books))
get(get_sequence_available_types),
)
.route("/{sequence_id}/books", get(get_sequence_books))
} }

View File

@@ -1,54 +1,27 @@
use std::collections::HashSet; use std::collections::HashSet;
use axum::{ use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::{ use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book::{self}, translator, book_author, book_sequence}};
meilisearch::{get_meili_client, AuthorMeili},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
translator::TranslatorBook,
},
};
use super::Database; use super::Database;
async fn get_translated_books( async fn get_translated_books(
db: Database, db: Database,
Path(translator_id): Path<i32>, Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let translator = sqlx::query_as!( let translator = db
Author, .author()
r#" .find_unique(
SELECT author::id::equals(translator_id)
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
translator_id
) )
.fetch_optional(&db.0) .with(
author::author_annotation::fetch()
)
.exec()
.await .await
.unwrap(); .unwrap();
@@ -57,160 +30,118 @@ async fn get_translated_books(
None => return StatusCode::NOT_FOUND.into_response(), None => return StatusCode::NOT_FOUND.into_response(),
}; };
let books_count = sqlx::query_scalar!( let books_count = db
r#" .book()
SELECT COUNT(*) .count(vec![
FROM books b book::translations::some(vec![
JOIN book_authors ba ON b.id = ba.book translator::author_id::equals(translator_id)
WHERE ]),
b.is_deleted = false book::lang::in_vec(allowed_langs.clone())
AND ba.author = $1 ])
AND b.lang = ANY($2) .exec()
"#,
translator_id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = sqlx::query_as!(
TranslatorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
sequences.id,
sequences.name
)::sequence_type
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
),
ARRAY[]::sequence_type[]
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
OFFSET $3
LIMIT $4
"#,
translator_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let page: PageWithParent<TranslatorBook, Author> = let books = db
PageWithParent::new(translator, books, books_count, &pagination); .book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
translator.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
Json(page).into_response() Json(page).into_response()
} }
async fn get_translated_books_available_types( async fn get_translated_books_available_types(
db: Database, db: Database,
Path(translator_id): Path<i32>, Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
AllowedLangs,
>,
) -> impl IntoResponse { ) -> impl IntoResponse {
// TODO: refactor let books = db
.book()
let books = sqlx::query_as!( .find_many(vec![
BaseBook, book::translations::some(vec![
r#" translator::author_id::equals(translator_id)
SELECT ]),
b.id, book::lang::in_vec(allowed_langs)
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>" ])
FROM books b .exec()
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_all(&db.0)
.await .await
.unwrap(); .unwrap();
let mut file_types: HashSet<String> = HashSet::new(); let mut file_types: HashSet<String> = HashSet::new();
for book in books { for book in books {
for file_type in book.available_types { file_types.insert(book.file_type.clone());
file_types.insert(file_type);
} }
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
} }
Json::<Vec<String>>(file_types.into_iter().collect()) Json::<Vec<String>>(file_types.into_iter().collect())
} }
async fn search_translators( async fn search_translators(
db: Database, db: Database,
Path(query): Path<String>, Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query< axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
AllowedLangs, pagination: Query<Pagination>
>,
pagination: Query<Pagination>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let client = get_meili_client(); let client = get_meili_client();
let authors_index = client.index("authors"); let authors_index = client.index("authors");
let filter = format!("translator_langs IN [{}]", allowed_langs.join(", ")); let filter = format!(
"translator_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index let result = authors_index
.search() .search()
.with_query(&query) .with_query(&query)
.with_filter(&filter) .with_filter(&filter)
.with_offset( .with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap()) .with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>() .execute::<AuthorMeili>()
.await .await
@@ -219,25 +150,16 @@ async fn search_translators(
let total = result.estimated_total_hits.unwrap(); let total = result.estimated_total_hits.unwrap();
let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect(); let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut translators = sqlx::query_as!( let mut translators = db
Author, .author()
r#" .find_many(vec![
SELECT author::id::in_vec(translator_ids.clone())
a.id, ])
a.first_name, .with(
a.last_name, author::author_annotation::fetch()
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&translator_ids
) )
.fetch_all(&db.0) .order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await .await
.unwrap(); .unwrap();
@@ -248,17 +170,19 @@ async fn search_translators(
a_pos.cmp(&b_pos) a_pos.cmp(&b_pos)
}); });
let page: Page<Author> = Page::new(translators, total.try_into().unwrap(), &pagination); let page: Page<Author> = Page::new(
translators.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
Json(page) Json(page)
} }
pub async fn get_translators_router() -> Router { pub async fn get_translators_router() -> Router {
Router::new() Router::new()
.route("/{translator_id}/books", get(get_translated_books)) .route("/:translator_id/books", get(get_translated_books))
.route( .route("/:translator_id/available_types", get(get_translated_books_available_types))
"/{translator_id}/available_types", .route("/search/:query", get(search_translators))
get(get_translated_books_available_types),
)
.route("/search/{query}", get(search_translators))
} }