Mirror of https://github.com/flibusta-apps/library_updater.git
Synced 2025-12-06 15:45:36 +01:00

Commit: Add simple rust implementation
.github/workflows/build_docker_image.yml (vendored, 8 changes)

@@ -44,7 +44,7 @@ jobs:
           IMAGE: ${{ steps.repository_name.outputs.lowercase }}
         with:
           push: true
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64
           tags: ghcr.io/${{ env.IMAGE }}:latest
           context: .
           file: ./docker/build.dockerfile
@@ -54,9 +54,3 @@ jobs:
         uses: joelwmale/webhook-action@master
        with:
           url: ${{ secrets.WEBHOOK_URL }}
-
-      - name: Invoke deployment hook
-        uses: joelwmale/webhook-action@master
-        with:
-          url: ${{ secrets.WEBHOOK_URL2 }}
-
.github/workflows/codeql-analysis.yml (vendored, 35 deletions)

@@ -1,35 +0,0 @@
-name: "CodeQL"
-
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-  schedule:
-    - cron: '0 12 * * *'
-
-jobs:
-  analyze:
-    name: Analyze
-    runs-on: ubuntu-latest
-    permissions:
-      actions: read
-      contents: read
-      security-events: write
-
-    strategy:
-      fail-fast: false
-      matrix:
-        language: [ 'python' ]
-
-    steps:
-    - name: Checkout repository
-      uses: actions/checkout@v3
-
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
-      with:
-        languages: ${{ matrix.language }}
-
-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
.github/workflows/linters.yaml (vendored, 35 deletions)

@@ -1,35 +0,0 @@
-name: Linters
-
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-    types: [opened, synchronize, reopened]
-
-jobs:
-  Run-Pre-Commit:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 32
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: 3.9
-
-      - name: Install pre-commit
-        run: pip3 install pre-commit
-
-      - name: Pre-commit (Push)
-        env:
-          SETUPTOOLS_USE_DISTUTILS: stdlib
-        if: ${{ github.event_name == 'push' }}
-        run: pre-commit run --source ${{ github.event.before }} --origin ${{ github.event.after }} --show-diff-on-failure
-
-      - name: Pre-commit (Pull-Request)
-        env:
-          SETUPTOOLS_USE_DISTUTILS: stdlib
-        if: ${{ github.event_name == 'pull_request' }}
-        run: pre-commit run --source ${{ github.event.pull_request.base.sha }} --origin ${{ github.event.pull_request.head.sha }} --show-diff-on-failure
.gitignore (vendored, 6 changes)

@@ -1,5 +1 @@
-__pycache__
-
-venv
-
-.vscode
+/target
.pre-commit-config.yaml (20 deletions)

@@ -1,20 +0,0 @@
-exclude: 'docs|node_modules|migrations|.git|.tox'
-
-repos:
-  - repo: https://github.com/ambv/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-        language_version: python3.9
-  - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
-    hooks:
-      - id: isort
-  - repo: https://github.com/csachs/pyproject-flake8
-    rev: v0.0.1a3
-    hooks:
-      - id: pyproject-flake8
-        additional_dependencies: [
-          '-e', 'git+https://github.com/pycqa/pyflakes@1911c20#egg=pyflakes',
-          '-e', 'git+https://github.com/pycqa/pycodestyle@d219c68#egg=pycodestyle',
-        ]
Cargo.lock (generated, new file, 1879 additions)
File diff suppressed because it is too large.

Cargo.toml (new file, 25 additions)
@@ -0,0 +1,25 @@
+[package]
+name = "library_updater"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+sql-parse = "0.8.0"
+tokio = { version = "1.21.1", features = ["full"] }
+tokio-postgres = { version = "0.7.6", features = ["with-chrono-0_4"] }
+deadpool-postgres = "0.10.2"
+async-trait = "0.1.57"
+chrono = "0.4.22"
+futures = "0.3.24"
+reqwest = { version = "0.11.11", features = ["stream"] }
+tokio-util = { version = "0.7.3", features = ["compat"] }
+async-compression = { version = "0.3.14", features = ["futures-io", "gzip"] }
+sentry = "0.27.0"
+lazy_static = "1.4.0"
+log = "0.4"
+env_logger = "0.9.0"
+serde = { version = "1.0.144", features = ["derive"] }
+serde_json = "1.0.85"
+tokio-cron-scheduler = "0.8.1"
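This manifest wires up an async runtime (tokio), HTTP streaming (reqwest), Postgres access (tokio-postgres / deadpool-postgres), logging (log / env_logger), Sentry error reporting, and a cron scheduler. A minimal sketch of an entry point these dependencies would support; the loop body and the one-hour interval are illustrative assumptions, not the crate's actual main.rs:

use std::time::Duration;

#[tokio::main]
async fn main() {
    // env_logger configures the `log` macros from the RUST_LOG environment variable.
    env_logger::init();

    loop {
        log::info!("starting library update pass");
        // ...download dumps with reqwest, gunzip them with async-compression,
        // and write rows through a deadpool-postgres pool...
        tokio::time::sleep(Duration::from_secs(3600)).await;
    }
}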
docker/build.dockerfile (29 changes)

@@ -1,29 +1,29 @@
-FROM ghcr.io/kurbezz/base_docker_images:3.10-postgres-asyncpg-poetry-buildtime as build-image
+FROM lukemathwalker/cargo-chef:latest-rust-slim-buster AS chef
+WORKDIR /app

-WORKDIR /root/poetry
-COPY pyproject.toml poetry.lock /root/poetry/
+FROM chef AS planner
+COPY . .
+RUN cargo chef prepare --recipe-path recipe.json

-ENV VENV_PATH=/opt/venv
-RUN poetry export --without-hashes > requirements.txt \
-    && . "${VENV_PATH}/bin/activate" \
-    && pip install -r requirements.txt --no-cache-dir
-
-FROM ghcr.io/kurbezz/base_docker_images:3.10-postgres-runtime as runtime-image
-
+FROM chef AS builder
 RUN apt-get update \
-    && apt-get install --no-install-recommends -y wget default-mysql-client-core \
+    && apt-get install -y pkg-config libssl-dev \
     && rm -rf /var/lib/apt/lists/*

-ENV VENV_PATH=/opt/venv
-ENV PATH="$VENV_PATH/bin:$PATH"
+COPY --from=planner /app/recipe.json recipe.json
+RUN cargo chef cook --release --recipe-path recipe.json
+COPY . .
+RUN cargo build --release --bin library_updater

-WORKDIR /app/
+FROM debian:bullseye-slim

-COPY ./src/ /app/
-COPY --from=build-image $VENV_PATH $VENV_PATH
-COPY ./scripts/healthcheck.py /root/
+RUN apt-get update \
+    && apt-get install -y openssl ca-certificates \
+    && rm -rf /var/lib/apt/lists/*

-EXPOSE 8080
+RUN update-ca-certificates

-CMD gunicorn -k uvicorn.workers.UvicornWorker main:app --bind 0.0.0.0:8080
+WORKDIR /app
+
+COPY --from=builder /app/target/release/library_updater /usr/local/bin
+ENTRYPOINT ["/usr/local/bin/library_updater"]
poetry.lock (generated, 608 deletions)
File diff suppressed because it is too large.
pyproject.toml (68 deletions)

@@ -1,68 +0,0 @@
-[tool.poetry]
-name = "library_updater"
-version = "0.1.0"
-description = ""
-authors = ["Kurbanov Bulat <kurbanovbul@gmail.com>"]
-
-[tool.poetry.dependencies]
-python = "^3.9"
-fastapi = ">=0.71.0"
-pydantic = {extras = ["dotenv"], version = "^1.9.0"}
-httpx = ">=0.23.0"
-aiologger = "^0.6.1"
-asyncpg = "^0.25.0"
-aiomysql = "^0.0.22"
-uvicorn = {extras = ["standart"], version = "^0.16.0"}
-arq = "^0.22"
-uvloop = "^0.16.0"
-gunicorn = "^20.1.0"
-sentry-sdk = "^1.5.10"
-
-[tool.poetry.dev-dependencies]
-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
-
-[tool.black]
-include = '\.pyi?$'
-exclude = '''
-/(
-    \.git
-  | \.vscode
-  | \venv
-  | alembic
-)/
-'''
-
-[tool.flake8]
-ignore = [
-    # Whitespace before ':' ( https://www.flake8rules.com/rules/E203.html )
-    "E203"
-]
-max-line-length=88
-max-complexity = 15
-select = "B,C,E,F,W,T4,B9"
-exclude = [
-    # No need to traverse our git directory
-    ".git",
-    # There's no value in checking cache directories
-    "__pycache__",
-    # The conf file is mostly autogenerated, ignore it
-    "src/app/alembic/*",
-    # The old directory contains Flake8 2.0
-]
-per-file-ignores = [
-    "src/app/services/updaters/fl_updater.py:E501",
-]
-
-[tool.isort]
-profile = "black"
-only_sections = true
-force_sort_within_sections = true
-lines_after_imports = 2
-lexicographical = true
-sections = ["FUTURE", "STDLIB", "BASEFRAMEWORK", "FRAMEWORKEXT", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
-known_baseframework = ["fastapi",]
-known_frameworkext = ["starlette",]
-src_paths = ["src"]
(deleted file, 11 deletions)

@@ -1,11 +0,0 @@
-from fastapi import Security, HTTPException, status
-
-from core.auth import default_security
-from core.config import env_config
-
-
-async def check_token(api_key: str = Security(default_security)):
-    if api_key != env_config.API_KEY:
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN, detail="Wrong api key!"
-        )
(deleted file, 8 deletions)

@@ -1,8 +0,0 @@
-from enum import Enum
-
-
-class UpdaterTypes(Enum):
-    FL = "fl"
-
-
-UPDATERS: dict[UpdaterTypes, str] = {UpdaterTypes.FL: "run_fl_update"}
src/app/services/updaters/fl_updater.py (865 deletions)

@@ -1,865 +0,0 @@
-from enum import Enum
-import logging
-import time
-from typing import Optional
-
-import aiomysql
-from arq.connections import ArqRedis
-from arq.worker import Retry
-import asyncpg
-import httpx
-
-from app.services.updaters.utils.cmd import run_cmd
-from app.services.updaters.utils.tasks import is_jobs_complete
-from app.services.updaters.utils.text import remove_wrong_ch, fix_annotation_text
-from core.config import env_config
-
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.INFO)
-
-ch = logging.StreamHandler()
-ch.setLevel(logging.INFO)
-
-logger.addHandler(ch)
-
-
-class JobId(Enum):
-    import_libavtor = "import_libavtor"
-    import_libbook = "import_libbook"
-    import_libavtorname = "import_libavtorname"
-    import_libtranslator = "import_libtranslator"
-    import_libseqname = "import_libseqname"
-    import_libseq = "import_libseq"
-    import_libgenre = "import_libgenre"
-    import_libgenrelist = "import_libgenrelsit"
-    import_lib_b_annotations = "import_lib_b_annotations"
-    import_lib_b_annotations_pics = "import_lib_b_annotations_pics"
-    import_lib_a_annotations = "import_lib_a_annotations"
-    import_lib_a_annotations_pics = "import_lib_a_annotations_pics"
-
-    update_authors = "update_fl_authors"
-    update_books = "update_fl_books"
-    update_books_authors = "update_fl_books_authors"
-    update_translations = "update_fl_translations"
-    update_sequences = "update_fl_sequences"
-    update_sequences_info = "update_fl_sequences_info"
-    update_book_annotations = "update_fl_book_annotations"
-    update_book_annotations_pic = "update_fl_book_annotations_pic"
-    update_author_annotations = "update_fl_author_annotations"
-    update_author_annotations_pics = "update_fl_author_annotations_pics"
-    update_genres = "update_fl_genres"
-    update_books_genres = "update_fl_books_genres"
-
-    webhook = "update_fl_webhook"
-
-
-async def import_fl_dump(ctx: dict, filename: str, *args, **kwargs):
-    stdout, stderr, return_code = await run_cmd(
-        f"wget -O - {env_config.FL_BASE_URL}/sql/{filename}.gz | gunzip | "
-        f"mysql -h {env_config.MYSQL_HOST} -u {env_config.MYSQL_USER} "
-        f'-p"{env_config.MYSQL_PASSWORD}" {env_config.MYSQL_DB_NAME}'
-    )
-
-    if return_code != 0:
-        raise InterruptedError(stdout, stderr)
-
-
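The Rust rewrite can replace this wget | gunzip | mysql shell pipeline with the reqwest ("stream") and async-compression ("futures-io", "gzip") features declared in the new Cargo.toml, streaming and decompressing the dump in-process. A sketch of that pattern; the function name and simplified error handling are assumptions for illustration, not code from this commit:

use async_compression::futures::bufread::GzipDecoder;
use futures::io::{AsyncReadExt, BufReader};
use futures::TryStreamExt;

// Hypothetical helper: download a gzipped SQL dump and gunzip it in-process.
async fn fetch_dump(url: &str) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let resp = reqwest::get(url).await?.error_for_status()?;

    // Adapt the HTTP byte stream into an AsyncRead, then decompress on the fly.
    let reader = resp
        .bytes_stream()
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
        .into_async_read();
    let mut decoder = GzipDecoder::new(BufReader::new(reader));

    let mut sql = Vec::new();
    decoder.read_to_end(&mut sql).await?;
    Ok(sql)
}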
-async def get_db_cons() -> tuple[asyncpg.Connection, aiomysql.Connection]:
-    postgres = await asyncpg.connect(
-        database=env_config.POSTGRES_DB_NAME,
-        host=env_config.POSTGRES_HOST,
-        port=env_config.POSTGRES_PORT,
-        user=env_config.POSTGRES_USER,
-        password=env_config.POSTGRES_PASSWORD,
-    )
-
-    mysql = await aiomysql.connect(
-        db=env_config.MYSQL_DB_NAME,
-        host=env_config.MYSQL_HOST,
-        port=env_config.MYSQL_PORT,
-        user=env_config.MYSQL_USER,
-        password=env_config.MYSQL_PASSWORD,
-    )
-
-    assert postgres
-
-    return postgres, mysql
-
-
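On the Rust side, this asyncpg connection maps naturally onto the deadpool-postgres crate pulled in by the new Cargo.toml. A sketch of the equivalent pool setup; the literal values are placeholders standing in for the env_config settings above, not values from this commit:

use deadpool_postgres::{Config, Pool, Runtime};
use tokio_postgres::NoTls;

// Hypothetical pool construction; each placeholder mirrors an env_config key above.
fn make_pool() -> Result<Pool, deadpool_postgres::CreatePoolError> {
    let mut cfg = Config::new();
    cfg.host = Some("localhost".into());     // env_config.POSTGRES_HOST
    cfg.port = Some(5432);                   // env_config.POSTGRES_PORT
    cfg.user = Some("postgres".into());      // env_config.POSTGRES_USER
    cfg.password = Some("password".into());  // env_config.POSTGRES_PASSWORD
    cfg.dbname = Some("library".into());     // env_config.POSTGRES_DB_NAME
    cfg.create_pool(Some(Runtime::Tokio1), NoTls)
}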
async def get_source(postgres: asyncpg.Connection) -> int:
|
|
||||||
source_row = await postgres.fetchrow(
|
|
||||||
"SELECT id FROM sources WHERE name = 'flibusta';"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not source_row:
|
|
||||||
await postgres.execute("INSERT INTO sources (name) VALUES ('flibusta');")
|
|
||||||
|
|
||||||
source_row = await postgres.fetchrow(
|
|
||||||
"SELECT id FROM sources WHERE name = 'flibusta';"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert source_row
|
|
||||||
|
|
||||||
return source_row["id"]
|
|
||||||
|
|
||||||
|
|
||||||
async def update_fl_authors(ctx: dict, *args, prefix: Optional[str] = None, **kwargs):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool, [JobId.import_libavtorname.value], prefix=prefix
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    def prepare_author(row: list):
        return [
            source,
            row[0],
            remove_wrong_ch(row[1]),
            remove_wrong_ch(row[2]),
            remove_wrong_ch(row[3]),
        ]

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_author(
            source_ smallint, remote_id_ int, first_name_ varchar, last_name_ varchar, middle_name_ varchar
        ) RETURNS void AS $$
        BEGIN
            IF EXISTS (SELECT * FROM authors WHERE source = source_ AND remote_id = remote_id_) THEN
                UPDATE authors SET first_name = first_name_, last_name = last_name_, middle_name = middle_name_
                WHERE source = source_ AND remote_id = remote_id_;
                RETURN;
            END IF;

            INSERT INTO authors (source, remote_id, first_name, last_name, middle_name)
            VALUES (source_, remote_id_, first_name_, last_name_, middle_name_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT AvtorId, FirstName, LastName, MiddleName FROM libavtorname;"
        )

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_author($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar));",
                (prepare_author(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


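# Every update_fl_* task below repeats the shape of update_fl_authors: gate on
# dependencies, (re)create a per-table plpgsql upsert function, then stream the
# MySQL table through a server-side cursor and push batches into Postgres.
# Routing each row through "SELECT update_*(...)" keeps the exists-check and
# the write inside a single database round trip. A minimal sketch of the shared
# skeleton (names are illustrative, not an actual helper in this module):
#
#     async def generic_update(upsert_sql, select_sql, prepare, batch=4096):
#         postgres, mysql = await get_db_cons()
#         async with mysql.cursor(aiomysql.SSCursor) as cursor:
#             await cursor.execute(select_sql)
#             while rows := await cursor.fetchmany(batch):
#                 await postgres.executemany(upsert_sql, map(prepare, rows))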
async def update_fl_books(ctx: dict, *args, prefix: Optional[str] = None, **kwargs):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool, [JobId.import_libbook.value], prefix=prefix
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    replace_dict = {"ru-": "ru", "ru~": "ru"}

    def fix_lang(lang: str) -> str:
        lower_lang = lang.lower()
        replaced_lang = replace_dict.get(lower_lang, lower_lang)
        return replaced_lang

    def prepare_book(row: list):
        return [
            source,
            row[0],
            remove_wrong_ch(row[1]),
            fix_lang(row[2]),
            row[3],
            row[4],
            row[5] == "1",
            row[6],
        ]

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_book(
            source_ smallint, remote_id_ int, title_ varchar, lang_ varchar,
            file_type_ varchar, uploaded_ date, is_deleted_ boolean, pages_ int
        ) RETURNS void AS $$
        BEGIN
            IF EXISTS (SELECT * FROM books WHERE source = source_ AND remote_id = remote_id_) THEN
                UPDATE books SET title = title_, lang = lang_, file_type = file_type_,
                    uploaded = uploaded_, is_deleted = is_deleted_, pages = pages_
                WHERE source = source_ AND remote_id = remote_id_;
                RETURN;
            END IF;

            INSERT INTO books (source, remote_id, title, lang, file_type, uploaded, is_deleted, pages)
            VALUES (source_, remote_id_, title_, lang_, file_type_, uploaded_, is_deleted_, pages_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT BookId, Title, Lang, FileType, Time, Deleted, Pages FROM libbook;"
        )

        while rows := await cursor.fetchmany(1024):
            await postgres.executemany(
                "SELECT update_book($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar), $6, $7, $8);",
                (prepare_book(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_books_authors(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libavtor.value,
            JobId.update_authors.value,
            JobId.update_books.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_book_author(source_ smallint, book_ integer, author_ integer) RETURNS void AS $$
        DECLARE
            book_id integer := -1;
            author_id integer := -1;
        BEGIN
            SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
            SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;

            IF EXISTS (SELECT * FROM book_authors WHERE book = book_id AND author = author_id) THEN
                RETURN;
            END IF;

            INSERT INTO book_authors (book, author) VALUES (book_id, author_id);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT BookId, AvtorId FROM libavtor;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_book_author($1, $2, $3);",
                ((source, *row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_translations(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libtranslator.value,
            JobId.update_authors.value,
            JobId.update_books.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_translation(source_ smallint, book_ integer, author_ integer, position_ smallint) RETURNS void AS $$
        DECLARE
            book_id integer := -1;
            author_id integer := -1;
        BEGIN
            SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
            SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;

            IF EXISTS (SELECT * FROM translations WHERE book = book_id AND author = author_id) THEN
                UPDATE translations SET position = position_
                WHERE book = book_id AND author = author_id;
                RETURN;
            END IF;

            INSERT INTO translations (book, author, position) VALUES (book_id, author_id, position_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT BookId, TranslatorId, Pos FROM libtranslator "
            "WHERE BookId IN (SELECT BookId FROM libbook);"
        )

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_translation($1, $2, $3, $4);",
                ((source, *row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_sequences(ctx: dict, *args, prefix: Optional[str] = None, **kwargs):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libseqname.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    def prepare_sequence(row: list):
        return [
            source,
            row[0],
            remove_wrong_ch(row[1]),
        ]

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_sequences(source_ smallint, remote_id_ int, name_ varchar) RETURNS void AS $$
        BEGIN
            IF EXISTS (SELECT * FROM sequences WHERE source = source_ AND remote_id = remote_id_) THEN
                UPDATE sequences SET name = name_ WHERE source = source_ AND remote_id = remote_id_;
                RETURN;
            END IF;

            INSERT INTO sequences (source, remote_id, name) VALUES (source_, remote_id_, name_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT SeqId, SeqName FROM libseqname;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_sequences($1, $2, cast($3 as varchar));",
                (prepare_sequence(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_sequences_info(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libseq.value,
            JobId.update_sequences.value,
            JobId.update_books.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_book_sequence(source_ smallint, book_ integer, sequence_ integer, position_ smallint) RETURNS void AS $$
        DECLARE
            book_id integer := -1;
            sequence_id integer := -1;
        BEGIN
            SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
            SELECT id INTO sequence_id FROM sequences WHERE source = source_ AND remote_id = sequence_;

            IF EXISTS (SELECT * FROM book_sequences WHERE book = book_id AND sequence = sequence_id) THEN
                UPDATE book_sequences SET position = position_ WHERE book = book_id AND sequence = sequence_id;
                RETURN;
            END IF;

            INSERT INTO book_sequences (book, sequence, position) VALUES (book_id, sequence_id, position_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT BookId, SeqId, level FROM libseq "
            "WHERE "
            "BookId IN (SELECT BookId FROM libbook) AND "
            "SeqId IN (SELECT SeqId FROM libseqname);"
        )

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_book_sequence($1, $2, $3, $4);",
                ([source, *row] for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_book_annotations(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_lib_b_annotations.value,
            JobId.update_books.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_book_annotation(source_ smallint, book_ integer, title_ varchar, text_ text) RETURNS void AS $$
        DECLARE
            book_id integer := -1;
        BEGIN
            SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;

            IF EXISTS (SELECT * FROM book_annotations WHERE book = book_id) THEN
                UPDATE book_annotations SET title = title_, text = text_ WHERE book = book_id;
                RETURN;
            END IF;

            INSERT INTO book_annotations (book, title, text) VALUES (book_id, title_, text_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    def fix_annotation(row) -> list:
        return [
            source,
            row[0],
            row[1],
            fix_annotation_text(row[2]),
        ]

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT BookId, Title, Body FROM libbannotations "
            "WHERE BookId IN (SELECT BookId FROM libbook);"
        )

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_book_annotation($1, $2, cast($3 as varchar), cast($4 as text));",
                (fix_annotation(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


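# The *_pics tasks only attach a picture URL to annotations that already
# exist, so they skip the plpgsql helper and run a plain UPDATE ... FROM join
# that resolves (source, remote_id) to the local row id.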
async def update_fl_book_annotations_pic(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_lib_b_annotations_pics.value,
            JobId.update_book_annotations.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    def fix_link(row):
        return [source, row[0], f"{env_config.FL_BASE_URL}/i/{row[1]}"]

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT BookId, File FROM libbpics;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "UPDATE book_annotations "
                "SET file = cast($3 as varchar) "
                "FROM (SELECT id FROM books WHERE source = $1 AND remote_id = $2) as books "
                "WHERE book = books.id;",
                (fix_link(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_author_annotations(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_lib_a_annotations.value,
            JobId.update_authors.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_author_annotation(source_ smallint, author_ integer, title_ varchar, text_ text) RETURNS void AS $$
        DECLARE
            author_id integer := -1;
        BEGIN
            SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;

            IF EXISTS (SELECT * FROM author_annotations WHERE author = author_id) THEN
                UPDATE author_annotations SET title = title_, text = text_ WHERE author = author_id;
                RETURN;
            END IF;

            INSERT INTO author_annotations (author, title, text) VALUES (author_id, title_, text_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    def fix_annotation(row) -> list:
        return [
            source,
            row[0],
            row[1],
            fix_annotation_text(row[2]),
        ]

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT AvtorId, Title, Body FROM libaannotations;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_author_annotation($1, $2, cast($3 as varchar), cast($4 as text));",
                (fix_annotation(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_author_annotations_pics(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_lib_a_annotations_pics.value,
            JobId.update_author_annotations.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    def fix_link(row):
        return [source, row[0], f"{env_config.FL_BASE_URL}/ia/{row[1]}"]

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT AvtorId, File FROM libapics;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "UPDATE author_annotations "
                "SET file = cast($3 as varchar) "
                "FROM (SELECT id FROM authors WHERE source = $1 AND remote_id = $2) as authors "
                "WHERE author = authors.id;",
                (fix_link(row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_genres(ctx: dict, *args, prefix: Optional[str] = None, **kwargs):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libgenrelist.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_genre(
            source_ smallint, remote_id_ int, code_ varchar, description_ varchar, meta_ varchar
        ) RETURNS void AS $$
        BEGIN
            IF EXISTS (SELECT * FROM genres WHERE source = source_ AND remote_id = remote_id_) THEN
                UPDATE genres SET code = code_, description = description_, meta = meta_
                WHERE source = source_ AND remote_id = remote_id_;
                RETURN;
            END IF;

            INSERT INTO genres (source, remote_id, code, description, meta)
            VALUES (source_, remote_id_, code_, description_, meta_);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute(
            "SELECT GenreId, GenreCode, GenreDesc, GenreMeta FROM libgenrelist;"
        )

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_genre($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar));",
                ([source, *row] for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_books_genres(
    ctx: dict, *args, prefix: Optional[str] = None, **kwargs
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool,
        [
            JobId.import_libgenre.value,
            JobId.update_books.value,
            JobId.update_genres.value,
        ],
        prefix=prefix,
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    postgres, mysql = await get_db_cons()

    source = await get_source(postgres)

    await postgres.execute(
        """
        CREATE OR REPLACE FUNCTION update_book_genre(source_ smallint, book_ integer, genre_ integer) RETURNS void AS $$
        DECLARE
            book_id integer := -1;
            genre_id integer := -1;
        BEGIN
            SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
            SELECT id INTO genre_id FROM genres WHERE source = source_ AND remote_id = genre_;

            IF EXISTS (SELECT * FROM book_genres WHERE book = book_id AND genre = genre_id) THEN
                RETURN;
            END IF;

            INSERT INTO book_genres (book, genre) VALUES (book_id, genre_id);
        END;
        $$ LANGUAGE plpgsql;
        """
    )

    async with mysql.cursor(aiomysql.SSCursor) as cursor:
        await cursor.execute("SELECT BookId, GenreId FROM libgenre;")

        while rows := await cursor.fetchmany(4096):
            await postgres.executemany(
                "SELECT update_book_genre($1, $2, $3);",
                ((source, *row) for row in rows),
            )

    await postgres.close()
    mysql.close()


async def update_fl_webhook(
    ctx: dict,
    *args,
    prefix: Optional[str] = None,
    **kwargs,
):
    arq_pool: ArqRedis = ctx["arq_pool"]

    is_deps_complete, not_complete_count = await is_jobs_complete(
        arq_pool, [e.value for e in JobId if e != JobId.webhook], prefix=prefix
    )

    if not is_deps_complete:
        raise Retry(defer=45 * not_complete_count)

    all_success = True

    for webhook in env_config.WEBHOOKS:
        async with httpx.AsyncClient() as client:
            response: httpx.Response = await getattr(client, webhook.method)(
                webhook.url, headers=webhook.headers
            )

            if response.status_code != 200:
                all_success = False

    return all_success


async def run_fl_update(ctx: dict, *args, **kwargs) -> bool:
    IMPORTS = {
        JobId.import_libbook: "lib.libbook.sql",
        JobId.import_libavtor: "lib.libavtor.sql",
        JobId.import_libavtorname: "lib.libavtorname.sql",
        JobId.import_libtranslator: "lib.libtranslator.sql",
        JobId.import_libseqname: "lib.libseqname.sql",
        JobId.import_libseq: "lib.libseq.sql",
        JobId.import_libgenre: "lib.libgenre.sql",
        JobId.import_libgenrelist: "lib.libgenrelist.sql",
        JobId.import_lib_b_annotations: "lib.b.annotations.sql",
        JobId.import_lib_b_annotations_pics: "lib.b.annotations_pics.sql",
        JobId.import_lib_a_annotations: "lib.a.annotations.sql",
        JobId.import_lib_a_annotations_pics: "lib.a.annotations_pics.sql",
    }

    UPDATES = (
        JobId.update_books,
        JobId.update_book_annotations,
        JobId.update_book_annotations_pic,
        JobId.update_books_genres,
        JobId.update_authors,
        JobId.update_author_annotations,
        JobId.update_author_annotations_pics,
        JobId.update_books_authors,
        JobId.update_translations,
        JobId.update_sequences,
        JobId.update_sequences_info,
        JobId.update_genres,
        JobId.webhook,
    )

    arq_pool: ArqRedis = ctx["arq_pool"]
    prefix = str(int(time.time()) // (5 * 60))

    for job_id, filename in IMPORTS.items():
        await arq_pool.enqueue_job(
            "import_fl_dump", filename, _job_id=f"{prefix}_{job_id.value}"
        )

    for job_id in UPDATES:
        await arq_pool.enqueue_job(
            job_id.value, prefix=prefix, _job_id=f"{prefix}_{job_id.value}"
        )

    return True


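# The prefix above buckets time into 5-minute windows, e.g. time.time() of
# 1_700_000_000 gives str(1_700_000_000 // 300) == "5666666". All jobs queued
# in one run share that prefix in their _job_id, so re-triggering run_fl_update
# within the same window dedupes against the still-queued jobs, and the
# update_* tasks use the same prefix to find their import dependencies.
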
__tasks__ = [
    run_fl_update,
    import_fl_dump,
    update_fl_authors,
    update_fl_books,
    update_fl_books_authors,
    update_fl_translations,
    update_fl_sequences,
    update_fl_sequences_info,
    update_fl_book_annotations,
    update_fl_book_annotations_pic,
    update_fl_author_annotations,
    update_fl_author_annotations_pics,
    update_fl_genres,
    update_fl_books_genres,
    update_fl_webhook,
]
@@ -1,11 +0,0 @@
import asyncio
from typing import Optional


async def run_cmd(cmd: str) -> tuple[bytes, bytes, Optional[int]]:
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )

    stdout, stderr = await proc.communicate()
    return stdout, stderr, proc.returncode
@@ -1,29 +0,0 @@
from typing import Optional

from arq.connections import ArqRedis
from arq.jobs import Job, JobStatus


async def is_jobs_complete(
    arq_pool: ArqRedis, job_ids: list[str], prefix: Optional[str] = None
) -> tuple[bool, int]:
    job_statuses = []

    for job_id in job_ids:
        _job_id = f"{prefix}_{job_id}" if prefix else job_id
        status = await Job(
            _job_id, arq_pool, arq_pool.default_queue_name, arq_pool.job_deserializer
        ).status()
        job_statuses.append(status.value)

    not_complete_count = 0
    for status in (
        JobStatus.not_found.value,
        JobStatus.deferred.value,
        JobStatus.in_progress.value,
        JobStatus.queued.value,
    ):
        if status in job_statuses:
            not_complete_count += 1

    return not_complete_count == 0, not_complete_count
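
# The tasks above pair is_jobs_complete with arq's Retry to express their
# dependency graph, backing off longer the more dependencies are still pending:
#
#     done, pending = await is_jobs_complete(
#         arq_pool, [JobId.update_books.value], prefix=prefix
#     )
#     if not done:
#         raise Retry(defer=45 * pending)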
@@ -1,28 +0,0 @@
import re


def remove_wrong_ch(s: str) -> str:
    return s.replace(";", "").replace("\n", " ").replace("ё", "е")


def remove_dots(s: str) -> str:
    return s.replace(".", "")


tags_regexp = re.compile(r"<.*?>")


def fix_annotation_text(text: str) -> str:
    replace_map = {
        "\xa0": "",
        "[b]": "",
        "[/b]": "",
        "[hr]": "",
    }

    t = tags_regexp.sub("", text)

    for key in replace_map:
        t = t.replace(key, replace_map[key])

    return t
@@ -1,22 +0,0 @@
from fastapi import APIRouter, Depends, Request

from arq.connections import ArqRedis

from app.depends import check_token
from app.services.updaters import UpdaterTypes, UPDATERS


router = APIRouter(tags=["updater"], dependencies=[Depends(check_token)])


@router.post("/update/{updater}")
async def update(request: Request, updater: UpdaterTypes):
    arq_pool: ArqRedis = request.app.state.arq_pool
    await arq_pool.enqueue_job(UPDATERS[updater])

    return "Ok!"


@router.get("/healthcheck")
async def healthcheck():
    return "Ok!"
57
src/config.rs
Normal file
@@ -0,0 +1,57 @@
use serde::Deserialize;
use serde_json::Map;

#[derive(Deserialize, Clone)]
pub enum Method {
    #[serde(rename = "get")]
    Get,
    #[serde(rename = "post")]
    Post,
}

#[derive(Deserialize, Clone)]
pub struct Webhook {
    pub method: Method,
    pub url: String,
    pub headers: Map<String, serde_json::Value>,
}

pub struct Config {
    pub sentry_dsn: String,

    pub postgres_db_name: String,
    pub postgres_host: String,
    pub postgres_port: u16,
    pub postgres_user: String,
    pub postgres_password: String,

    pub fl_base_url: String,

    pub webhooks: Vec<Webhook>,
}

fn get_env(env: &'static str) -> String {
    std::env::var(env).unwrap_or_else(|_| panic!("Cannot get the {} env variable", env))
}

impl Config {
    pub fn load() -> Config {
        Config {
            sentry_dsn: get_env("SENTRY_DSN"),

            postgres_db_name: get_env("POSTGRES_DB_NAME"),
            postgres_host: get_env("POSTGRES_HOST"),
            postgres_port: get_env("POSTGRES_PORT").parse().unwrap(),
            postgres_user: get_env("POSTGRES_USER"),
            postgres_password: get_env("POSTGRES_PASSWORD"),

            fl_base_url: get_env("FL_BASE_URL"),

            webhooks: serde_json::from_str(&get_env("WEBHOOKS")).unwrap(),
        }
    }
}

lazy_static! {
    pub static ref CONFIG: Config = Config::load();
}
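
// The WEBHOOKS env variable is deserialized straight into Vec<Webhook>, so it
// must be a JSON array shaped like the structs above; a minimal example
// (values are placeholders, not real endpoints):
// [{"method": "post", "url": "https://example.com/hook", "headers": {"Authorization": "secret"}}]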
@@ -1,17 +0,0 @@
from fastapi import FastAPI

from app.views import router
from core.arq_pool import get_arq_pool
import core.sentry  # noqa: F401


def start_app() -> FastAPI:
    app = FastAPI()

    app.include_router(router)

    @app.on_event("startup")
    async def startup() -> None:
        app.state.arq_pool = await get_arq_pool()

    return app
@@ -1,15 +0,0 @@
from arq.connections import create_pool, RedisSettings, ArqRedis

from core.config import env_config


def get_redis_settings() -> RedisSettings:
    return RedisSettings(
        host=env_config.REDIS_HOST,
        port=env_config.REDIS_PORT,
        database=env_config.REDIS_DB,
    )


async def get_arq_pool() -> ArqRedis:
    return await create_pool(get_redis_settings())
@@ -1,4 +0,0 @@
from fastapi.security import APIKeyHeader


default_security = APIKeyHeader(name="Authorization")
@@ -1,38 +0,0 @@
from typing import Union, Literal

from pydantic import BaseModel, BaseSettings


class WebhookConfig(BaseModel):
    method: Union[Literal["get"], Literal["post"]]
    url: str
    headers: dict[str, str]


class EnvConfig(BaseSettings):
    API_KEY: str

    POSTGRES_DB_NAME: str
    POSTGRES_HOST: str
    POSTGRES_PORT: int
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str

    MYSQL_DB_NAME: str
    MYSQL_HOST: str
    MYSQL_PORT: int
    MYSQL_USER: str
    MYSQL_PASSWORD: str

    REDIS_HOST: str
    REDIS_PORT: int
    REDIS_DB: int

    FL_BASE_URL: str

    SENTRY_DSN: str

    WEBHOOKS: list[WebhookConfig]


env_config = EnvConfig()
@@ -1,8 +0,0 @@
import sentry_sdk

from core.config import env_config


sentry_sdk.init(
    env_config.SENTRY_DSN,
)
@@ -1,32 +0,0 @@
from arq.connections import ArqRedis

from app.services.updaters.fl_updater import __tasks__ as fl_tasks

# from app.services.updaters.fl_updater import run_fl_update
from core.arq_pool import get_redis_settings, get_arq_pool
import core.sentry  # noqa: F401


# from arq.cron import cron


async def startup(ctx):
    ctx["arq_pool"] = await get_arq_pool()


async def shutdown(ctx):
    arq_pool: ArqRedis = ctx["arq_pool"]

    arq_pool.close()
    await arq_pool.wait_closed()


class WorkerSettings:
    functions = [*fl_tasks]
    on_startup = startup
    on_shutdown = shutdown
    redis_settings = get_redis_settings()
    max_jobs = 2
    max_tries = 30
    job_timeout = 10 * 60
    # cron_jobs = [cron(run_fl_update, hour={5}, minute=0)]
@@ -1,4 +0,0 @@
from core.app import start_app


app = start_app()
525
src/main.rs
Normal file
@@ -0,0 +1,525 @@
#[macro_use]
extern crate lazy_static;

pub mod config;
pub mod types;
pub mod utils;

use std::{
    fmt::Debug,
    str::FromStr,
    sync::{Arc, Mutex},
};

use config::Webhook;
use deadpool_postgres::{Config, CreatePoolError, ManagerConfig, Pool, RecyclingMethod, Runtime};
use futures::{io::copy, TryStreamExt};
use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
use tokio::fs::{remove_file, File};
use tokio_cron_scheduler::{Job, JobScheduler, JobSchedulerError};
use tokio_postgres::NoTls;

use async_compression::futures::bufread::GzipDecoder;

use sql_parse::{
    parse_statement, InsertReplace, InsertReplaceType, ParseOptions, SQLArguments, SQLDialect,
    Statement,
};
use tokio_util::compat::TokioAsyncReadCompatExt;
use types::{
    Author, AuthorAnnotation, AuthorAnnotationPic, BookAnnotation, BookAnnotationPic, BookAuthor,
    BookGenre, FromVecExpression, Genre, Sequence, SequenceInfo, Translator, Update,
};
use utils::read_lines;

use crate::types::Book;

async fn download_file(filename_str: &str) -> Result<(), Box<dyn std::error::Error + Send>> {
    log::info!("Download {filename_str}...");

    let link = format!("{}/sql/{filename_str}.gz", &config::CONFIG.fl_base_url);

    let response = match reqwest::get(link).await {
        Ok(v) => v,
        Err(err) => return Err(Box::new(err)),
    };

    let response = match response.error_for_status() {
        Ok(v) => v,
        Err(err) => return Err(Box::new(err)),
    };

    match remove_file(filename_str).await {
        Ok(_) => (),
        Err(err) => log::debug!("Can't remove file: {:?}", err),
    };

    let mut file = match File::create(filename_str).await {
        Ok(v) => v.compat(),
        Err(err) => {
            log::error!("Can't create {filename_str}: {:?}", err);
            return Err(Box::new(err));
        }
    };

    let data = response
        .bytes_stream()
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
        .into_async_read();

    let decoder = GzipDecoder::new(data);

    match copy(decoder, &mut file).await {
        Ok(_) => (),
        Err(err) => {
            log::error!("Can't write data {filename_str}: {}", err);
            return Err(Box::new(err));
        }
    };

    log::info!("{filename_str} downloaded!");

    Ok(())
}

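// process::<T> drives one dump file end to end: it polls its dependency slots
// once a second until they all report Success, downloads and unpacks the dump,
// parses each line as a MariaDB INSERT with sql_parse, and upserts every value
// tuple through T's Update impl. The Arc<Mutex<Option<UpdateStatus>>> slots
// play the role of the arq job-status checks in the Python worker.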
async fn process<T>(
    pool: Pool,
    source_id: i16,
    file_name: &str,
    deps: Vec<Arc<Mutex<Option<UpdateStatus>>>>,
) -> Result<(), Box<dyn std::error::Error + Send>>
where
    T: Debug + FromVecExpression<T> + Update,
{
    if !deps.is_empty() {
        loop {
            let mut some_failed = false;
            let mut some_none = false;

            for dep in deps.iter() {
                let status = dep.lock().unwrap();
                match &*status {
                    Some(status) => match status {
                        UpdateStatus::Success => (),
                        UpdateStatus::Fail => some_failed = true,
                    },
                    None => some_none = true,
                }
            }

            if !some_failed && !some_none {
                break;
            }

            tokio::time::sleep(std::time::Duration::from_secs(1)).await;
        }
    }

    match download_file(file_name).await {
        Ok(_) => (),
        Err(err) => return Err(err),
    };

    let parse_options = ParseOptions::new()
        .dialect(SQLDialect::MariaDB)
        .arguments(SQLArguments::QuestionMark)
        .warn_unquoted_identifiers(true);

    let lines = read_lines(file_name);

    let lines = match lines {
        Ok(v) => v,
        Err(err) => return Err(Box::new(err)),
    };

    match T::before_update(&pool.get().await.unwrap()).await {
        Ok(_) => (),
        Err(err) => return Err(err),
    };

    log::info!("Start update {file_name}...");

    for line in lines.into_iter() {
        let line = match line {
            Ok(line) => line,
            Err(err) => return Err(Box::new(err)),
        };

        let mut issues = Vec::new();
        let ast = parse_statement(&line, &mut issues, &parse_options);

        match ast {
            Some(Statement::InsertReplace(
                i @ InsertReplace {
                    type_: InsertReplaceType::Insert(_),
                    ..
                },
            )) => {
                for value in i.values.into_iter() {
                    for t_value in value.1.into_iter() {
                        let value = T::from_vec_expression(&t_value);
                        let client = pool.get().await.unwrap();

                        match value.update(&client, source_id).await {
                            Ok(_) => {
                                // log::info!("{:?}", value);
                            }
                            Err(err) => {
                                log::error!("Update error: {:?} : {:?}", value, err);
                                return Err(err);
                            }
                        }
                    }
                }
            }
            _ => (),
        }
    }

    log::info!("Updated {file_name}...");

    Ok(())
}

async fn get_postgres_pool() -> Result<Pool, CreatePoolError> {
    let mut config = Config::new();

    config.host = Some(config::CONFIG.postgres_host.clone());
    config.port = Some(config::CONFIG.postgres_port);
    config.dbname = Some(config::CONFIG.postgres_db_name.clone());
    config.user = Some(config::CONFIG.postgres_user.clone());
    config.password = Some(config::CONFIG.postgres_password.clone());
    config.connect_timeout = Some(std::time::Duration::from_secs(5));
    config.manager = Some(ManagerConfig {
        recycling_method: RecyclingMethod::Verified,
    });

    match config.create_pool(Some(Runtime::Tokio1), NoTls) {
        Ok(pool) => Ok(pool),
        Err(err) => Err(err),
    }
}

async fn get_source(pool: Pool) -> Result<i16, Box<dyn std::error::Error>> {
    let client = pool.get().await.unwrap();

    let row = match client
        .query_one("SELECT id FROM sources WHERE name = 'flibusta';", &[])
        .await
    {
        Ok(v) => v,
        Err(err) => return Err(Box::new(err)),
    };

    let id = row.get(0);

    Ok(id)
}

enum UpdateStatus {
    Success,
    Fail,
}

async fn send_webhooks() -> Result<(), Box<reqwest::Error>> {
    for webhook in config::CONFIG.webhooks.clone().into_iter() {
        let Webhook { method, url, headers } = webhook;

        let client = reqwest::Client::new();

        let builder = match method {
            config::Method::Get => client.get(url),
            config::Method::Post => client.post(url),
        };

        let t_headers: Vec<(HeaderName, HeaderValue)> = headers
            .into_iter()
            .map(|(key, val)| {
                let value = match val {
                    serde_json::Value::String(v) => v,
                    _ => panic!("Header value not string!"),
                };

                (
                    HeaderName::from_str(key.as_ref()).unwrap(),
                    HeaderValue::from_str(&value).unwrap(),
                )
            })
            .collect();

        let headers = HeaderMap::from_iter(t_headers.into_iter());

        let response = builder.headers(headers).send().await;

        let response = match response {
            Ok(v) => v,
            Err(err) => return Err(Box::new(err)),
        };

        match response.error_for_status() {
            Ok(_) => (),
            Err(err) => return Err(Box::new(err)),
        };
    }

    Ok(())
}

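// update mirrors the task graph of the Python worker: independent tables
// (authors, books, sequences, genres) start immediately, while join tables and
// annotations receive clones of the status slots they depend on and only start
// once those flip to Success.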
async fn update() -> Result<(), Box<dyn std::error::Error>> {
    log::info!("Start update...");

    let pool = match get_postgres_pool().await {
        Ok(pool) => pool,
        Err(err) => panic!("{:?}", err),
    };

    let source_id = match get_source(pool.clone()).await {
        Ok(v) => Arc::new(v),
        Err(err) => panic!("{:?}", err),
    };

    let author_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));
    let book_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));
    let sequence_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));
    let book_annotation_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));
    let author_annotation_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));
    let genre_status: Arc<Mutex<Option<UpdateStatus>>> = Arc::new(Mutex::new(None));

    let pool_clone = pool.clone();
    let author_status_clone = author_status.clone();
    let source_id_clone = source_id.clone();
    let author_process = tokio::spawn(async move {
        match process::<Author>(pool_clone, *source_id_clone, "lib.libavtorname.sql", vec![]).await
        {
            Ok(_) => {
                let mut status = author_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = author_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let book_status_clone = book_status.clone();
    let source_id_clone = source_id.clone();
    let book_process = tokio::spawn(async move {
        match process::<Book>(pool_clone, *source_id_clone, "lib.libbook.sql", vec![]).await {
            Ok(_) => {
                let mut status = book_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = book_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let deps = vec![author_status.clone(), book_status.clone()];
    let source_id_clone = source_id.clone();
    let book_author_process = tokio::spawn(async move {
        process::<BookAuthor>(pool_clone, *source_id_clone, "lib.libavtor.sql", deps).await
    });

    let pool_clone = pool.clone();
    let deps = vec![author_status.clone(), book_status.clone()];
    let source_id_clone = source_id.clone();
    let translator_process = tokio::spawn(async move {
        process::<Translator>(pool_clone, *source_id_clone, "lib.libtranslator.sql", deps).await
    });

    let pool_clone = pool.clone();
    let sequence_status_clone = sequence_status.clone();
    let source_id_clone = source_id.clone();
    let sequence_process = tokio::spawn(async move {
        match process::<Sequence>(pool_clone, *source_id_clone, "lib.libseqname.sql", vec![]).await
        {
            Ok(_) => {
                let mut status = sequence_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = sequence_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let deps = vec![book_status.clone(), sequence_status.clone()];
    let source_id_clone = source_id.clone();
    let sequence_info_process = tokio::spawn(async move {
        process::<SequenceInfo>(pool_clone, *source_id_clone, "lib.libseq.sql", deps).await
    });

    let pool_clone = pool.clone();
    let deps = vec![book_status.clone()];
    let book_annotation_status_clone = book_annotation_status.clone();
    let source_id_clone = source_id.clone();
    let book_annotation_process = tokio::spawn(async move {
        match process::<BookAnnotation>(pool_clone, *source_id_clone, "lib.b.annotations.sql", deps)
            .await
        {
            Ok(_) => {
                let mut status = book_annotation_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = book_annotation_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let deps = vec![book_annotation_status.clone()];
    let source_id_clone = source_id.clone();
    let book_annotation_pics_process = tokio::spawn(async move {
        process::<BookAnnotationPic>(
            pool_clone,
            *source_id_clone,
            "lib.b.annotations_pics.sql",
            deps,
        )
        .await
    });

    let pool_clone = pool.clone();
    let deps = vec![author_status.clone()];
    let author_annotation_status_clone = author_annotation_status.clone();
    let source_id_clone = source_id.clone();
    let author_annotation_process = tokio::spawn(async move {
        match process::<AuthorAnnotation>(
            pool_clone,
            *source_id_clone,
            "lib.a.annotations.sql",
            deps,
        )
        .await
        {
            Ok(_) => {
                let mut status = author_annotation_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = author_annotation_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let deps = vec![author_annotation_status.clone()];
    let source_id_clone = source_id.clone();
    let author_annotation_pics_process = tokio::spawn(async move {
        process::<AuthorAnnotationPic>(
            pool_clone,
            *source_id_clone,
            "lib.a.annotations_pics.sql",
            deps,
        )
        .await
    });

    let pool_clone = pool.clone();
    let genre_status_clone = genre_status.clone();
    let source_id_clone = source_id.clone();
    let genre_annotation_process = tokio::spawn(async move {
        match process::<Genre>(pool_clone, *source_id_clone, "lib.libgenrelist.sql", vec![]).await {
            Ok(_) => {
                let mut status = genre_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Success);
                Ok(())
            }
            Err(err) => {
                let mut status = genre_status_clone.lock().unwrap();
                *status = Some(UpdateStatus::Fail);
                Err(err)
            }
        }
    });

    let pool_clone = pool.clone();
    let deps = vec![genre_status.clone(), book_status.clone()];
    let source_id_clone = source_id.clone();
    let book_genre_process = tokio::spawn(async move {
        process::<BookGenre>(pool_clone, *source_id_clone, "lib.libgenre.sql", deps).await
    });

    for process in [
        author_process,
        book_process,
        book_author_process,
        translator_process,
        sequence_process,
        sequence_info_process,
        book_annotation_process,
        book_annotation_pics_process,
        author_annotation_process,
        author_annotation_pics_process,
        genre_annotation_process,
        book_genre_process,
    ] {
        let process_result = match process.await {
            Ok(v) => v,
            Err(err) => return Err(Box::new(err)),
        };

        match process_result {
            Ok(_) => (),
            Err(err) => panic!("{:?}", err),
        }
    }

    match send_webhooks().await {
        Ok(_) => {
            log::info!("Webhooks sent!");
        }
        Err(err) => {
            log::info!("Webhooks send failed: {err}");
            return Err(Box::new(err));
        }
    };

    Ok(())
}

#[tokio::main]
async fn main() -> Result<(), JobSchedulerError> {
    let _guard = sentry::init(config::CONFIG.sentry_dsn.clone());
    env_logger::init();

    let job_scheduler = JobScheduler::new().await.unwrap();

    // Run the full update every day at 05:00 (fields: sec min hour day month weekday).
    let update_job = match Job::new_async("0 0 5 * * *", |_uuid, _l| {
        Box::pin(async {
            match update().await {
                Ok(_) => log::info!("Updated"),
                Err(err) => log::info!("Update err: {:?}", err),
            };
        })
    }) {
        Ok(v) => v,
        Err(err) => panic!("{:?}", err),
    };

    job_scheduler.add(update_job).await.unwrap();

    match job_scheduler.start().await {
        Ok(_) => Ok(()),
        Err(err) => Err(err),
    }
}
864
src/types.rs
Normal file
@@ -0,0 +1,864 @@
use std::convert::TryInto;

use async_trait::async_trait;
use chrono::{NaiveDate, NaiveDateTime};
use sql_parse::Expression;
use tokio_postgres::Client;

use crate::utils::{fix_annotation_text, parse_lang, remove_wrong_chars};

pub trait FromVecExpression<T> {
    fn from_vec_expression(value: &Vec<Expression>) -> T;
}

#[async_trait]
pub trait Update {
    async fn before_update(client: &Client) -> Result<(), Box<tokio_postgres::Error>>;

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>>;
}

#[derive(Debug)]
pub struct Author {
    pub id: u64,
    pub last_name: String,
    pub first_name: String,
    pub middle_name: String,
}

impl FromVecExpression<Author> for Author {
    fn from_vec_expression(value: &Vec<Expression>) -> Author {
        Author {
            id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Author.id"),
            },
            last_name: match &value[3] {
                sql_parse::Expression::String(v) => remove_wrong_chars(&v.value),
                _ => panic!("Author.last_name"),
            },
            first_name: match &value[1] {
                sql_parse::Expression::String(v) => remove_wrong_chars(&v.value),
                _ => panic!("Author.first_name"),
            },
            middle_name: match &value[2] {
                sql_parse::Expression::String(v) => remove_wrong_chars(&v.value),
                _ => panic!("Author.middle_name"),
            },
        }
    }
}

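// Column indices in from_vec_expression above follow the raw INSERT tuples of
// lib.libavtorname.sql: value[0] is the id, value[1] the first name,
// value[2] the middle name and value[3] the last name.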
#[async_trait]
impl Update for Author {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_author(
                source_ smallint, remote_id_ int, first_name_ varchar, last_name_ varchar, middle_name_ varchar
            ) RETURNS void AS $$
            BEGIN
                IF EXISTS (SELECT * FROM authors WHERE source = source_ AND remote_id = remote_id_) THEN
                    UPDATE authors SET first_name = first_name_, last_name = last_name_, middle_name = middle_name_
                    WHERE source = source_ AND remote_id = remote_id_;
                    RETURN;
                END IF;
                INSERT INTO authors (source, remote_id, first_name, last_name, middle_name)
                VALUES (source_, remote_id_, first_name_, last_name_, middle_name_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "SELECT update_author($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar));",
            &[&source_id, &(self.id as i32), &self.first_name, &self.last_name, &self.middle_name],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

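// Sketch of invoking the installed helper by hand (assumed example values):
//
//     SELECT update_author(1::smallint, 42, 'Ivan', 'Ivanov', '');
//
// i.e. one SELECT per row, idempotent: an existing (source, remote_id) pair is
// updated in place, otherwise a fresh row is inserted.
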
#[derive(Debug)]
pub struct Book {
    pub id: u64,
    pub title: String,
    pub lang: String,
    pub file_type: String,
    pub uploaded: NaiveDate,
    pub is_deleted: bool,
    pub pages: u64,
}

impl FromVecExpression<Book> for Book {
    fn from_vec_expression(value: &Vec<Expression>) -> Book {
        Book {
            id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Book.id"),
            },
            title: match &value[3] {
                sql_parse::Expression::String(v) => remove_wrong_chars(&v.value),
                _ => panic!("Book.title"),
            },
            lang: match &value[5] {
                sql_parse::Expression::String(v) => parse_lang(&v.value),
                _ => panic!("Book.lang"),
            },
            file_type: match &value[8] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("Book.file_type"),
            },
            uploaded: match &value[2] {
                sql_parse::Expression::String(v) => {
                    NaiveDateTime::parse_from_str(&v.value.to_string(), "%Y-%m-%d %H:%M:%S")
                        .unwrap()
                        .date()
                }
                _ => panic!("Book.uploaded"),
            },
            is_deleted: match &value[11] {
                sql_parse::Expression::String(v) => v.value == "1",
                _ => panic!("Book.is_deleted"),
            },
            pages: match &value[20] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Book.pages"),
            },
        }
    }
}

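// The tuple indices above follow the upstream dump's column layout (assumed):
// value[0] = id, value[2] = upload datetime ('%Y-%m-%d %H:%M:%S', date part
// kept), value[3] = title, value[5] = language code, value[8] = file type,
// value[11] = deleted flag ('0'/'1' as a string), value[20] = page count.
// Any other expression shape at those positions is a fatal parse error.
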
#[async_trait]
impl Update for Book {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_book(
                source_ smallint, remote_id_ int, title_ varchar, lang_ varchar,
                file_type_ varchar, uploaded_ date, is_deleted_ boolean, pages_ int
            ) RETURNS void AS $$
            BEGIN
                IF EXISTS (SELECT * FROM books WHERE source = source_ AND remote_id = remote_id_) THEN
                    UPDATE books SET title = title_, lang = lang_, file_type = file_type_,
                        uploaded = uploaded_, is_deleted = is_deleted_, pages = pages_
                    WHERE source = source_ AND remote_id = remote_id_;
                    RETURN;
                END IF;
                INSERT INTO books (source, remote_id, title, lang, file_type, uploaded, is_deleted, pages)
                VALUES (source_, remote_id_, title_, lang_, file_type_, uploaded_, is_deleted_, pages_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "SELECT update_book($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar), $6, $7, $8);",
            &[&source_id, &(self.id as i32), &self.title, &self.lang, &self.file_type, &self.uploaded, &self.is_deleted, &(self.pages as i32)],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct BookAuthor {
    pub book_id: u64,
    pub author_id: u64,
    // TODO: position
}

impl FromVecExpression<BookAuthor> for BookAuthor {
    fn from_vec_expression(value: &Vec<Expression>) -> BookAuthor {
        BookAuthor {
            book_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookAuthor.book_id"),
            },
            author_id: match &value[1] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookAuthor.author_id"),
            },
        }
    }
}

#[async_trait]
impl Update for BookAuthor {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_book_author(source_ smallint, book_ integer, author_ integer) RETURNS void AS $$
            DECLARE
                book_id integer := -1;
                author_id integer := -1;
            BEGIN
                SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
                SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;
                IF EXISTS (SELECT * FROM book_authors WHERE book = book_id AND author = author_id) THEN
                    RETURN;
                END IF;
                INSERT INTO book_authors (book, author) VALUES (book_id, author_id);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_book_author($1, $2, $3);",
                &[&source_id, &(self.book_id as i32), &(self.author_id as i32)],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct Translator {
    pub book_id: u64,
    pub author_id: u64,
    pub position: u64,
}

impl FromVecExpression<Translator> for Translator {
    fn from_vec_expression(value: &Vec<Expression>) -> Translator {
        Translator {
            book_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Translator.book_id"),
            },
            author_id: match &value[1] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Translator.author_id"),
            },
            position: match &value[2] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Translator.position"),
            },
        }
    }
}

#[async_trait]
impl Update for Translator {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_translation(source_ smallint, book_ integer, author_ integer, position_ smallint) RETURNS void AS $$
            DECLARE
                book_id integer := -1;
                author_id integer := -1;
            BEGIN
                SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
                SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;
                IF EXISTS (SELECT * FROM translations WHERE book = book_id AND author = author_id) THEN
                    UPDATE translations SET position = position_
                    WHERE book = book_id AND author = author_id;
                    RETURN;
                END IF;
                INSERT INTO translations (book, author, position) VALUES (book_id, author_id, position_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_translation($1, $2, $3, $4);",
                &[
                    &source_id,
                    &(self.book_id as i32),
                    &(self.author_id as i32),
                    &(self.position as i16),
                ],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct Sequence {
    pub id: u64,
    pub name: String,
}

impl FromVecExpression<Sequence> for Sequence {
    fn from_vec_expression(value: &Vec<Expression>) -> Sequence {
        Sequence {
            id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Sequence.id"),
            },
            name: match &value[1] {
                sql_parse::Expression::String(v) => remove_wrong_chars(&v.value),
                _ => panic!("Sequence.name"),
            },
        }
    }
}

#[async_trait]
impl Update for Sequence {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_sequences(source_ smallint, remote_id_ int, name_ varchar) RETURNS void AS $$
            BEGIN
                IF EXISTS (SELECT * FROM sequences WHERE source = source_ AND remote_id = remote_id_) THEN
                    UPDATE sequences SET name = name_ WHERE source = source_ AND remote_id = remote_id_;
                    RETURN;
                END IF;
                INSERT INTO sequences (source, remote_id, name) VALUES (source_, remote_id_, name_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_sequences($1, $2, cast($3 as varchar));",
                &[&source_id, &(self.id as i32), &self.name],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct SequenceInfo {
    pub book_id: u64,
    pub sequence_id: u64,
    pub position: i64,
}

impl FromVecExpression<SequenceInfo> for SequenceInfo {
    fn from_vec_expression(value: &Vec<Expression>) -> SequenceInfo {
        SequenceInfo {
            book_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("SequenceInfo.book_id"),
            },
            sequence_id: match &value[1] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("SequenceInfo.sequence_id"),
            },
            position: match &value[2] {
                sql_parse::Expression::Integer(v) => v.0.try_into().unwrap(),
                sql_parse::Expression::Unary {
                    op,
                    op_span: _,
                    operand,
                } => match (op, operand.as_ref()) {
                    (sql_parse::UnaryOperator::Minus, Expression::Integer(v)) => {
                        let value: i64 = (v.0).try_into().unwrap();
                        -value
                    }
                    (_, _) => panic!("SequenceInfo.position = {:?}", &value[2]),
                },
                _ => panic!("SequenceInfo.position = {:?}", &value[2]),
            },
        }
    }
}

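// e.g. a literal `-3` in the dump is parsed by sql_parse as
// Unary { op: Minus, operand: Integer(3) }, which the Unary arm above
// folds back into the signed value -3; positions are the only field here
// that can legitimately be negative, hence the extra handling.
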
#[async_trait]
impl Update for SequenceInfo {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_book_sequence(source_ smallint, book_ integer, sequence_ integer, position_ smallint) RETURNS void AS $$
            DECLARE
                book_id integer := -1;
                sequence_id integer := -1;
            BEGIN
                SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
                SELECT id INTO sequence_id FROM sequences WHERE source = source_ AND remote_id = sequence_;
                IF EXISTS (SELECT * FROM book_sequences WHERE book = book_id AND sequence = sequence_id) THEN
                    UPDATE book_sequences SET position = position_ WHERE book = book_id AND sequence = sequence_id;
                    RETURN;
                END IF;
                INSERT INTO book_sequences (book, sequence, position) VALUES (book_id, sequence_id, position_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_book_sequence($1, $2, $3, $4);",
                &[
                    &source_id,
                    &(self.book_id as i32),
                    &(self.sequence_id as i32),
                    &(self.position as i16),
                ],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct BookAnnotation {
    pub book_id: u64,
    pub title: String,
    pub body: Option<String>,
}

impl FromVecExpression<BookAnnotation> for BookAnnotation {
    fn from_vec_expression(value: &Vec<Expression>) -> BookAnnotation {
        BookAnnotation {
            book_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookAnnotation.book_id"),
            },
            title: match &value[2] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("BookAnnotation.title"),
            },
            body: match &value[3] {
                sql_parse::Expression::String(v) => Some(fix_annotation_text(&v.value)),
                sql_parse::Expression::Null(_) => None,
                _ => panic!("BookAnnotation.body"),
            },
        }
    }
}

#[async_trait]
impl Update for BookAnnotation {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_book_annotation(source_ smallint, book_ integer, title_ varchar, text_ text) RETURNS void AS $$
            DECLARE
                book_id integer := -1;
            BEGIN
                SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
                IF EXISTS (SELECT * FROM book_annotations WHERE book = book_id) THEN
                    UPDATE book_annotations SET title = title_, text = text_ WHERE book = book_id;
                    RETURN;
                END IF;

                IF book_id IS NULL THEN
                    RETURN;
                END IF;

                INSERT INTO book_annotations (book, title, text) VALUES (book_id, title_, text_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_book_annotation($1, $2, cast($3 as varchar), cast($4 as text));",
                &[&source_id, &(self.book_id as i32), &self.title, &self.body],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct BookAnnotationPic {
    pub book_id: u64,
    pub file: String,
}

impl FromVecExpression<BookAnnotationPic> for BookAnnotationPic {
    fn from_vec_expression(value: &Vec<Expression>) -> BookAnnotationPic {
        BookAnnotationPic {
            book_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookAnnotationPic.book_id"),
            },
            file: match &value[2] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("BookAnnotationPic.file"),
            },
        }
    }
}

#[async_trait]
impl Update for BookAnnotationPic {
    async fn before_update(
        _client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        Ok(())
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "\
                UPDATE book_annotations \
                SET file = cast($3 as varchar) \
                FROM (SELECT id FROM books WHERE source = $1 AND remote_id = $2) as books \
                WHERE book = books.id;\
                ",
                &[&source_id, &(self.book_id as i32), &self.file],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

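// Unlike the other types, annotation pics are applied with a plain
// UPDATE ... FROM joined on (source, remote_id), so no plpgsql helper is
// installed in before_update. Rows whose book is unknown, or whose book has
// no annotation row yet, simply match nothing and are silently skipped.
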
#[derive(Debug)]
pub struct AuthorAnnotation {
    pub author_id: u64,
    pub title: String,
    pub body: Option<String>,
}

impl FromVecExpression<AuthorAnnotation> for AuthorAnnotation {
    fn from_vec_expression(value: &Vec<Expression>) -> AuthorAnnotation {
        AuthorAnnotation {
            author_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("AuthorAnnotation.author_id"),
            },
            title: match &value[2] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("AuthorAnnotation.title"),
            },
            body: match &value[3] {
                sql_parse::Expression::String(v) => Some(fix_annotation_text(&v.value)),
                sql_parse::Expression::Null(_) => None,
                _ => panic!("AuthorAnnotation.body"),
            },
        }
    }
}

#[async_trait]
impl Update for AuthorAnnotation {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_author_annotation(source_ smallint, author_ integer, title_ varchar, text_ text) RETURNS void AS $$
            DECLARE
                author_id integer := -1;
            BEGIN
                SELECT id INTO author_id FROM authors WHERE source = source_ AND remote_id = author_;
                IF EXISTS (SELECT * FROM author_annotations WHERE author = author_id) THEN
                    UPDATE author_annotations SET title = title_, text = text_ WHERE author = author_id;
                    RETURN;
                END IF;
                INSERT INTO author_annotations (author, title, text) VALUES (author_id, title_, text_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_author_annotation($1, $2, cast($3 as varchar), cast($4 as text));",
                &[
                    &source_id,
                    &(self.author_id as i32),
                    &self.title,
                    &self.body,
                ],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct AuthorAnnotationPic {
    pub author_id: u64,
    pub file: String,
}

impl FromVecExpression<AuthorAnnotationPic> for AuthorAnnotationPic {
    fn from_vec_expression(value: &Vec<Expression>) -> AuthorAnnotationPic {
        AuthorAnnotationPic {
            author_id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("AuthorAnnotationPic.author_id"),
            },
            file: match &value[2] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("AuthorAnnotationPic.file"),
            },
        }
    }
}

#[async_trait]
impl Update for AuthorAnnotationPic {
    async fn before_update(
        _client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        Ok(())
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "\
                UPDATE author_annotations \
                SET file = cast($3 as varchar) \
                FROM (SELECT id FROM authors WHERE source = $1 AND remote_id = $2) as authors \
                WHERE author = authors.id;",
                &[&source_id, &(self.author_id as i32), &self.file],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct Genre {
    pub id: u64,
    pub code: String,
    pub description: String,
    pub meta: String,
}

impl FromVecExpression<Genre> for Genre {
    fn from_vec_expression(value: &Vec<Expression>) -> Genre {
        Genre {
            id: match &value[0] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("Genre.id"),
            },
            code: match &value[1] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("Genre.code = {:?}", &value[1]),
            },
            description: match &value[2] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("Genre.description = {:?}", &value[2]),
            },
            meta: match &value[3] {
                sql_parse::Expression::String(v) => v.value.to_string(),
                _ => panic!("Genre.meta"),
            },
        }
    }
}

#[async_trait]
impl Update for Genre {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        // Installs the update_genre helper that update() below calls.
        // Assumes a genres(source, remote_id, code, description, meta) table,
        // mirroring the update_author / update_sequences upsert pattern.
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_genre(
                source_ smallint, remote_id_ int, code_ varchar, description_ varchar, meta_ varchar
            ) RETURNS void AS $$
            BEGIN
                IF EXISTS (SELECT * FROM genres WHERE source = source_ AND remote_id = remote_id_) THEN
                    UPDATE genres SET code = code_, description = description_, meta = meta_
                    WHERE source = source_ AND remote_id = remote_id_;
                    RETURN;
                END IF;
                INSERT INTO genres (source, remote_id, code, description, meta)
                VALUES (source_, remote_id_, code_, description_, meta_);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_genre($1, $2, cast($3 as varchar), cast($4 as varchar), cast($5 as varchar));",
                &[&source_id, &(self.id as i32), &self.code, &self.description, &self.meta],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

#[derive(Debug)]
pub struct BookGenre {
    pub book_id: u64,
    pub genre_id: u64,
}

impl FromVecExpression<BookGenre> for BookGenre {
    fn from_vec_expression(value: &Vec<Expression>) -> BookGenre {
        BookGenre {
            book_id: match &value[1] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookGenre.book_id"),
            },
            genre_id: match &value[2] {
                sql_parse::Expression::Integer(v) => v.0,
                _ => panic!("BookGenre.genre_id"),
            },
        }
    }
}

#[async_trait]
impl Update for BookGenre {
    async fn before_update(
        client: &Client,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        // Installs the link helper used by update() below; it resolves local
        // book/genre ids from their (source, remote_id) pairs.
        match client.execute(
            "
            CREATE OR REPLACE FUNCTION update_book_genre(source_ smallint, book_ integer, genre_ integer) RETURNS void AS $$
            DECLARE
                book_id integer := -1;
                genre_id integer := -1;
            BEGIN
                SELECT id INTO book_id FROM books WHERE source = source_ AND remote_id = book_;
                SELECT id INTO genre_id FROM genres WHERE source = source_ AND remote_id = genre_;
                IF EXISTS (SELECT * FROM book_genres WHERE book = book_id AND genre = genre_id) THEN
                    RETURN;
                END IF;
                INSERT INTO book_genres (book, genre) VALUES (book_id, genre_id);
            END;
            $$ LANGUAGE plpgsql;
            ",
            &[],
        ).await {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }

    async fn update(
        &self,
        client: &Client,
        source_id: i16,
    ) -> Result<(), Box<tokio_postgres::Error>> {
        match client
            .execute(
                "SELECT update_book_genre($1, $2, $3);",
                &[&source_id, &(self.book_id as i32), &(self.genre_id as i32)],
            )
            .await
        {
            Ok(_) => Ok(()),
            Err(err) => Err(Box::new(err)),
        }
    }
}

26
src/utils.rs
Normal file
@@ -0,0 +1,26 @@
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;

pub fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    let file = File::open(filename)?;
    Ok(io::BufReader::new(file).lines())
}

pub fn remove_wrong_chars(s: &str) -> String {
    s.replace(';', "").replace('\n', " ").replace('ё', "е")
}

pub fn parse_lang(s: &str) -> String {
    s.replace('-', "").replace('~', "").to_lowercase()
}

pub fn fix_annotation_text(text: &str) -> String {
    // The stripped character is assumed to be a non-breaking space (U+00A0),
    // which renders indistinguishably from a plain space in the diff view.
    text.replace('\u{a0}', "")
        .replace("[b]", "")
        .replace("[/b]", "")
        .replace("[hr]", "")
}

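// Illustrative tests (a sketch, not in the original commit) pinning down the
// helper behaviour above; the expected strings follow directly from the code.
// read_lines usage sketch: for line in read_lines("dump.sql")? { let line = line?; ... }
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn remove_wrong_chars_strips_separators_and_yo() {
        assert_eq!(remove_wrong_chars("а;б\nв"), "аб в");
        assert_eq!(remove_wrong_chars("зелёный"), "зеленый");
    }

    #[test]
    fn parse_lang_normalizes_codes() {
        assert_eq!(parse_lang("ru-RU"), "ruru");
        assert_eq!(parse_lang("~EN"), "en");
    }

    #[test]
    fn fix_annotation_text_drops_markup() {
        assert_eq!(fix_annotation_text("[b]bold[/b]\u{a0}[hr]"), "bold");
    }
}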