Mirror of https://github.com/flibusta-apps/book_library_server.git (synced 2025-12-06 07:05:36 +01:00)

Compare commits: feature/re ... dc16bc7eb4 (98 commits)
SHA1: dc16bc7eb4, 5b24d28ef0, d9bbcc947f, 0642f14d4e, 684f4e3e52, 64f77d0f15, 6bf09ce429, e3410ef6dd, 4a680af4ae, 0d3ac1d5d1, b1594214bc, 3614306094, f1155292bc, 30e0fc202a, 91afa29862, abee7403b7, 986a8f7f5f, 41a9e92030, 4f78a5cf82, d49d5339fe, 5d7b4e9a19, c58e10bfa0, 3e8500e825, 52ab9b361d, f938516f65, 325aee3377, 8002a93069, 3ee5e51767, a1b1d412ed, 10f30dae41, 698a9ded17, 420c6a6310, 60b4b025ba, f0132c2ce5, 0658108a1f, fc92d489e2, 5e8dc3e34e, eec1c77071, 49ca7f2e2b, c4d09fd7d4, 4bbbbb1660, b252ac2994, f7e1810077, ba8612b990, 45c567410e, a570c31634, 8260aa7512, e44a26e49e, 3d1f4cc9f2, 27a14289cd, a09c95fc3d, ab0afc1f4a, ad4bd9e24c, 4c6720c9ac, 03ce4b91a6, 06f99939f0, a574c7b149, 98d6b486d6, 89fb830b3a, e8032da027, 32366e2f5a, a6f5a5be95, 1cada2b695, 3ac748b349, 64b28fc0b3, 0e63183242, 9bf0c9f313, 46d4a90ba8, d0054335f6, 5d8da5f29a, 8fe744920f, 5c756fa2c4, b0d147f4fc, 26d41b3c3a, 31ff4c1116, 7b68f443b8, 6a1b8a7191, 0627557c6d, 093a68f23d, 34dfc3c08f, 2eb681fab9, 2d0b387560, 834953f10d, 1f2ae4fb28, 15d3c3e25b, 640a668b85, 895203bc0f, 2003c1e474, 63327ae64b, cdb16b4ddb, c4fa52adb3, 2fef226bd6, d00faefd06, 9014b88416, 51b1523cfb, e257138f4e, 80321b9e69, 09c71b67b1
@@ -1,2 +0,0 @@
-[alias]
-prisma = "run -p prisma-cli --"
31 .github/workflows/build_docker_image.yml vendored

@@ -3,48 +3,43 @@ name: Build docker image
 on:
   push:
     branches:
-      - 'main'
+      - "main"

 jobs:
   Build-Docker-Image:
     runs-on: ubuntu-latest
     steps:
-      -
-        name: Checkout
-        uses: actions/checkout@v3
+      - name: Checkout
+        uses: actions/checkout@v4

-      -
-        name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3

       - id: repository_name
-        uses: ASzc/change-string-case-action@v5
+        uses: ASzc/change-string-case-action@v6
         with:
           string: ${{ github.repository }}

-      -
-        name: Login to ghcr.io
-        uses: docker/login-action@v2
+      - name: Login to ghcr.io
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      -
-        name: Build and push
+      - name: Build and push
         id: docker_build
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v6
         env:
           IMAGE: ${{ steps.repository_name.outputs.lowercase }}
         with:
           push: true
           platforms: linux/amd64
-          tags: ghcr.io/${{ env.IMAGE }}:latest
+          tags: ghcr.io/${{ env.IMAGE }}:latest,ghcr.io/${{ env.IMAGE }}:${{ github.sha }}
           context: .
           file: ./docker/build.dockerfile

-      -
-        name: Invoke deployment hook
+      - name: Invoke deployment hook
         uses: joelwmale/webhook-action@master
         with:
-          url: ${{ secrets.WEBHOOK_URL }}
+          url: ${{ secrets.WEBHOOK_URL }}?BOOK_SERVER_TAG=${{ github.sha }}
1 .gitignore vendored

@@ -2,3 +2,4 @@

 .env
 .vscode
+.idea
7 .pre-commit-config.yaml Normal file

@@ -0,0 +1,7 @@
+repos:
+  - repo: https://github.com/doublify/pre-commit-rust
+    rev: v1.0
+    hooks:
+      - id: fmt
+      - id: cargo-check
+      - id: clippy
23 .sqlx/query-078bb62c5139d159bc17d98480846591fe42a466b788e7c27e1a64a6549bfda3.json generated Normal file

@@ -0,0 +1,23 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT COUNT(*) FROM book_sequences bs\n JOIN books b ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [null]
  },
  "hash": "078bb62c5139d159bc17d98480846591fe42a466b788e7c27e1a64a6549bfda3"
}
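This cache entry corresponds to a compile-time-checked sqlx query. A minimal sketch (not the repository's actual code) of how it might be driven from Rust; the function name, signature, and `pool` are assumptions, and in offline mode the SQL text must match the cached query exactly for the hash to resolve:

```rust
use sqlx::PgPool;

// Count non-deleted books in a sequence, restricted to allowed languages.
async fn count_books_in_sequence(
    pool: &PgPool,
    sequence_id: i32,
    allowed_langs: &[String],
) -> Result<i64, sqlx::Error> {
    // COUNT(*) is reported as nullable by Postgres, hence Option<i64>,
    // matching the `"nullable": [null]` entry in the cache file.
    let count: Option<i64> = sqlx::query_scalar!(
        "SELECT COUNT(*) FROM book_sequences bs\n JOIN books b ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)",
        sequence_id,
        allowed_langs
    )
    .fetch_one(pool)
    .await?;
    Ok(count.unwrap_or(0))
}
```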
40 .sqlx/query-1875c11e55b61fd58e916f7663e2649e0c09ae604e620274718dd465e7958f64.json generated Normal file

@@ -0,0 +1,40 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n aa.id,\n aa.title,\n aa.text,\n aa.file\n FROM author_annotations aa\n WHERE aa.author = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "text", "type_info": "Text" },
      { "ordinal": 3, "name": "file", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, true]
  },
  "hash": "1875c11e55b61fd58e916f7663e2649e0c09ae604e620274718dd465e7958f64"
}
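A sketch of a row type this annotation query could decode into; the struct and helper names are hypothetical. `file` becomes `Option<String>` because the cache marks that column nullable:

```rust
use sqlx::PgPool;

// Hypothetical row type for the author_annotations query above.
struct AuthorAnnotation {
    id: i32,
    title: String,
    text: String,
    file: Option<String>, // nullable column in the cache metadata
}

async fn get_author_annotation(
    pool: &PgPool,
    author_id: i32,
) -> Result<Option<AuthorAnnotation>, sqlx::Error> {
    sqlx::query_as!(
        AuthorAnnotation,
        "\n SELECT\n aa.id,\n aa.title,\n aa.text,\n aa.file\n FROM author_annotations aa\n WHERE aa.author = $1\n ",
        author_id
    )
    .fetch_optional(pool)
    .await
}
```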
29 .sqlx/query-191a5f0ddc1e4631b594f07710157cfdb3e002821f1ebb112fe772b274d08b1c.json generated Normal file

@@ -0,0 +1,29 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "available_types!: Vec<String>", "type_info": "TextArray" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [false, null]
  },
  "hash": "191a5f0ddc1e4631b594f07710157cfdb3e002821f1ebb112fe772b274d08b1c"
}
66 .sqlx/query-1f78b5cbdae5f9732e3637fcfd1605477bafc12a443900276c46df644a7f6d26.json generated Normal file

@@ -0,0 +1,66 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n ORDER BY genres.id ASC\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "remote_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "code", "type_info": "Varchar" },
      { "ordinal": 3, "name": "description", "type_info": "Varchar" },
      { "ordinal": 4, "name": "meta", "type_info": "Varchar" },
      { "ordinal": 5, "name": "source!: Source", "type_info": { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, false, false, false, null]
  },
  "hash": "1f78b5cbdae5f9732e3637fcfd1605477bafc12a443900276c46df644a7f6d26"
}
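The `ROW(...)::source_type` projection above targets a Postgres composite type. A minimal sqlx mapping sketch, assuming the `Source` struct named in the cache's `source!: Source` annotation; the derive is the standard sqlx composite-type mapping, and the field layout comes from the `Composite` metadata:

```rust
// Maps the Postgres composite `source_type (id int4, name varchar)`.
#[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "source_type")]
struct Source {
    id: i32,
    name: String,
}
```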
175 .sqlx/query-2d44679efadfba5a350a1612b9cf1ba241c4ddb38babf0cac5bdd8049e894ac8.json generated Normal file

@@ -0,0 +1,175 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n WHERE b.id = ANY($1)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "translators!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 9, "name": "sequences!: Vec<Sequence>", "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } } },
      { "ordinal": 10, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4Array"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null, null]
  },
  "hash": "2d44679efadfba5a350a1612b9cf1ba241c4ddb38babf0cac5bdd8049e894ac8"
}
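A sketch of the composite element types behind `author_type[]` and `sequence_type[]` in the query above, which is what would let sqlx decode the aggregated rows into `Vec<Author>` / `Vec<Sequence>`. The struct names mirror the cache's Rust type annotations but are still assumptions here; recent sqlx versions also derive Postgres array support for such composites unless opted out:

```rust
// Maps the Postgres composite `author_type`; the ROW(...) in the query
// packs id, names, and an annotation-exists flag into this shape.
#[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "author_type")]
struct Author {
    id: i32,
    first_name: String,
    last_name: String,
    // Option because authors.middle_name is passed through raw here,
    // while other queries in this diff COALESCE it, suggesting NULLs.
    middle_name: Option<String>,
    annotation_exists: bool,
}

// Maps the Postgres composite `sequence_type (id int4, name varchar)`.
#[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "sequence_type")]
struct Sequence {
    id: i32,
    name: String,
}
```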
29 .sqlx/query-4144af136af6f9a148a87030ce0f7c7625f4296bbee0ffdd24a81571e2afd54e.json generated Normal file

@@ -0,0 +1,29 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "available_types!: Vec<String>", "type_info": "TextArray" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [false, null]
  },
  "hash": "4144af136af6f9a148a87030ce0f7c7625f4296bbee0ffdd24a81571e2afd54e"
}
28 .sqlx/query-57f37e885a05ace86e5768a8ad7ac04f5d48784885db7d04d04e277d8c51970c.json generated Normal file

@@ -0,0 +1,28 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT id, name FROM sequences WHERE id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false]
  },
  "hash": "57f37e885a05ace86e5768a8ad7ac04f5d48784885db7d04d04e277d8c51970c"
}
23 .sqlx/query-600cfd73a3a1c465c19d98dc4ba6381872d82f954b0733aa9518df2ee7701b6e.json generated Normal file

@@ -0,0 +1,23 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [null]
  },
  "hash": "600cfd73a3a1c465c19d98dc4ba6381872d82f954b0733aa9518df2ee7701b6e"
}
47 .sqlx/query-6e1d93e7773059ec3cb4fd29259f4f0250868c3f56e7b017ba8e5c20ccffb57d.json generated Normal file

@@ -0,0 +1,47 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n ORDER BY a.id ASC\n OFFSET $1\n LIMIT $2\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "middle_name!: String", "type_info": "Varchar" },
      { "ordinal": 4, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int8", "Int8"] },
    "nullable": [false, false, false, null, null]
  },
  "hash": "6e1d93e7773059ec3cb4fd29259f4f0250868c3f56e7b017ba8e5c20ccffb57d"
}
137 .sqlx/query-71ddfa47ccbd71543a0ff402f9b077d7035ad35fb5e714f5d88357169b46b0fe.json generated Normal file

@@ -0,0 +1,137 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n OFFSET $3\n LIMIT $4\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "sequences!: Vec<Sequence>", "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } } },
      { "ordinal": 9, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4", "TextArray", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null]
  },
  "hash": "71ddfa47ccbd71543a0ff402f9b077d7035ad35fb5e714f5d88357169b46b0fe"
}
22 .sqlx/query-78e41ab1e7ca7b6acaf21aec5015a75c2962f6085a5774773fba8acb5e166e2e.json generated Normal file

@@ -0,0 +1,22 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*) FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["Text"] },
    "nullable": [null]
  },
  "hash": "78e41ab1e7ca7b6acaf21aec5015a75c2962f6085a5774773fba8acb5e166e2e"
}
276 .sqlx/query-981703669c9152946a541f70a84ec5dbf481e7a28f3d5949fbc34588561104e5.json generated Normal file

@@ -0,0 +1,276 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n genres.id,\n ROW(\n sources.id,\n sources.name\n )::source_type,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta\n )::genre_type\n )\n FROM book_genres\n JOIN genres ON genres.id = book_genres.genre\n JOIN sources ON sources.id = genres.source\n WHERE book_genres.book = b.id\n ),\n ARRAY[]::genre_type[]\n ) AS \"genres!: Vec<Genre>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id,\n b.is_deleted,\n b.pages\n FROM books b\n WHERE b.id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "translators!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 9, "name": "sequences!: Vec<Sequence>", "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } } },
      { "ordinal": 10, "name": "genres!: Vec<Genre>", "type_info": { "Custom": { "name": "genre_type[]", "kind": { "Array": { "Custom": { "name": "genre_type", "kind": { "Composite": [["id", "Int4"], ["source", { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } }], ["remote_id", "Int4"], ["code", "Varchar"], ["description", "Varchar"], ["meta", "Varchar"]] } } } } } } },
      { "ordinal": 11, "name": "annotation_exists!: bool", "type_info": "Bool" },
      { "ordinal": 12, "name": "source!: Source", "type_info": { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } },
      { "ordinal": 13, "name": "remote_id", "type_info": "Int4" },
      { "ordinal": 14, "name": "is_deleted", "type_info": "Bool" },
      { "ordinal": 15, "name": "pages", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null, null, null, null, false, false, true]
  },
  "hash": "981703669c9152946a541f70a84ec5dbf481e7a28f3d5949fbc34588561104e5"
}
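A sketch of a detail row matching the 16 columns above; all names are assumptions guided by the cache's Rust type annotations, `Option<_>` fields follow the `nullable` vector (only `pages` can be NULL), and `Author`, `Sequence`, and `Source` are the composite sketches shown earlier. `NaiveDate` assumes sqlx's `chrono` feature:

```rust
use chrono::NaiveDate;

// Maps the Postgres composite `genre_type`, including its nested source.
#[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "genre_type")]
struct Genre {
    id: i32,
    source: Source,
    remote_id: i32,
    code: String,
    description: String,
    meta: String,
}

// Hypothetical row type for the single-book detail query above.
struct BookDetail {
    id: i32,
    title: String,
    lang: String,
    file_type: String,
    year: i16,
    available_types: Vec<String>,
    uploaded: NaiveDate,
    authors: Vec<Author>,
    translators: Vec<Author>,
    sequences: Vec<Sequence>,
    genres: Vec<Genre>,
    annotation_exists: bool,
    source: Source,
    remote_id: i32,
    is_deleted: bool,
    pages: Option<i32>, // the only nullable scalar column in the cache
}
```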
35 .sqlx/query-9bb82eaa3dcf8ead767d5f9ac9dbe8d70f8e68b12f6b004a9e495aa8ebc6d250.json generated Normal file

@@ -0,0 +1,35 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "available_types!: Vec<String>", "type_info": "TextArray" }
    ],
    "parameters": { "Left": ["TextArray", "Bool", "Date", "Date", "Int4", "Int4", "Int8", "Int8"] },
    "nullable": [false, null]
  },
  "hash": "9bb82eaa3dcf8ead767d5f9ac9dbe8d70f8e68b12f6b004a9e495aa8ebc6d250"
}
23 .sqlx/query-9be35f43d7faa0c65c88ced8ee10347ae67e6a906461fb4858fc003824f4b260.json generated Normal file

@@ -0,0 +1,23 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [null]
  },
  "hash": "9be35f43d7faa0c65c88ced8ee10347ae67e6a906461fb4858fc003824f4b260"
}
70 .sqlx/query-a22bfa2e92bf4a3b0710388c6c5bbfa50f24864b183bb304d35cea18babd8ce3.json generated Normal file

@@ -0,0 +1,70 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ORDER BY genres.id ASC\n LIMIT $2 OFFSET $3\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "remote_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "code", "type_info": "Varchar" },
      { "ordinal": 3, "name": "description", "type_info": "Varchar" },
      { "ordinal": 4, "name": "meta", "type_info": "Varchar" },
      { "ordinal": 5, "name": "source!: Source", "type_info": { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } }
    ],
    "parameters": { "Left": ["Text", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, null]
  },
  "hash": "a22bfa2e92bf4a3b0710388c6c5bbfa50f24864b183bb304d35cea18babd8ce3"
}
20 .sqlx/query-b4733c7414c62520fb74e3302f9c01bc351153930117c58832981990db038e74.json generated Normal file

@@ -0,0 +1,20 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT COUNT(*) FROM authors",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": [] },
    "nullable": [null]
  },
  "hash": "b4733c7414c62520fb74e3302f9c01bc351153930117c58832981990db038e74"
}
29 .sqlx/query-b4c8511c5b3c157a64e4783ff6acd469abb21c5fda9ed9728e36b5b1d02d9aba.json generated Normal file

@@ -0,0 +1,29 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "available_types!: Vec<String>", "type_info": "TextArray" }
    ],
    "parameters": { "Left": ["Int4", "TextArray"] },
    "nullable": [false, null]
  },
  "hash": "b4c8511c5b3c157a64e4783ff6acd469abb21c5fda9ed9728e36b5b1d02d9aba"
}
155 .sqlx/query-b6556c3bf60306517850e476d764c01e1e4538d6cf937096ad4a8e42a9657b9c.json generated Normal file

@@ -0,0 +1,155 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n bs.position\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ORDER BY bs.position\n LIMIT $3 OFFSET $4\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "translators!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 9, "name": "annotation_exists!: bool", "type_info": "Bool" },
      { "ordinal": 10, "name": "position", "type_info": "Int2" }
    ],
    "parameters": { "Left": ["Int4", "TextArray", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null, false]
  },
  "hash": "b6556c3bf60306517850e476d764c01e1e4538d6cf937096ad4a8e42a9657b9c"
}
40 .sqlx/query-b83f6df4dea9bad87d0423ad307da8c72e2c343181afa0f5bce3e1a43dee7c8c.json generated Normal file

@@ -0,0 +1,40 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n id,\n title,\n text,\n file\n FROM book_annotations\n WHERE book = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "text", "type_info": "Text" },
      { "ordinal": 3, "name": "file", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, true]
  },
  "hash": "b83f6df4dea9bad87d0423ad307da8c72e2c343181afa0f5bce3e1a43dee7c8c"
}
210 .sqlx/query-bb036838069e57b6f88ec4dd3b53d6b44b1d9a4e01c5f80343e33e116e422bb5.json generated Normal file

@@ -0,0 +1,210 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "translators!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 9, "name": "sequences!: Vec<Sequence>", "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } } },
      { "ordinal": 10, "name": "annotation_exists!: bool", "type_info": "Bool" },
      { "ordinal": 11, "name": "source!: Source", "type_info": { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } },
      { "ordinal": 12, "name": "remote_id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["TextArray", "Bool", "Date", "Date", "Int4", "Int4", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null, null, null, false]
  },
  "hash": "bb036838069e57b6f88ec4dd3b53d6b44b1d9a4e01c5f80343e33e116e422bb5"
}
27 .sqlx/query-c0be89ba0ef10d97bb82401fed4196ffd2be48ce4e5586ba6da63c78793bb1db.json generated Normal file

@@ -0,0 +1,27 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*) FROM books\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["TextArray", "Bool", "Date", "Date", "Int4", "Int4"] },
    "nullable": [null]
  },
  "hash": "c0be89ba0ef10d97bb82401fed4196ffd2be48ce4e5586ba6da63c78793bb1db"
}
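The `($N::type IS NULL OR column ...)` pattern in this COUNT query makes every filter optional at the SQL level, so a single cached statement serves all filter combinations. A sketch of binding those optional filters from Rust with `Option<_>` parameters (encoded as NULL when `None`); the function name is hypothetical and `NaiveDate` assumes sqlx's `chrono` feature:

```rust
use chrono::NaiveDate;
use sqlx::PgPool;

// Count books matching any combination of optional filters.
async fn count_filtered_books(
    pool: &PgPool,
    langs: &[String],
    is_deleted: Option<bool>,
    uploaded_gte: Option<NaiveDate>,
    uploaded_lte: Option<NaiveDate>,
    id_gte: Option<i32>,
    id_lte: Option<i32>,
) -> Result<i64, sqlx::Error> {
    let count = sqlx::query_scalar!(
        "\n SELECT COUNT(*) FROM books\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ",
        langs,
        is_deleted,   // None binds SQL NULL, disabling the filter
        uploaded_gte,
        uploaded_lte,
        id_gte,
        id_lte
    )
    .fetch_one(pool)
    .await?;
    Ok(count.unwrap_or(0))
}
```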
28 .sqlx/query-d14c08d4d25201d30178c2313650db1aaef355968970f7f0a75b88bba209dc20.json generated Normal file

@@ -0,0 +1,28 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT id, name FROM sequences WHERE id = ANY($1)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "name", "type_info": "Varchar" }
    ],
    "parameters": { "Left": ["Int4Array"] },
    "nullable": [false, false]
  },
  "hash": "d14c08d4d25201d30178c2313650db1aaef355968970f7f0a75b88bba209dc20"
}
46 .sqlx/query-d6584aea52bc3abcbb4d9f491ef357845b562cf83d2e135b7542ebca2024a3f3.json generated Normal file

@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = $1\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "middle_name!: String", "type_info": "Varchar" },
      { "ordinal": 4, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, null, null]
  },
  "hash": "d6584aea52bc3abcbb4d9f491ef357845b562cf83d2e135b7542ebca2024a3f3"
}
46 .sqlx/query-eaeeab7481036b78b9323b5d9e99e9a14a39e4f6c1489fe564045e937c38769c.json generated Normal file

@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = ANY($1)\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "middle_name!: String", "type_info": "Varchar" },
      { "ordinal": 4, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4Array"] },
    "nullable": [false, false, false, null, null]
  },
  "hash": "eaeeab7481036b78b9323b5d9e99e9a14a39e4f6c1489fe564045e937c38769c"
}
277 .sqlx/query-fb0d1b13928611d566514fd103df973ad1c81fd60efada560e89a2b40a6d3fc1.json generated Normal file

@@ -0,0 +1,277 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n genres.id,\n ROW(\n sources.id,\n sources.name\n )::source_type,\n remote_id,\n genres.code,\n genres.description,\n genres.meta\n )::genre_type\n )\n FROM book_genres\n JOIN genres ON genres.id = book_genres.genre\n JOIN sources ON sources.id = genres.source\n WHERE book_genres.book = b.id\n ),\n ARRAY[]::genre_type[]\n ) AS \"genres!: Vec<Genre>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id,\n b.is_deleted,\n b.pages\n FROM books b\n WHERE b.source = $1 AND b.remote_id = $2\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      { "ordinal": 7, "name": "authors!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 8, "name": "translators!: Vec<Author>", "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } } },
      { "ordinal": 9, "name": "sequences!: Vec<Sequence>", "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } } },
      { "ordinal": 10, "name": "genres!: Vec<Genre>", "type_info": { "Custom": { "name": "genre_type[]", "kind": { "Array": { "Custom": { "name": "genre_type", "kind": { "Composite": [["id", "Int4"], ["source", { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } }], ["remote_id", "Int4"], ["code", "Varchar"], ["description", "Varchar"], ["meta", "Varchar"]] } } } } } } },
      { "ordinal": 11, "name": "annotation_exists!: bool", "type_info": "Bool" },
      { "ordinal": 12, "name": "source!: Source", "type_info": { "Custom": { "name": "source_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } },
      { "ordinal": 13, "name": "remote_id", "type_info": "Int4" },
      { "ordinal": 14, "name": "is_deleted", "type_info": "Bool" },
      { "ordinal": 15, "name": "pages", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Int2", "Int4"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null, null, null, null, false, false, true]
  },
  "hash": "fb0d1b13928611d566514fd103df973ad1c81fd60efada560e89a2b40a6d3fc1"
}
137 .sqlx/query-ff9694275aad3c0cbb3bddb87a45550615d1996328ffba98a6d01aaa2b17ec2b.json generated Normal file
@@ -0,0 +1,137 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ORDER BY b.title ASC\n OFFSET $3\n LIMIT $4\n ",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Varchar" },
      { "ordinal": 2, "name": "lang", "type_info": "Varchar" },
      { "ordinal": 3, "name": "file_type", "type_info": "Varchar" },
      { "ordinal": 4, "name": "year", "type_info": "Int2" },
      { "ordinal": 5, "name": "available_types!: Vec<String>", "type_info": "TextArray" },
      { "ordinal": 6, "name": "uploaded", "type_info": "Date" },
      {
        "ordinal": 7,
        "name": "translators!: Vec<Author>",
        "type_info": { "Custom": { "name": "author_type[]", "kind": { "Array": { "Custom": { "name": "author_type", "kind": { "Composite": [["id", "Int4"], ["first_name", "Varchar"], ["last_name", "Varchar"], ["middle_name", "Varchar"], ["annotation_exists", "Bool"]] } } } } } }
      },
      {
        "ordinal": 8,
        "name": "sequences!: Vec<Sequence>",
        "type_info": { "Custom": { "name": "sequence_type[]", "kind": { "Array": { "Custom": { "name": "sequence_type", "kind": { "Composite": [["id", "Int4"], ["name", "Varchar"]] } } } } } }
      },
      { "ordinal": 9, "name": "annotation_exists!: bool", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4", "TextArray", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, null, false, null, null, null]
  },
  "hash": "ff9694275aad3c0cbb3bddb87a45550615d1996328ffba98a6d01aaa2b17ec2b"
}
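The .sqlx/query-*.json files above are sqlx's offline query metadata: running `cargo sqlx prepare` against a live database describes every `query!`/`query_as!` statement in the crate and caches its column types, parameter types, and nullability under the statement's hash, so later builds can type-check the SQL with SQLX_OFFLINE=true and no database connection. A minimal sketch of the kind of call that produces such an entry (the struct and statement here are illustrative, not taken from this diff):

// Sketch only: `cargo sqlx prepare` would emit a .sqlx/query-<hash>.json
// file describing this statement's columns and parameters.
struct Sequence {
    id: i32,
    name: String,
}

async fn find_sequence(pool: &sqlx::PgPool, id: i32) -> sqlx::Result<Sequence> {
    // At compile time the macro checks this SQL against the cached describe data.
    sqlx::query_as!(Sequence, "SELECT id, name FROM sequences WHERE id = $1", id)
        .fetch_one(pool)
        .await
}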
5510 Cargo.lock generated
File diff suppressed because it is too large Load Diff
48 Cargo.toml
@@ -3,33 +3,43 @@ name = "book_library_server"
 version = "0.1.0"
 edition = "2021"
 
-[workspace]
-members = [
-    "prisma-cli"
-]
-
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [profile.release]
 opt-level = 3
 debug = false
 strip = true
 lto = true
 codegen-units = 1
 panic = 'abort'
 
+[profile.profiling]
+inherits = "release"
+debug = true
+strip = false
+
 
 [dependencies]
-once_cell = "1.18.0"
+once_cell = "1.21.1"
 
-prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
-tokio = { version = "1.28.2", features = ["full"] }
+tokio = { version = "1.44.2", features = ["full"] }
 
-tracing = "0.1.37"
-tracing-subscriber = { version = "0.3.17", features = ["env-filter"]}
-tower-http = { version = "0.4.3", features = ["trace"] }
+tracing = "0.1.41"
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
+sentry-tracing = "0.41.0"
+tower-http = { version = "0.6.2", features = ["trace"] }
 
-axum = { version = "0.6.18", features = ["json"] }
-axum-extra = { version ="0.7.7", features = ["query"] }
-axum-prometheus = "0.4.0"
-serde = { version = "1.0.163", features = ["derive"] }
+axum = { version = "0.8.1", features = ["json"] }
+axum-extra = { version ="0.10.0", features = ["query"] }
+axum-prometheus = "0.8.0"
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.140", features = ["raw_value"] }
 
-sentry = { version = "0.31.3", features = ["debug-images"] }
+sentry = { version = "0.41.0", features = ["debug-images"] }
 
-meilisearch-sdk = "0.24.1"
+meilisearch-sdk = "0.28.0"
 
-rand = "0.8.5"
+rand = "0.9.0"
 
-chrono = "0.4.26"
+chrono = { version = "0.4.40", features = ["serde"] }
+sqlx = { version = "0.8.3", features = ["runtime-tokio", "postgres", "macros", "chrono", "json"] }
Dockerfile
@@ -1,4 +1,4 @@
-FROM rust:bullseye AS builder
+FROM rust:bookworm AS builder
 
 WORKDIR /app
 
@@ -7,15 +7,18 @@ COPY . .
 RUN cargo build --release --bin book_library_server
 
 
-FROM debian:bullseye-slim
+FROM debian:bookworm-slim
 
 RUN apt-get update \
-    && apt-get install -y openssl ca-certificates \
+    && apt-get install -y openssl ca-certificates curl jq \
     && rm -rf /var/lib/apt/lists/*
 
 RUN update-ca-certificates
 
+COPY ./scripts/*.sh /
+RUN chmod +x /*.sh
+
 WORKDIR /app
 
 COPY --from=builder /app/target/release/book_library_server /usr/local/bin
-ENTRYPOINT ["/usr/local/bin/book_library_server"]
+CMD ["/start.sh"]
3 prisma-cli/.gitignore vendored
@@ -1,3 +0,0 @@
-node_modules
-# Keep environment variables out of version control
-.env
4622 prisma-cli/Cargo.lock generated
File diff suppressed because it is too large Load Diff
@@ -1,9 +0,0 @@
-[package]
-name = "prisma-cli"
-version = "0.1.0"
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
@@ -1,3 +0,0 @@
-fn main() {
-    prisma_client_rust_cli::run();
-}
@@ -1,165 +0,0 @@
-generator client {
-  provider = "cargo prisma"
-  output = "../src/prisma.rs"
-}
-
-datasource db {
-  provider = "postgresql"
-  url = env("DATABASE_URL")
-}
-
-model AuthorAnnotation {
-  id Int @id @default(autoincrement())
-  author_id Int @unique @map("author")
-  title String @db.VarChar(256)
-  text String
-  file String? @db.VarChar(256)
-  author Author @relation(fields: [author_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_author_annotations_authors_id_author")
-
-  @@map("author_annotations")
-}
-
-model Author {
-  id Int @id @default(autoincrement())
-  source_id Int @map("source") @db.SmallInt
-  remote_id Int
-  first_name String @db.VarChar(256)
-  last_name String @db.VarChar(256)
-  middle_name String? @db.VarChar(256)
-  source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_authors_sources_id_source")
-  author_annotation AuthorAnnotation?
-  book_authors BookAuthor[]
-  translations Translator[]
-
-  @@unique([source_id, remote_id], map: "uc_authors_source_remote_id")
-  @@index([last_name(ops: raw("gin_trgm_ops"))], map: "tgrm_authors_l", type: Gin)
-  @@map("authors")
-}
-
-model BookAnnotation {
-  id Int @id @default(autoincrement())
-  book_id Int @unique @map("book")
-  title String @db.VarChar(256)
-  text String
-  file String? @db.VarChar(256)
-  book Book @relation(fields: [book_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_book_annotations_books_id_book")
-
-  @@map("book_annotations")
-}
-
-model BookAuthor {
-  id Int @id @default(autoincrement())
-  author_id Int @map("author")
-  book_id Int @map("book")
-  author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_book_authors_authors_author_id")
-  book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_authors_books_book_id")
-
-  @@unique([book_id, author_id], map: "uc_book_authors_book_author")
-  @@index([author_id], map: "book_authors_author")
-  @@index([book_id], map: "book_authors_book")
-  @@map("book_authors")
-}
-
-model BookGenre {
-  id Int @id @default(autoincrement())
-  genre_id Int @map("genre")
-  book_id Int @map("book")
-  book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_genres_books_book_id")
-  genre Genre @relation(fields: [genre_id], references: [id], onDelete: Cascade, map: "fk_book_genres_genres_genre_id")
-
-  @@unique([book_id, genre_id], map: "uc_book_genres_book_genre")
-  @@index([book_id], map: "book_genres_book")
-  @@index([genre_id], map: "book_genres_genre")
-  @@map("book_genres")
-}
-
-model BookSequence {
-  id Int @id @default(autoincrement())
-  position Int @db.SmallInt
-  sequence_id Int @map("sequence")
-  book_id Int @map("book")
-  book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_books_book_id")
-  sequence Sequence @relation(fields: [sequence_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_sequences_sequence_id")
-
-  @@unique([book_id, sequence_id], map: "uc_book_sequences_book_sequence")
-  @@index([book_id], map: "book_sequences_book")
-  @@index([sequence_id], map: "book_sequences_sequence")
-  @@map("book_sequences")
-}
-
-model Book {
-  id Int @id @default(autoincrement())
-  source_id Int @map("source") @db.SmallInt
-  remote_id Int
-  title String @db.VarChar(256)
-  lang String @db.VarChar(3)
-  file_type String @db.VarChar(4)
-  uploaded DateTime @db.Date
-  is_deleted Boolean @default(false)
-  pages Int?
-  source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_books_sources_id_source")
-  book_annotation BookAnnotation?
-  book_authors BookAuthor[]
-  book_genres BookGenre[]
-  book_sequences BookSequence[]
-  translations Translator[]
-
-  @@unique([source_id, remote_id], map: "uc_books_source_remote_id")
-  @@index([file_type], map: "ix_books_file_type")
-  @@index([title], map: "ix_books_title")
-  @@index([title(ops: raw("gin_trgm_ops"))], map: "trgm_books_title", type: Gin)
-  @@map("books")
-}
-
-model Genre {
-  id Int @id @default(autoincrement())
-  source_id Int @map("source") @db.SmallInt
-  remote_id Int
-  code String @db.VarChar(45)
-  description String @db.VarChar(99)
-  meta String @db.VarChar(45)
-  source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_genres_sources_id_source")
-  book_genres BookGenre[]
-
-  @@unique([source_id, remote_id], map: "uc_genres_source_remote_id")
-  @@map("genres")
-}
-
-model Sequence {
-  id Int @id @default(autoincrement())
-  source_id Int @map("source") @db.SmallInt
-  remote_id Int
-  name String @db.VarChar(256)
-  source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_sequences_sources_id_source")
-  book_sequences BookSequence[]
-
-  @@unique([source_id, remote_id], map: "uc_sequences_source_remote_id")
-  @@index([name], map: "ix_sequences_name")
-  @@index([name(ops: raw("gin_trgm_ops"))], map: "tgrm_sequences_name", type: Gin)
-  @@map("sequences")
-}
-
-model Source {
-  id Int @id @default(autoincrement()) @db.SmallInt
-  name String @unique @db.VarChar(32)
-  authors Author[]
-  books Book[]
-  genres Genre[]
-  sequences Sequence[]
-
-  @@map("sources")
-}
-
-model Translator {
-  id Int @id @default(autoincrement())
-  position Int @db.SmallInt
-  author_id Int @map("author")
-  book_id Int @map("book")
-  author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_translations_authors_author_id")
-  book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_translations_books_book_id")
-
-  @@unique([book_id, author_id], map: "uc_translations_book_author")
-  @@index([author_id], map: "translations_author")
-  @@index([book_id], map: "translations_book")
-  @@map("translations")
-}
12 scripts/env.sh Normal file
@@ -0,0 +1,12 @@
#! /usr/bin/env sh

response=`curl -X 'GET' "https://$VAULT_HOST/v1/$VAULT_SECRET_PATH" -s \
    -H 'accept: application/json' \
    -H "X-Vault-Token: $VAULT_TOKEN"`

data=`echo $response | jq -r '.data.data'`

for key in $(echo "$data" | jq -r 'keys[]'); do
    value=$(echo "$data" | jq -r ".\"$key\"") # Corrected syntax
    echo "$key"="$value"
done
5 scripts/start.sh Normal file
@@ -0,0 +1,5 @@
#! /usr/bin/env sh

export $(/env.sh)

exec /usr/local/bin/book_library_server
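Together these two scripts form the container's startup path: env.sh fetches the secret payload from Vault (hence the curl and jq added to the Dockerfile above) and prints one KEY=VALUE pair per line, and start.sh exports that output into the environment before exec'ing the server binary. Note that `export $(/env.sh)` relies on shell word-splitting, so it only behaves correctly for secret values that contain no whitespace.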
src/config.rs
@@ -16,7 +16,7 @@ pub struct Config {
     pub meili_host: String,
     pub meili_master_key: String,
 
-    pub sentry_dsn: String
+    pub sentry_dsn: String,
 }
 
 impl Config {
@@ -33,11 +33,9 @@ impl Config {
             meili_host: get_env("MEILI_HOST"),
             meili_master_key: get_env("MEILI_MASTER_KEY"),
 
-            sentry_dsn: get_env("SENTRY_DSN")
+            sentry_dsn: get_env("SENTRY_DSN"),
         }
     }
 }
 
-pub static CONFIG: Lazy<Config> = Lazy::new(|| {
-    Config::load()
-});
+pub static CONFIG: Lazy<Config> = Lazy::new(Config::load);
14 src/db.rs
@@ -1,9 +1,10 @@
-use crate::{prisma::PrismaClient, config::CONFIG};
+use crate::config::CONFIG;
+
+use sqlx::{postgres::PgPoolOptions, PgPool};
 
-pub async fn get_prisma_client() -> PrismaClient {
+pub async fn get_postgres_pool() -> PgPool {
     let database_url: String = format!(
-        "postgresql://{}:{}@{}:{}/{}?connection_limit=4",
+        "postgresql://{}:{}@{}:{}/{}",
         CONFIG.postgres_user,
         CONFIG.postgres_password,
         CONFIG.postgres_host,
@@ -11,9 +12,10 @@ pub async fn get_prisma_client() -> PrismaClient {
         CONFIG.postgres_db
     );
 
-    PrismaClient::_builder()
-        .with_url(database_url)
-        .build()
+    PgPoolOptions::new()
+        .max_connections(10)
+        .acquire_timeout(std::time::Duration::from_secs(300))
+        .connect(&database_url)
         .await
         .unwrap()
 }
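The new get_postgres_pool replaces the Prisma client builder with a shared sqlx PgPool. One plausible way such a pool reaches axum handlers (the repo's own Database extractor wraps the pool; this healthcheck route is an assumed illustration, not code from the diff):

// Sketch only: share a PgPool with axum handlers through application state.
use axum::{extract::State, routing::get, Router};
use sqlx::PgPool;

async fn health(State(pool): State<PgPool>) -> &'static str {
    // Borrow one connection from the pool for a cheap liveness probe.
    match sqlx::query_scalar::<_, i32>("SELECT 1").fetch_one(&pool).await {
        Ok(_) => "ok",
        Err(_) => "db error",
    }
}

fn router(pool: PgPool) -> Router {
    Router::new().route("/healthcheck", get(health)).with_state(pool)
}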
28 src/main.rs
@@ -1,17 +1,17 @@
 pub mod config;
-pub mod views;
-pub mod prisma;
 pub mod db;
+pub mod meilisearch;
 pub mod serializers;
-pub mod meilisearch;
+pub mod views;
 
+use sentry::{integrations::debug_images::DebugImagesIntegration, types::Dsn, ClientOptions};
+use sentry_tracing::EventFilter;
 use std::{net::SocketAddr, str::FromStr};
-use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
 use tracing::info;
 use tracing_subscriber::{filter, layer::SubscriberExt, util::SubscriberInitExt};
 
 use crate::views::get_router;
 
 
 #[tokio::main]
 async fn main() {
     let options = ClientOptions {
@@ -23,9 +23,15 @@ async fn main() {
 
     let _guard = sentry::init(options);
 
-    tracing_subscriber::fmt()
-        .with_target(false)
-        .compact()
+    let sentry_layer = sentry_tracing::layer().event_filter(|md| match md.level() {
+        &tracing::Level::ERROR => EventFilter::Event,
+        _ => EventFilter::Ignore,
+    });
+
+    tracing_subscriber::registry()
+        .with(tracing_subscriber::fmt::layer().with_target(false))
+        .with(filter::LevelFilter::INFO)
+        .with(sentry_layer)
         .init();
 
     let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
@@ -33,9 +39,7 @@ async fn main() {
     let app = get_router().await;
 
     info!("Start webserver...");
-    axum::Server::bind(&addr)
-        .serve(app.into_make_service())
-        .await
-        .unwrap();
+    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
+    axum::serve(listener, app).await.unwrap();
     info!("Webserver shutdown...")
 }
src/meilisearch.rs
@@ -1,14 +1,14 @@
-use meilisearch_sdk::Client;
+use meilisearch_sdk::client::Client;
 use serde::Deserialize;
 
 use crate::config::CONFIG;
 
 
 pub fn get_meili_client() -> Client {
-    Client::new(
-        &CONFIG.meili_host,
-        Some(CONFIG.meili_master_key.clone())
-    )
+    Client::new(&CONFIG.meili_host, Some(CONFIG.meili_master_key.clone())).unwrap()
 }
 
+pub trait GetId {
+    fn get_id(&self) -> i32;
+}
+
 #[derive(Deserialize)]
@@ -19,7 +19,13 @@ pub struct AuthorMeili {
     pub middle_name: String,
     pub author_langs: Vec<String>,
     pub translator_langs: Vec<String>,
-    pub books_count: i32
+    pub books_count: i32,
 }
 
+impl GetId for AuthorMeili {
+    fn get_id(&self) -> i32 {
+        self.id
+    }
+}
+
 #[derive(Deserialize)]
@@ -27,7 +33,13 @@ pub struct BookMeili {
     pub id: i32,
     pub title: String,
     pub lang: String,
-    pub genres: Vec<i32>
+    pub genres: Vec<i32>,
 }
 
+impl GetId for BookMeili {
+    fn get_id(&self) -> i32 {
+        self.id
+    }
+}
+
 #[derive(Deserialize)]
@@ -36,7 +48,13 @@ pub struct GenreMeili {
     pub description: String,
     pub meta: String,
     pub langs: Vec<String>,
-    pub books_count: i32
+    pub books_count: i32,
 }
 
+impl GetId for GenreMeili {
+    fn get_id(&self) -> i32 {
+        self.id
+    }
+}
+
 #[derive(Deserialize)]
@@ -44,5 +62,11 @@ pub struct SequenceMeili {
     pub id: i32,
     pub name: String,
     pub langs: Vec<String>,
-    pub books_count: i32
+    pub books_count: i32,
 }
 
+impl GetId for SequenceMeili {
+    fn get_id(&self) -> i32 {
+        self.id
+    }
+}
16099 src/prisma.rs
File diff suppressed because one or more lines are too long
src/serializers/allowed_langs.rs
@@ -1,6 +1,11 @@
 use serde::Deserialize;
 
+fn default_langs() -> Vec<String> {
+    vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
+}
+
 #[derive(Deserialize)]
 pub struct AllowedLangs {
-    pub allowed_langs: Vec<String>
+    #[serde(default = "default_langs")]
+    pub allowed_langs: Vec<String>,
 }
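The added serde default means a request that omits allowed_langs no longer fails deserialization; serde calls default_langs() instead. A self-contained demonstration of the mechanics (shown with JSON for brevity, though the handlers deserialize query strings):

// Sketch: the #[serde(default = "...")] fallback in isolation.
use serde::Deserialize;

fn default_langs() -> Vec<String> {
    vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
}

#[derive(Deserialize)]
struct AllowedLangs {
    #[serde(default = "default_langs")]
    allowed_langs: Vec<String>,
}

fn main() {
    // Missing key: serde fills in default_langs() instead of erroring.
    let v: AllowedLangs = serde_json::from_str("{}").unwrap();
    assert_eq!(v.allowed_langs, vec!["ru", "be", "uk"]);
}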
src/serializers/author.rs
@@ -1,10 +1,11 @@
+use chrono::NaiveDate;
 use serde::Serialize;
 
-use crate::prisma::{author, book};
+use super::date::naive_date_serializer;
+use super::sequence::Sequence;
 
-use super::{sequence::Sequence, utils::{get_available_types, get_translators, get_sequences}};
-
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
+#[sqlx(type_name = "author_type")]
 pub struct Author {
     pub id: i32,
     pub first_name: String,
@@ -13,66 +14,17 @@ pub struct Author {
     pub annotation_exists: bool,
 }
 
-impl From<author::Data> for Author {
-    fn from(val: author::Data) -> Self {
-        let author::Data {
-            id,
-            first_name,
-            last_name,
-            middle_name,
-            author_annotation,
-            ..
-        } = val;
-
-        Author {
-            id,
-            first_name,
-            last_name,
-            middle_name: middle_name.unwrap_or("".to_string()),
-            annotation_exists: author_annotation.unwrap().is_some(),
-        }
-    }
-}
-
-
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, Serialize)]
 pub struct AuthorBook {
     pub id: i32,
     pub title: String,
     pub lang: String,
     pub file_type: String,
     pub year: i32,
     pub available_types: Vec<String>,
-    pub uploaded: String,
+    #[serde(serialize_with = "naive_date_serializer::serialize")]
+    pub uploaded: NaiveDate,
     pub translators: Vec<Author>,
     pub sequences: Vec<Sequence>,
     pub annotation_exists: bool,
 }
-
-impl From<book::Data> for AuthorBook {
-    fn from(val: book::Data) -> Self {
-        let book::Data {
-            id,
-            title,
-            lang,
-            file_type,
-            uploaded,
-            translations,
-            book_sequences,
-            book_annotation,
-            source,
-            ..
-        } = val;
-
-        AuthorBook {
-            id,
-            title,
-            lang,
-            file_type: file_type.clone(),
-            available_types: get_available_types(file_type, source.unwrap().name),
-            uploaded: uploaded.format("%Y-%m-%d").to_string(),
-            translators: get_translators(translations),
-            sequences: get_sequences(book_sequences),
-            annotation_exists: book_annotation.unwrap().is_some(),
-        }
-    }
-}
src/serializers/author_annotation.rs
@@ -1,24 +1,9 @@
 use serde::Serialize;
 
-use crate::prisma::author_annotation;
-
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, Serialize)]
 pub struct AuthorAnnotation {
     pub id: i32,
     pub title: String,
     pub text: String,
-    pub file: Option<String>
-}
-
-impl From<author_annotation::Data> for AuthorAnnotation {
-    fn from(val: author_annotation::Data) -> Self {
-        let author_annotation::Data { id, title, text, file, .. } = val;
-
-        AuthorAnnotation {
-            id,
-            title,
-            text,
-            file
-        }
-    }
+    pub file: Option<String>,
 }
src/serializers/book.rs
@@ -1,78 +1,35 @@
-use chrono::{DateTime, Utc};
-use serde::{Serialize, Deserialize};
+use chrono::NaiveDate;
+use serde::{Deserialize, Serialize};
 
-use crate::prisma::book::{self};
+use super::date::naive_date_serializer;
 
-use super::{source::Source, utils::{get_available_types, get_translators, get_sequences, get_authors, get_genres}, author::Author, sequence::Sequence, genre::Genre};
+use super::{author::Author, genre::Genre, sequence::Sequence, source::Source};
 
 fn default_langs() -> Vec<String> {
     vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
 }
 
 #[derive(Deserialize)]
 pub struct BookFilter {
     #[serde(default = "default_langs")]
     pub allowed_langs: Vec<String>,
     pub is_deleted: Option<bool>,
-    pub uploaded_gte: Option<DateTime<Utc>>,
-    pub uploaded_lte: Option<DateTime<Utc>>,
+    pub uploaded_gte: Option<NaiveDate>,
+    pub uploaded_lte: Option<NaiveDate>,
     pub id_gte: Option<i32>,
     pub id_lte: Option<i32>,
 }
 
-impl BookFilter {
-    pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
-        let mut result = vec![];
-
-        result.push(
-            book::lang::in_vec(self.allowed_langs)
-        );
-
-        match self.is_deleted {
-            Some(v) => {
-                result.push(
-                    book::is_deleted::equals(v)
-                );
-            },
-            None => {
-                result.push(
-                    book::is_deleted::equals(false)
-                );
-            },
-        };
-
-        if let Some(uploaded_gte) = self.uploaded_gte {
-            result.push(
-                book::uploaded::gte(uploaded_gte.into())
-            );
-        };
-
-        if let Some(uploaded_lte) = self.uploaded_lte {
-            result.push(
-                book::uploaded::lte(uploaded_lte.into())
-            );
-        };
-
-        if let Some(id_gte) = self.id_gte {
-            result.push(
-                book::id::gte(id_gte)
-            );
-        };
-
-        if let Some(id_lte) = self.id_lte {
-            result.push(
-                book::id::lte(id_lte)
-            );
-        };
-
-        result
-    }
-}
-
 #[derive(Serialize)]
 pub struct RemoteBook {
     pub id: i32,
     pub title: String,
     pub lang: String,
     pub file_type: String,
     pub year: i32,
     pub available_types: Vec<String>,
-    pub uploaded: String,
+    #[serde(serialize_with = "naive_date_serializer::serialize")]
+    pub uploaded: NaiveDate,
     pub authors: Vec<Author>,
     pub translators: Vec<Author>,
     pub sequences: Vec<Sequence>,
@@ -81,70 +38,22 @@ pub struct RemoteBook {
     pub remote_id: i32,
 }
 
-impl From<book::Data> for RemoteBook {
-    fn from(value: book::Data) -> Self {
-        let book::Data {
-            id,
-            title,
-            lang,
-            file_type,
-            uploaded,
-            book_authors,
-            translations,
-            book_sequences,
-            book_annotation,
-            source,
-            remote_id,
-            ..
-        } = value;
-
-        Self {
-            id,
-            title,
-            lang,
-            file_type: file_type.clone(),
-            available_types: get_available_types(file_type, source.clone().unwrap().name),
-            uploaded: uploaded.format("%Y-%m-%d").to_string(),
-            authors: get_authors(book_authors),
-            translators: get_translators(translations),
-            sequences: get_sequences(book_sequences),
-            annotation_exists: book_annotation.unwrap().is_some(),
-            source: source.unwrap().as_ref().clone().into(),
-            remote_id
-        }
-    }
-}
-
 #[derive(Serialize)]
 pub struct BaseBook {
     pub id: i32,
     pub available_types: Vec<String>,
 }
 
-impl From<book::Data> for BaseBook {
-    fn from(value: book::Data) -> Self {
-        let book::Data {
-            id,
-            file_type,
-            source,
-            ..
-        } = value;
-
-        Self {
-            id,
-            available_types: get_available_types(file_type, source.clone().unwrap().name),
-        }
-    }
-}
-
 #[derive(Serialize)]
 pub struct DetailBook {
     pub id: i32,
     pub title: String,
     pub lang: String,
     pub file_type: String,
     pub year: i32,
     pub available_types: Vec<String>,
-    pub uploaded: String,
+    #[serde(serialize_with = "naive_date_serializer::serialize")]
+    pub uploaded: NaiveDate,
     pub authors: Vec<Author>,
     pub translators: Vec<Author>,
     pub sequences: Vec<Sequence>,
@@ -153,53 +62,13 @@ pub struct DetailBook {
     pub remote_id: i32,
     pub genres: Vec<Genre>,
     pub is_deleted: bool,
-    pub pages: Option<i32>
-}
-
-impl From<book::Data> for DetailBook {
-    fn from(value: book::Data) -> Self {
-        let book::Data {
-            id,
-            title,
-            lang,
-            file_type,
-            uploaded,
-            book_authors,
-            translations,
-            book_sequences,
-            book_annotation,
-            source,
-            remote_id,
-            book_genres,
-            is_deleted,
-            pages,
-            ..
-        } = value;
-
-        Self {
-            id,
-            title,
-            lang,
-            file_type: file_type.clone(),
-            available_types: get_available_types(file_type, source.clone().unwrap().name),
-            uploaded: uploaded.format("%Y-%m-%d").to_string(),
-            authors: get_authors(book_authors),
-            translators: get_translators(translations),
-            sequences: get_sequences(book_sequences),
-            annotation_exists: book_annotation.unwrap().is_some(),
-            source: source.unwrap().as_ref().clone().into(),
-            remote_id,
-            genres: get_genres(book_genres),
-            is_deleted,
-            pages,
-        }
-    }
+    pub pages: Option<i32>,
 }
 
 #[derive(Deserialize)]
 pub struct RandomBookFilter {
     pub allowed_langs: Vec<String>,
-    pub genre: Option<i32>
+    pub genre: Option<i32>,
 }
 
 #[derive(Serialize)]
@@ -208,41 +77,12 @@ pub struct Book {
     pub title: String,
     pub lang: String,
     pub file_type: String,
     pub year: i32,
     pub available_types: Vec<String>,
-    pub uploaded: String,
+    #[serde(serialize_with = "naive_date_serializer::serialize")]
+    pub uploaded: NaiveDate,
     pub authors: Vec<Author>,
     pub translators: Vec<Author>,
     pub sequences: Vec<Sequence>,
     pub annotation_exists: bool,
 }
-
-impl From<book::Data> for Book {
-    fn from(value: book::Data) -> Self {
-        let book::Data {
-            id,
-            title,
-            lang,
-            file_type,
-            uploaded,
-            book_authors,
-            translations,
-            book_sequences,
-            book_annotation,
-            source,
-            ..
-        } = value;
-
-        Self {
-            id,
-            title,
-            lang,
-            file_type: file_type.clone(),
-            available_types: get_available_types(file_type, source.clone().unwrap().name),
-            uploaded: uploaded.format("%Y-%m-%d").to_string(),
-            authors: get_authors(book_authors),
-            translators: get_translators(translations),
-            sequences: get_sequences(book_sequences),
-            annotation_exists: book_annotation.unwrap().is_some(),
-        }
-    }
-}
src/serializers/book_annotation.rs
@@ -1,31 +1,9 @@
 use serde::Serialize;
 
-use crate::prisma::book_annotation;
-
-
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, Serialize)]
 pub struct BookAnnotation {
     pub id: i32,
     pub title: String,
     pub text: String,
-    pub file: Option<String>
-}
-
-impl From<book_annotation::Data> for BookAnnotation {
-    fn from(value: book_annotation::Data) -> Self {
-        let book_annotation::Data {
-            id,
-            title,
-            text,
-            file,
-            ..
-        } = value;
-
-        Self {
-            id,
-            title,
-            text,
-            file
-        }
-    }
+    pub file: Option<String>,
 }
16 src/serializers/date.rs Normal file
@@ -0,0 +1,16 @@
use chrono::NaiveDate;
use serde::Serializer;

const FORMAT: &str = "%Y-%m-%d";

pub mod naive_date_serializer {
    use super::*;

    pub fn serialize<S>(date: &NaiveDate, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let formatted_date = date.format(FORMAT).to_string();
        serializer.serialize_str(&formatted_date)
    }
}
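This new module lets the serializers keep a typed chrono::NaiveDate field while the JSON output stays a plain "YYYY-MM-DD" string, replacing the old pre-formatted String fields. A small demonstration of the attribute in use (the Uploaded struct is illustrative, not from the repo):

// Sketch: routing a NaiveDate through naive_date_serializer during serialization.
use chrono::NaiveDate;
use serde::Serialize;

#[derive(Serialize)]
struct Uploaded {
    #[serde(serialize_with = "naive_date_serializer::serialize")]
    uploaded: NaiveDate,
}

fn main() {
    let v = Uploaded { uploaded: NaiveDate::from_ymd_opt(2024, 1, 31).unwrap() };
    // Prints {"uploaded":"2024-01-31"}
    println!("{}", serde_json::to_string(&v).unwrap());
}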
src/serializers/genre.rs
@@ -1,43 +1,18 @@
-use serde::{Serialize, Deserialize};
-
-use crate::prisma::genre;
+use serde::{Deserialize, Serialize};
 
 use super::source::Source;
 
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
+#[sqlx(type_name = "genre_type")]
 pub struct Genre {
     pub id: i32,
     pub source: Source,
     pub remote_id: i32,
     pub code: String,
     pub description: String,
-    pub meta: String
+    pub meta: String,
 }
 
-impl From<genre::Data> for Genre {
-    fn from(val: genre::Data) -> Self {
-        let genre::Data {
-            id,
-            remote_id,
-            code,
-            description,
-            meta,
-            source,
-            ..
-        } = val;
-
-        Genre {
-            id,
-            remote_id,
-            code,
-            description,
-            meta,
-            source: source.unwrap().as_ref().clone().into()
-        }
-    }
-}
-
 #[derive(Deserialize)]
 pub struct GenreFilter {
     pub meta: Option<String>,
src/serializers/mod.rs
@@ -1,11 +1,12 @@
-pub mod pagination;
-pub mod allowed_langs;
+pub mod allowed_langs;
 pub mod author;
 pub mod author_annotation;
-pub mod genre;
-pub mod source;
 pub mod book;
-pub mod sequence;
-pub mod utils;
-pub mod translator;
 pub mod book_annotation;
+pub mod date;
+pub mod genre;
+pub mod pagination;
+pub mod sequence;
+pub mod source;
+pub mod translator;
+pub mod utils;
src/serializers/pagination.rs
@@ -1,6 +1,5 @@
 use serde::{Deserialize, Serialize};
 
-
 fn default_page() -> i64 {
     1
 }
@@ -14,17 +13,16 @@ pub struct Pagination {
     #[serde(default = "default_page")]
     pub page: i64,
     #[serde(default = "default_size")]
-    pub size: i64
+    pub size: i64,
 }
 
-
 #[derive(Serialize)]
 pub struct Page<T> {
     pub items: Vec<T>,
     pub total: i64,
     pub page: i64,
     pub size: i64,
-    pub pages: i64
+    pub pages: i64,
 }
 
 #[derive(Serialize)]
@@ -34,7 +32,7 @@ pub struct PageWithParent<T, P> {
     pub page: i64,
     pub size: i64,
     pub pages: i64,
-    pub parent_item: P
+    pub parent_item: P,
 }
 
 impl<T> Page<T> {
@@ -44,7 +42,7 @@ impl<T> Page<T> {
             total,
             page: pagination.page,
             size: pagination.size,
-            pages: (total + pagination.size - 1) / pagination.size
+            pages: (total + pagination.size - 1) / pagination.size,
         }
     }
 }
@@ -57,7 +55,7 @@ impl<T, P> PageWithParent<T, P> {
             page: pagination.page,
             size: pagination.size,
             pages: (total + pagination.size - 1) / pagination.size,
-            parent_item
+            parent_item,
        }
    }
}
|
||||
use chrono::NaiveDate;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::prisma::{sequence, book};
|
||||
use super::author::Author;
|
||||
use super::date::naive_date_serializer;
|
||||
|
||||
use super::{author::Author, utils::{get_available_types, get_authors, get_translators}};
|
||||
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
|
||||
#[sqlx(type_name = "sequence_type")]
|
||||
pub struct Sequence {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl From<sequence::Data> for Sequence {
|
||||
fn from(val: sequence::Data) -> Self {
|
||||
let sequence::Data { id, name, .. } = val;
|
||||
|
||||
Sequence { id, name }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[derive(sqlx::FromRow, Serialize)]
|
||||
pub struct SequenceBook {
|
||||
pub id: i32,
|
||||
pub title: String,
|
||||
pub lang: String,
|
||||
pub file_type: String,
|
||||
pub year: i32,
|
||||
pub available_types: Vec<String>,
|
||||
pub uploaded: String,
|
||||
#[serde(serialize_with = "naive_date_serializer::serialize")]
|
||||
pub uploaded: NaiveDate,
|
||||
pub authors: Vec<Author>,
|
||||
pub translators: Vec<Author>,
|
||||
pub annotation_exists: bool,
|
||||
}
|
||||
|
||||
impl From<book::Data> for SequenceBook {
|
||||
fn from(value: book::Data) -> Self {
|
||||
let book::Data {
|
||||
id,
|
||||
title,
|
||||
lang,
|
||||
file_type,
|
||||
uploaded,
|
||||
book_authors,
|
||||
translations,
|
||||
book_annotation,
|
||||
source,
|
||||
..
|
||||
} = value;
|
||||
|
||||
Self {
|
||||
id,
|
||||
title,
|
||||
lang,
|
||||
file_type: file_type.clone(),
|
||||
available_types: get_available_types(file_type, source.clone().unwrap().name),
|
||||
uploaded: uploaded.format("%Y-%m-%d").to_string(),
|
||||
authors: get_authors(book_authors),
|
||||
translators: get_translators(translations),
|
||||
annotation_exists: book_annotation.unwrap().is_some(),
|
||||
}
|
||||
}
|
||||
pub position: i32,
|
||||
}
|
||||
|
||||
src/serializers/source.rs
@@ -1,25 +1,8 @@
 use serde::Serialize;
 
-use crate::prisma::source;
-
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
+#[sqlx(type_name = "source_type")]
 pub struct Source {
     pub id: i32,
-    pub name: String
-}
-
-impl From<source::Data> for Source
-{
-    fn from(val: source::Data) -> Self {
-        let source::Data {
-            id,
-            name,
-            ..
-        } = val;
-
-        Source {
-            id,
-            name
-        }
-    }
+    pub name: String,
 }
src/serializers/translator.rs
@@ -1,47 +1,21 @@
+use chrono::NaiveDate;
 use serde::Serialize;
 
-use crate::prisma::book;
+use super::date::naive_date_serializer;
 
-use super::{author::Author, sequence::Sequence, utils::{get_available_types, get_authors, get_sequences}};
+use super::{author::Author, sequence::Sequence};
 
-#[derive(Serialize)]
+#[derive(sqlx::FromRow, Serialize)]
 pub struct TranslatorBook {
     pub id: i32,
     pub title: String,
     pub lang: String,
     pub file_type: String,
     pub year: i32,
     pub available_types: Vec<String>,
-    pub uploaded: String,
+    #[serde(serialize_with = "naive_date_serializer::serialize")]
+    pub uploaded: NaiveDate,
     pub authors: Vec<Author>,
     pub sequences: Vec<Sequence>,
     pub annotation_exists: bool,
 }
-
-impl From<book::Data> for TranslatorBook {
-    fn from(val: book::Data) -> Self {
-        let book::Data {
-            id,
-            title,
-            lang,
-            file_type,
-            uploaded,
-            book_authors,
-            book_sequences,
-            book_annotation,
-            source,
-            ..
-        } = val;
-
-        TranslatorBook {
-            id,
-            title,
-            lang,
-            file_type: file_type.clone(),
-            available_types: get_available_types(file_type.clone(), source.unwrap().name),
-            uploaded: uploaded.format("%Y-%m-%d").to_string(),
-            authors: get_authors(book_authors),
-            sequences: get_sequences(book_sequences),
-            annotation_exists: book_annotation.unwrap().is_some(),
-        }
-    }
-}
src/serializers/utils.rs
@@ -1,7 +1,3 @@
-use crate::prisma::{translator, book_sequence, book_author, book_genre};
-
-use super::{author::Author, sequence::Sequence, genre::Genre};
-
 pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
     if file_type == "fb2" && source_name == "flibusta" {
         vec![
@@ -14,43 +10,3 @@ pub fn get_available_types(file_type: String, source_name: String) -> Vec<String
         vec![file_type]
     }
 }
-
-pub fn get_authors(
-    book_authors: Option<Vec<book_author::Data>>
-) -> Vec<Author> {
-    book_authors
-        .unwrap()
-        .iter()
-        .map(|item| item.author.clone().unwrap().as_ref().clone().into())
-        .collect()
-}
-
-pub fn get_translators(
-    translations: Option<Vec<translator::Data>>
-) -> Vec<Author> {
-    translations
-        .unwrap()
-        .iter()
-        .map(|item| item.author.clone().unwrap().as_ref().clone().into())
-        .collect()
-}
-
-pub fn get_sequences(
-    book_sequences: Option<Vec<book_sequence::Data>>
-) -> Vec<Sequence> {
-    book_sequences
-        .unwrap()
-        .iter()
-        .map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
-        .collect()
-}
-
-pub fn get_genres(
-    book_genres: Option<Vec<book_genre::Data>>
-) -> Vec<Genre> {
-    book_genres
-        .unwrap()
-        .iter()
-        .map(|item| item.genre.clone().unwrap().as_ref().clone().into())
-        .collect()
-}
@@ -1,279 +1,339 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};
|
||||
use axum::{
|
||||
extract::{Path, Query},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
routing::get,
|
||||
Json, Router,
|
||||
};
|
||||
|
||||
use rand::Rng;
|
||||
use crate::{
|
||||
meilisearch::{get_meili_client, AuthorMeili},
|
||||
serializers::{
|
||||
allowed_langs::AllowedLangs,
|
||||
author::{Author, AuthorBook},
|
||||
author_annotation::AuthorAnnotation,
|
||||
book::BaseBook,
|
||||
pagination::{Page, PageWithParent, Pagination},
|
||||
sequence::Sequence,
|
||||
},
|
||||
};
|
||||
|
||||
use crate::{prisma::{author, author_annotation::{self}, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};
|
||||
use super::{common::get_random_item::get_random_item, Database};
|
||||
|
||||
use super::Database;
|
||||
|
||||
|
||||
async fn get_authors(
|
||||
db: Database,
|
||||
pagination: Query<Pagination>
|
||||
) -> impl IntoResponse {
|
||||
let authors_count = db
|
||||
.author()
|
||||
.count(vec![])
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let authors = db
|
||||
.author()
|
||||
.find_many(vec![])
|
||||
.with(
|
||||
author::author_annotation::fetch()
|
||||
)
|
||||
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
|
||||
.skip((pagination.page - 1) * pagination.size)
|
||||
.take(pagination.size)
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let page: Page<Author> = Page::new(
|
||||
authors.iter().map(|item| item.clone().into()).collect(),
|
||||
authors_count,
|
||||
&pagination
|
||||
);
|
||||
|
||||
Json(page)
|
||||
}
|
||||
|
||||
|
||||
async fn get_random_author(
|
||||
db: Database,
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
|
||||
) -> impl IntoResponse {
|
||||
let client = get_meili_client();
|
||||
|
||||
let authors_index = client.index("authors");
|
||||
|
||||
let filter = format!(
|
||||
"author_langs IN [{}]",
|
||||
allowed_langs.join(", ")
|
||||
);
|
||||
|
||||
let result = authors_index
|
||||
.search()
|
||||
.with_filter(&filter)
|
||||
.execute::<AuthorMeili>()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let author_id = {
|
||||
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
|
||||
|
||||
let result = authors_index
|
||||
.search()
|
||||
.with_limit(1)
|
||||
.with_offset(offset)
|
||||
.execute::<AuthorMeili>()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let author = &result.hits.get(0).unwrap().result;
|
||||
|
||||
author.id
|
||||
};
|
||||
|
||||
let author = db
|
||||
.author()
|
||||
.find_unique(
|
||||
author::id::equals(author_id)
|
||||
)
|
||||
.with(
|
||||
author::author_annotation::fetch()
|
||||
)
|
||||
.exec()
|
||||
async fn get_authors(db: Database, pagination: Query<Pagination>) -> impl IntoResponse {
|
||||
let authors_count = sqlx::query_scalar!("SELECT COUNT(*) FROM authors",)
|
||||
.fetch_one(&db.0)
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
Json::<Author>(author.into())
|
||||
let authors = sqlx::query_as!(
|
||||
Author,
|
||||
r#"
|
||||
SELECT
|
||||
a.id,
|
||||
a.first_name,
|
||||
a.last_name,
|
||||
COALESCE(a.middle_name, '') AS "middle_name!: String",
|
||||
CASE
|
||||
WHEN aa.id IS NOT NULL THEN true
|
||||
ELSE false
|
||||
END AS "annotation_exists!: bool"
|
||||
FROM authors a
|
||||
LEFT JOIN author_annotations aa ON a.id = aa.author
|
||||
ORDER BY a.id ASC
|
||||
OFFSET $1
|
||||
LIMIT $2
|
||||
"#,
|
||||
(pagination.page - 1) * pagination.size,
|
||||
pagination.size
|
||||
)
|
||||
.fetch_all(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let page: Page<Author> = Page::new(authors, authors_count, &pagination);
|
||||
|
||||
Json(page)
|
||||
}
|
||||
|
||||
|
||||
async fn get_author(
|
||||
async fn get_random_author(
|
||||
db: Database,
|
||||
Path(author_id): Path<i32>
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
|
||||
AllowedLangs,
|
||||
>,
|
||||
) -> impl IntoResponse {
|
||||
let author = db
|
||||
.author()
|
||||
.find_unique(
|
||||
author::id::equals(author_id)
|
||||
)
|
||||
.with(
|
||||
author::author_annotation::fetch()
|
||||
)
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
let author_id = {
|
||||
let client = get_meili_client();
|
||||
|
||||
let authors_index = client.index("authors");
|
||||
|
||||
let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));
|
||||
|
||||
get_random_item::<AuthorMeili>(authors_index, filter).await
|
||||
};
|
||||
|
||||
let author = sqlx::query_as!(
|
||||
Author,
|
||||
r#"
|
||||
SELECT
|
||||
a.id,
|
||||
a.first_name,
|
||||
a.last_name,
|
||||
COALESCE(a.middle_name, '') AS "middle_name!: String",
|
||||
CASE
|
||||
WHEN aa.id IS NOT NULL THEN true
|
||||
ELSE false
|
||||
END AS "annotation_exists!: bool"
|
||||
FROM authors a
|
||||
LEFT JOIN author_annotations aa ON a.id = aa.author
|
||||
WHERE a.id = $1
|
||||
"#,
|
||||
author_id
|
||||
)
|
||||
.fetch_one(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
Json::<Author>(author)
|
||||
}
|
||||
|
||||
async fn get_author(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
|
||||
let author = sqlx::query_as!(
|
||||
Author,
|
||||
r#"
|
||||
SELECT
|
||||
a.id,
|
||||
a.first_name,
|
||||
a.last_name,
|
||||
COALESCE(a.middle_name, '') AS "middle_name!: String",
|
||||
CASE
|
||||
WHEN aa.id IS NOT NULL THEN true
|
||||
ELSE false
|
||||
END AS "annotation_exists!: bool"
|
||||
FROM authors a
|
||||
LEFT JOIN author_annotations aa ON a.id = aa.author
|
||||
WHERE a.id = $1
|
||||
"#,
|
||||
author_id
|
||||
)
|
||||
.fetch_optional(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
match author {
|
||||
Some(author) => Json::<Author>(author.into()).into_response(),
|
||||
Some(author) => Json::<Author>(author).into_response(),
|
||||
None => StatusCode::NOT_FOUND.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async fn get_author_annotation(
|
||||
db: Database,
|
||||
Path(author_id): Path<i32>,
|
||||
) -> impl IntoResponse {
|
||||
let author_annotation = db
|
||||
.author_annotation()
|
||||
.find_unique(
|
||||
author_annotation::author_id::equals(author_id)
|
||||
)
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
async fn get_author_annotation(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
|
||||
let author_annotation = sqlx::query_as!(
|
||||
AuthorAnnotation,
|
||||
r#"
|
||||
SELECT
|
||||
aa.id,
|
||||
aa.title,
|
||||
aa.text,
|
||||
aa.file
|
||||
FROM author_annotations aa
|
||||
WHERE aa.author = $1
|
||||
"#,
|
||||
author_id
|
||||
)
|
||||
.fetch_optional(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
match author_annotation {
|
||||
Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
|
||||
Some(annotation) => Json::<AuthorAnnotation>(annotation).into_response(),
|
||||
None => StatusCode::NOT_FOUND.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async fn get_author_books(
|
||||
db: Database,
|
||||
Path(author_id): Path<i32>,
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
|
||||
pagination: Query<Pagination>
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
|
||||
AllowedLangs,
|
||||
>,
|
||||
pagination: Query<Pagination>,
|
||||
) -> impl IntoResponse {
|
||||
let author = db
|
||||
.author()
|
||||
.find_unique(
|
||||
author::id::equals(author_id)
|
||||
)
|
||||
.with(
|
||||
author::author_annotation::fetch()
|
||||
)
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
let author = sqlx::query_as!(
|
||||
Author,
|
||||
r#"
|
||||
SELECT
|
||||
a.id,
|
||||
a.first_name,
|
||||
a.last_name,
|
||||
COALESCE(a.middle_name, '') AS "middle_name!: String",
|
||||
CASE
|
||||
WHEN aa.id IS NOT NULL THEN true
|
||||
ELSE false
|
||||
END AS "annotation_exists!: bool"
|
||||
FROM authors a
|
||||
LEFT JOIN author_annotations aa ON a.id = aa.author
|
||||
WHERE a.id = $1
|
||||
"#,
|
||||
author_id
|
||||
)
|
||||
.fetch_optional(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let author = match author {
|
||||
Some(author) => author,
|
||||
None => return StatusCode::NOT_FOUND.into_response(),
|
||||
};
|
||||
|
||||
let books_count = db
|
||||
.book()
|
||||
.count(vec![
|
||||
book::book_authors::some(vec![
|
||||
book_author::author_id::equals(author_id)
|
||||
]),
|
||||
book::lang::in_vec(allowed_langs.clone())
|
||||
])
|
||||
.exec()
|
||||
.await
|
||||
.unwrap();
|
||||
let books_count = sqlx::query_scalar!(
|
||||
r#"
|
||||
SELECT COUNT(*)
|
||||
FROM books b
|
||||
JOIN book_authors ba ON b.id = ba.book
|
||||
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
|
||||
"#,
|
||||
author_id,
|
||||
&allowed_langs
|
||||
)
|
||||
.fetch_one(&db.0)
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
let books = db
|
||||
.book()
|
||||
.find_many(vec![
|
||||
book::book_authors::some(vec![
|
||||
book_author::author_id::equals(author_id)
|
||||
]),
|
||||
book::lang::in_vec(allowed_langs)
|
||||
])
|
||||
.with(
|
||||
book::source::fetch()
|
||||
)
|
||||
.with(
|
||||
book::book_annotation::fetch()
|
||||
)
|
||||
.with(
|
||||
book::translations::fetch(vec![])
|
||||
.with(
|
||||
translator::author::fetch()
|
||||
.with(
|
||||
author::author_annotation::fetch()
|
||||
let books = sqlx::query_as!(
|
||||
AuthorBook,
|
||||
r#"
|
||||
SELECT
|
||||
b.id,
|
||||
b.title,
|
||||
b.lang,
|
||||
b.file_type,
|
||||
b.year,
|
||||
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
|
||||
b.uploaded,
|
||||
COALESCE(
|
||||
(
|
||||
SELECT
|
||||
ARRAY_AGG(
|
||||
ROW(
|
||||
authors.id,
|
||||
authors.first_name,
|
||||
authors.last_name,
|
||||
authors.middle_name,
|
||||
EXISTS(
|
||||
SELECT * FROM author_annotations WHERE author = authors.id
|
||||
)
|
||||
)::author_type
|
||||
)
|
||||
)
|
||||
)
|
||||
.with(
|
||||
book::book_sequences::fetch(vec![])
|
||||
.with(
|
||||
book_sequence::sequence::fetch()
|
||||
)
|
||||
)
|
||||
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
|
||||
.skip((pagination.page - 1) * pagination.size)
|
||||
.take(pagination.size)
|
||||
.exec()
|
||||
FROM translations
|
||||
JOIN authors ON authors.id = translations.author
|
||||
WHERE translations.book = b.id
|
||||
),
|
||||
ARRAY[]::author_type[]
|
||||
) AS "translators!: Vec<Author>",
|
||||
COALESCE(
|
||||
(
|
||||
SELECT
|
||||
ARRAY_AGG(
|
||||
ROW(
|
||||
sequences.id,
|
||||
sequences.name
|
||||
)::sequence_type
|
||||
)
|
||||
FROM book_sequences
|
||||
JOIN sequences ON sequences.id = book_sequences.sequence
|
||||
WHERE book_sequences.book = b.id
|
||||
),
|
||||
ARRAY[]::sequence_type[]
|
||||
) AS "sequences!: Vec<Sequence>",
|
||||
EXISTS(
|
||||
SELECT * FROM book_annotations WHERE book = b.id
|
||||
) AS "annotation_exists!: bool"
|
||||
FROM books b
|
||||
JOIN book_authors ba ON b.id = ba.book
|
||||
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
|
||||
ORDER BY b.title ASC
|
||||
OFFSET $3
|
||||
LIMIT $4
|
||||
"#,
|
||||
author_id,
|
||||
&allowed_langs,
|
||||
(pagination.page - 1) * pagination.size,
|
||||
pagination.size
|
||||
)
|
||||
.fetch_all(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
|
||||
author.into(),
|
||||
books.iter().map(|item| item.clone().into()).collect(),
|
||||
books_count,
|
||||
&pagination
|
||||
);
|
||||
let page: PageWithParent<AuthorBook, Author> =
|
||||
PageWithParent::new(author, books, books_count, &pagination);
|
||||
|
||||
Json(page).into_response()
|
||||
}
|
||||
|
||||
|
||||
async fn get_author_books_available_types(
|
||||
db: Database,
|
||||
Path(author_id): Path<i32>,
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
|
||||
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
|
||||
AllowedLangs,
|
||||
>,
|
||||
) -> impl IntoResponse {
|
||||
let books = db
|
||||
.book()
|
||||
.find_many(vec![
|
||||
book::book_authors::some(vec![
|
||||
book_author::author_id::equals(author_id)
|
||||
]),
|
||||
book::lang::in_vec(allowed_langs)
|
||||
])
|
||||
.exec()
|
||||
// TODO: refactor
|
||||
|
||||
let books = sqlx::query_as!(
|
||||
BaseBook,
|
||||
r#"
|
||||
SELECT
|
||||
b.id,
|
||||
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
|
||||
FROM books b
|
||||
JOIN book_authors ba ON b.id = ba.book
|
||||
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
|
||||
"#,
|
||||
author_id,
|
||||
&allowed_langs
|
||||
)
|
||||
.fetch_all(&db.0)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut file_types: HashSet<String> = HashSet::new();
|
||||
|
||||
for book in books {
|
||||
file_types.insert(book.file_type.clone());
|
||||
}
|
||||
|
||||
if file_types.contains(&"fb2".to_string()) {
|
||||
file_types.insert("epub".to_string());
|
||||
file_types.insert("mobi".to_string());
|
||||
file_types.insert("fb2zip".to_string());
|
||||
for file_type in book.available_types {
|
||||
file_types.insert(file_type);
|
||||
}
|
||||
}
|
||||
|
||||
Json::<Vec<String>>(file_types.into_iter().collect())
|
||||
}
|
||||


async fn search_authors(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();

let authors_index = client.index("authors");

let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));

let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
@@ -282,18 +342,27 @@ async fn search_authors(
let total = result.estimated_total_hits.unwrap();
let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

let mut authors = db
.author()
.find_many(vec![
author::id::in_vec(author_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
let mut authors = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&author_ids
)
.fetch_all(&db.0)
.await
.unwrap();

authors.sort_by(|a, b| {
let a_pos = author_ids.iter().position(|i| *i == a.id).unwrap();
@@ -302,23 +371,21 @@ async fn search_authors(
a_pos.cmp(&b_pos)
});

let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Author> = Page::new(authors, total.try_into().unwrap(), &pagination);

Json(page)
}


pub async fn get_authors_router() -> Router {
Router::new()
.route("/", get(get_authors))
.route("/random", get(get_random_author))
.route("/:author_id", get(get_author))
.route("/:author_id/annotation", get(get_author_annotation))
.route("/:author_id/books", get(get_author_books))
.route("/:author_id/available_types", get(get_author_books_available_types))
.route("/search/:query", get(search_authors))
.route("/{author_id}", get(get_author))
.route("/{author_id}/annotation", get(get_author_annotation))
.route("/{author_id}/books", get(get_author_books))
.route(
"/{author_id}/available_types",
get(get_author_books_available_types),
)
.route("/search/{query}", get(search_authors))
}

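The route strings switch from /:author_id to /{author_id} because newer axum releases use brace-style path captures. A minimal sketch under that assumption:

// {author_id} is a path capture; the handler receives it via Path<i32>.
let router = Router::new().route("/{author_id}/books", get(get_author_books));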
File diff suppressed because it is too large

src/views/common/get_random_item.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
use meilisearch_sdk::indexes::Index;
use rand::Rng;
use serde::de::DeserializeOwned;

use crate::meilisearch::GetId;

pub async fn get_random_item<'a, T>(index: Index, filter: String) -> i32
where
T: DeserializeOwned + GetId + 'static + Send + Sync,
{
let result = index
.search()
.with_filter(&filter)
.execute::<T>()
.await
.unwrap();

let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap());

let result = index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<T>()
.await
.unwrap();

let item = &result.hits.first().unwrap().result;

item.get_id()
}
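The new helper issues two searches: the first only reads estimated_total_hits, the second fetches a single hit at a random offset inside that range, and the GetId bound turns the hit back into a database id. A usage sketch mirroring the call site in the sequences hunk further down:

// Pick a random sequence id among those matching the language filter.
let client = get_meili_client();
let index = client.index("sequences");
let filter = format!("langs IN [{}]", allowed_langs.join(", "));
let sequence_id: i32 = get_random_item::<SequenceMeili>(index, filter).await;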
src/views/common/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod get_random_item;
@@ -1,67 +1,95 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;
use axum::{extract::Query, response::IntoResponse, routing::get, Json, Router};

use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};
use crate::serializers::{
genre::{Genre, GenreFilter},
pagination::{Page, Pagination},
};

use crate::serializers::source::Source;

use super::Database;


pub async fn get_genres(
db: Database,
pagination: Query<Pagination>,
Query(GenreFilter { meta }): Query<GenreFilter>
Query(GenreFilter { meta }): Query<GenreFilter>,
) -> impl IntoResponse {
let filter = {
match meta {
Some(meta) => vec![
genre::meta::equals(meta)
],
None => vec![],
}
};
let genres_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*) FROM genres
WHERE (meta = $1 OR $1 IS NULL)
"#,
meta
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();

let genres_count = db
.genre()
.count(filter.clone())
.exec()
.await
.unwrap();
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
WHERE (meta = $1 OR $1 IS NULL)
ORDER BY genres.id ASC
LIMIT $2 OFFSET $3
"#,
meta,
pagination.size,
(pagination.page - 1) * pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();

let genres = db
.genre()
.find_many(filter)
.with(
genre::source::fetch()
)
.order_by(genre::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();

let page: Page<Genre> = Page::new(
genres.iter().map(|item| item.clone().into()).collect(),
genres_count,
&pagination
);
let page: Page<Genre> = Page::new(genres, genres_count, &pagination);

Json(page)
}


pub async fn get_genre_metas(
db: Database
) -> impl IntoResponse {
let genres = db
.genre()
.find_many(vec![])
.order_by(genre::id::order(Direction::Asc))
.exec()
.await
.unwrap();
pub async fn get_genre_metas(db: Database) -> impl IntoResponse {
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
ORDER BY genres.id ASC
"#
)
.fetch_all(&db.0)
.await
.unwrap();

let mut metas: HashSet<String> = HashSet::new();

@@ -69,9 +97,11 @@ pub async fn get_genre_metas(
metas.insert(genre.meta.clone());
}

Json::<Vec<String>>(metas.into_iter().collect())
}
let mut metas: Vec<String> = metas.into_iter().collect();
metas.sort();

Json::<Vec<String>>(metas)
}

pub async fn get_genres_router() -> Router {
Router::new()

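The (meta = $1 OR $1 IS NULL) predicate is the usual sqlx idiom for an optional filter: meta is bound as an Option, and binding None switches the predicate off. A minimal sketch, assuming a PgPool named pool:

// None -> $1 IS NULL, so every genre matches; Some(..) filters by meta.
let meta: Option<String> = None;
let genres_count = sqlx::query_scalar!(
    "SELECT COUNT(*) FROM genres WHERE (meta = $1 OR $1 IS NULL)",
    meta
)
.fetch_one(&pool)
.await
.unwrap()
// The macro infers COUNT(*) as nullable, hence the inner Option to unwrap.
.unwrap();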
@@ -1,27 +1,35 @@
use std::sync::Arc;

use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{Request, StatusCode, self}, response::Response};
use axum::{
http::{self, Request, StatusCode},
middleware::{self, Next},
response::Response,
routing::get,
Extension, Router,
};
use axum_prometheus::PrometheusMetricLayer;
use tower_http::trace::{TraceLayer, self};
use sqlx::PgPool;
use tower_http::trace::{self, TraceLayer};
use tracing::Level;

use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};
use crate::{config::CONFIG, db::get_postgres_pool};

use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router;
use self::{
authors::get_authors_router, books::get_books_router, genres::get_genres_router,
sequences::get_sequences_router,
};

pub mod authors;
pub mod books;
pub mod common;
pub mod genres;
pub mod sequences;
pub mod translators;

pub type Database = Extension<PgPool>;

pub type Database = Extension<Arc<PrismaClient>>;


async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
let auth_header = req.headers()
async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, StatusCode> {
let auth_header = req
.headers()
.get(http::header::AUTHORIZATION)
.and_then(|header| header.to_str().ok());

@@ -38,35 +46,27 @@ async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode>
Ok(next.run(req).await)
}


pub async fn get_router() -> Router {
let client = Arc::new(get_prisma_client().await);
let client = get_postgres_pool().await;

let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();

let app_router = Router::new()

.nest("/api/v1/authors", get_authors_router().await)
.nest("/api/v1/translators", get_translators_router().await)
.nest("/api/v1/genres", get_genres_router().await)
.nest("/api/v1/books", get_books_router().await)
.nest("/api/v1/sequences", get_sequences_router().await)

.layer(middleware::from_fn(auth))
.layer(Extension(client))
.layer(prometheus_layer);

let metric_router = Router::new()
.route("/metrics", get(|| async move { metric_handle.render() }));
let metric_router =
Router::new().route("/metrics", get(|| async move { metric_handle.render() }));

Router::new()
.nest("/", app_router)
.nest("/", metric_router)
.layer(
TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new()
.level(Level::INFO))
.on_response(trace::DefaultOnResponse::new()
.level(Level::INFO)),
)
Router::new().merge(app_router).merge(metric_router).layer(
TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new().level(Level::INFO))
.on_response(trace::DefaultOnResponse::new().level(Level::INFO)),
)
}

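The auth middleware drops its body generic (Next<B> becomes Next), matching axum 0.7+, where Request defaults to axum::body::Body. A minimal sketch of the full check under that assumption; the CONFIG.api_key field name is a guess, since the hunk with the actual comparison is elided above:

async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, StatusCode> {
    let auth_header = req
        .headers()
        .get(http::header::AUTHORIZATION)
        .and_then(|header| header.to_str().ok());

    // CONFIG.api_key is a hypothetical name for the expected token field.
    match auth_header {
        Some(token) if token == CONFIG.api_key => Ok(next.run(req).await),
        _ => Err(StatusCode::UNAUTHORIZED),
    }
}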
@@ -1,82 +1,79 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
use rand::Rng;
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};

use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};

use super::Database;
use crate::{
meilisearch::{get_meili_client, SequenceMeili},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::{Sequence, SequenceBook},
},
};

use super::{common::get_random_item::get_random_item, Database};

async fn get_random_sequence(
db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let client = get_meili_client();

let authors_index = client.index("sequences");

let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);

let result = authors_index
.search()
.with_filter(&filter)
.execute::<SequenceMeili>()
.await
.unwrap();

let sequence_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let client = get_meili_client();

let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<SequenceMeili>()
.await
.unwrap();
let authors_index = client.index("sequences");

let sequence = &result.hits.get(0).unwrap().result;
let filter = format!("langs IN [{}]", allowed_langs.join(", "));

sequence.id
get_random_item::<SequenceMeili>(authors_index, filter).await
};

let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap()
.unwrap();
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_one(&db.0)
.await
.unwrap();

Json::<Sequence>(sequence.into())
Json::<Sequence>(sequence)
}

async fn search_sequence(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();

let sequence_index = client.index("sequences");

let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("langs IN [{}]", allowed_langs.join(", "));

let result = sequence_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<SequenceMeili>()
.await
@@ -85,14 +82,16 @@ async fn search_sequence(
let total = result.estimated_total_hits.unwrap();
let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

let mut sequences = db
.sequence()
.find_many(vec![
sequence::id::in_vec(sequence_ids.clone())
])
.exec()
.await
.unwrap();
let mut sequences = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = ANY($1)
"#,
&sequence_ids
)
.fetch_all(&db.0)
.await
.unwrap();

sequences.sort_by(|a, b| {
let a_pos = sequence_ids.iter().position(|i| *i == a.id).unwrap();
@@ -101,30 +100,25 @@ async fn search_sequence(
a_pos.cmp(&b_pos)
});

let page: Page<Sequence> = Page::new(
sequences.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Sequence> = Page::new(sequences, total.try_into().unwrap(), &pagination);

Json(page)
}

async fn get_sequence(
db: Database,
Path(sequence_id): Path<i32>
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
async fn get_sequence(db: Database, Path(sequence_id): Path<i32>) -> impl IntoResponse {
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();

match sequence {
Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
Some(sequence) => Json::<Sequence>(sequence).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
@@ -132,30 +126,38 @@ async fn get_sequence(
async fn get_sequence_available_types(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
// TODO: refactor

let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
"#,
sequence_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();

let mut file_types: HashSet<String> = HashSet::new();

for book in books {
file_types.insert(book.file_type.clone());
}

if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
for file_type in book.available_types {
file_types.insert(file_type);
}
}

Json::<Vec<String>>(file_types.into_iter().collect())
@@ -164,90 +166,131 @@ async fn get_sequence_available_types(
async fn get_sequence_books(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();

let sequence = match sequence {
Some(v) => v,
None => return StatusCode::NOT_FOUND.into_response(),
};

let books_count = db
.book()
.count(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
let books_count = sqlx::query_scalar!(
"SELECT COUNT(*) FROM book_sequences bs
JOIN books b ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)",
sequence.id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();

let mut books = sqlx::query_as!(
SequenceBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
),
ARRAY[]::author_type[]
) AS "translators!: Vec<Author>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
bs.position
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
ORDER BY bs.position
LIMIT $3 OFFSET $4
"#,
sequence.id,
&allowed_langs,
pagination.size,
(pagination.page - 1) * pagination.size,
)
.fetch_all(&db.0)
.await
.unwrap();

let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
books.sort_by(|a, b| a.position.cmp(&b.position));

let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
sequence.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
let page: PageWithParent<SequenceBook, Sequence> =
PageWithParent::new(sequence, books, books_count, &pagination);

Json(page).into_response()
}


pub async fn get_sequences_router() -> Router {
Router::new()
.route("/random", get(get_random_sequence))
.route("/search/:query", get(search_sequence))
.route("/:sequence_id", get(get_sequence))
.route("/:sequence_id/available_types", get(get_sequence_available_types))
.route("/:sequence_id/books", get(get_sequence_books))
.route("/search/{query}", get(search_sequence))
.route("/{sequence_id}", get(get_sequence))
.route(
"/{sequence_id}/available_types",
get(get_sequence_available_types),
)
.route("/{sequence_id}/books", get(get_sequence_books))
}

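The ROW(...)::author_type aggregates lean on sqlx's Postgres composite-type support: a Rust struct derives sqlx::Type with a matching type_name, and ARRAY_AGG of the composite then decodes as a Vec of that struct. A sketch of the mapping these queries appear to assume; the field list mirrors the ROW expressions, since the actual serializer definitions are not shown in this diff:

// Hypothetical mirror of the Postgres composite "author_type"; field order
// must match the ROW(...) expression for sqlx to decode it.
#[derive(Clone, serde::Serialize, sqlx::Type)]
#[sqlx(type_name = "author_type")]
pub struct Author {
    pub id: i32,
    pub first_name: String,
    pub last_name: String,
    pub middle_name: String, // NULL handling glossed over in this sketch
    pub annotation_exists: bool,
}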
@@ -1,147 +1,216 @@
use std::collections::HashSet;

use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};

use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book::{self}, translator, book_author, book_sequence}};
use crate::{
meilisearch::{get_meili_client, AuthorMeili},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
translator::TranslatorBook,
},
};

use super::Database;


async fn get_translated_books(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let translator = db
.author()
.find_unique(
author::id::equals(translator_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let translator = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
translator_id
)
.fetch_optional(&db.0)
.await
.unwrap();

let translator = match translator {
Some(translator) => translator,
None => return StatusCode::NOT_FOUND.into_response(),
};

let books_count = db
.book()
.count(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*)
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();

let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
let books = sqlx::query_as!(
TranslatorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
sequences.id,
sequences.name
)::sequence_type
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
),
ARRAY[]::sequence_type[]
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
OFFSET $3
LIMIT $4
"#,
translator_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();

let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
translator.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
let page: PageWithParent<TranslatorBook, Author> =
PageWithParent::new(translator, books, books_count, &pagination);

Json(page).into_response()
}


async fn get_translated_books_available_types(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
// TODO: refactor

let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();

let mut file_types: HashSet<String> = HashSet::new();

for book in books {
file_types.insert(book.file_type.clone());
}

if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
for file_type in book.available_types {
file_types.insert(file_type);
}
}

Json::<Vec<String>>(file_types.into_iter().collect())
}


async fn search_translators(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();

let authors_index = client.index("authors");

let filter = format!(
"translator_langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("translator_langs IN [{}]", allowed_langs.join(", "));

let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
@@ -150,18 +219,27 @@ async fn search_translators(
let total = result.estimated_total_hits.unwrap();
let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();

let mut translators = db
.author()
.find_many(vec![
author::id::in_vec(translator_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
let mut translators = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&translator_ids
)
.fetch_all(&db.0)
.await
.unwrap();

translators.sort_by(|a, b| {
let a_pos = translator_ids.iter().position(|i| *i == a.id).unwrap();
@@ -170,19 +248,17 @@ async fn search_translators(
a_pos.cmp(&b_pos)
});

let page: Page<Author> = Page::new(
translators.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Author> = Page::new(translators, total.try_into().unwrap(), &pagination);

Json(page)
}


pub async fn get_translators_router() -> Router {
Router::new()
.route("/:translator_id/books", get(get_translated_books))
.route("/:translator_id/available_types", get(get_translated_books_available_types))
.route("/search/:query", get(search_translators))
.route("/{translator_id}/books", get(get_translated_books))
.route(
"/{translator_id}/available_types",
get(get_translated_books_available_types),
)
.route("/search/{query}", get(search_translators))
}

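WHERE id = ANY($1) returns rows in unspecified order, so each search handler re-sorts the fetched rows by their position in the Meilisearch result list to preserve ranking. An equivalent sort_by_key form of the pattern used above:

// Restore search-ranking order after the unordered ANY($1) fetch.
translators.sort_by_key(|t| {
    translator_ids
        .iter()
        .position(|i| *i == t.id)
        .unwrap() // every fetched row came from translator_ids
});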