96 Commits

Author SHA1 Message Date
d9bbcc947f Update webhook URL parameter to BOOK_SERVER_TAG
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
2025-06-21 23:08:39 +02:00
0642f14d4e Update dependencies and Docker workflow for latest compatibility
- Update many Rust dependencies in Cargo.lock to latest versions -
  Update Docker workflow: - Add image tag with commit SHA - Pass commit
  SHA as a query param to deployment webhook - Minor YAML formatting
  improvements
2025-06-21 22:11:01 +02:00
684f4e3e52 Merge pull request #40 from flibusta-apps/dependabot/cargo/tokio-1.44.2
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
Bump tokio from 1.44.1 to 1.44.2
2025-04-08 15:56:16 +02:00
dependabot[bot]
64f77d0f15 Bump tokio from 1.44.1 to 1.44.2
Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.44.1 to 1.44.2.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.44.1...tokio-1.44.2)

---
updated-dependencies:
- dependency-name: tokio
  dependency-version: 1.44.2
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-08 02:11:04 +00:00
6bf09ce429 Merge pull request #39 from flibusta-apps/dependabot/cargo/openssl-0.10.72
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
Bump openssl from 0.10.70 to 0.10.72
2025-04-04 23:57:54 +02:00
dependabot[bot]
e3410ef6dd Bump openssl from 0.10.70 to 0.10.72
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.70 to 0.10.72.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.70...openssl-v0.10.72)

---
updated-dependencies:
- dependency-name: openssl
  dependency-version: 0.10.72
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-04 20:54:01 +00:00
4a680af4ae Merge pull request #38 from flibusta-apps/dependabot/cargo/ring-0.17.13
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
Bump ring from 0.17.8 to 0.17.13
2025-03-23 14:19:23 +01:00
dependabot[bot]
0d3ac1d5d1 Bump ring from 0.17.8 to 0.17.13
Bumps [ring](https://github.com/briansmith/ring) from 0.17.8 to 0.17.13.
- [Changelog](https://github.com/briansmith/ring/blob/main/RELEASES.md)
- [Commits](https://github.com/briansmith/ring/commits)

---
updated-dependencies:
- dependency-name: ring
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-03-23 13:16:42 +00:00
b1594214bc Update deps 2025-03-23 14:15:32 +01:00
3614306094 Merge pull request #37 from flibusta-apps/dependabot/cargo/openssl-0.10.70
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
Bump openssl from 0.10.68 to 0.10.70
2025-02-04 10:55:25 +01:00
dependabot[bot]
f1155292bc Bump openssl from 0.10.68 to 0.10.70
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.68 to 0.10.70.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.68...openssl-v0.10.70)

---
updated-dependencies:
- dependency-name: openssl
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-03 18:51:49 +00:00
30e0fc202a Update .gitignore
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
2025-01-05 14:17:35 +01:00
91afa29862 Revert "Update books query"
This reverts commit abee7403b7.
2025-01-05 14:17:04 +01:00
abee7403b7 Update books query 2025-01-05 14:10:12 +01:00
986a8f7f5f Fix
Some checks are pending
Build docker image / Build-Docker-Image (push) Waiting to run
2025-01-04 14:59:18 +01:00
41a9e92030 Update to axum 0.8 2025-01-04 14:51:39 +01:00
4f78a5cf82 Fix translator handler
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
2024-12-26 02:13:05 +01:00
d49d5339fe Fix
Some checks are pending
Build docker image / Build-Docker-Image (push) Waiting to run
2024-12-26 01:46:09 +01:00
5d7b4e9a19 Fix 2024-12-26 01:45:26 +01:00
c58e10bfa0 Fix 2024-12-26 01:28:32 +01:00
3e8500e825 Add json feature to sqlx 2024-12-26 00:11:59 +01:00
52ab9b361d Fix 2024-12-26 00:03:51 +01:00
f938516f65 Fix 2024-12-25 23:51:21 +01:00
325aee3377 Update queries 2024-12-25 23:30:28 +01:00
8002a93069 Move to sqlx 2024-12-25 23:28:22 +01:00
3ee5e51767 Update deps
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
2024-10-02 14:51:43 +02:00
a1b1d412ed Fix getting books by sequence
Some checks failed
Build docker image / Build-Docker-Image (push) Has been cancelled
2024-09-28 22:29:41 +02:00
10f30dae41 Update sequence books 2024-09-28 22:21:14 +02:00
698a9ded17 Add position to sequence books 2024-09-28 21:36:58 +02:00
420c6a6310 Merge pull request #36 from flibusta-apps/dependabot/cargo/openssl-0.10.66
Bump openssl from 0.10.64 to 0.10.66
2024-07-24 19:22:25 +02:00
dependabot[bot]
60b4b025ba Bump openssl from 0.10.64 to 0.10.66
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.64 to 0.10.66.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.64...openssl-v0.10.66)

---
updated-dependencies:
- dependency-name: openssl
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-24 16:59:13 +00:00
f0132c2ce5 Merge pull request #35 from flibusta-apps/dependabot/cargo/prisma-cli/openssl-0.10.66
Bump openssl from 0.10.60 to 0.10.66 in /prisma-cli
2024-07-24 18:58:11 +02:00
dependabot[bot]
0658108a1f Bump openssl from 0.10.60 to 0.10.66 in /prisma-cli
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.60 to 0.10.66.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.60...openssl-v0.10.66)

---
updated-dependencies:
- dependency-name: openssl
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-22 18:10:46 +00:00
fc92d489e2 Merge pull request #34 from flibusta-apps/dependabot/github_actions/docker/build-push-action-6
Bump docker/build-push-action from 5 to 6
2024-06-18 13:05:02 +02:00
dependabot[bot]
5e8dc3e34e Bump docker/build-push-action from 5 to 6
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5 to 6.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v5...v6)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-18 08:54:08 +00:00
eec1c77071 Set pool_timeout to 300 2024-05-24 14:07:54 +02:00
49ca7f2e2b Update connection pool size 2024-05-13 12:44:49 +02:00
c4d09fd7d4 Fix 2024-05-06 23:51:05 +02:00
4bbbbb1660 Update deps 2024-04-25 16:35:27 +02:00
b252ac2994 Update deps 2024-04-25 16:30:21 +02:00
f7e1810077 Fix 2024-04-14 11:59:24 +02:00
ba8612b990 Use vault 2024-04-14 11:22:35 +02:00
45c567410e Merge pull request #32 from flibusta-apps/dependabot/cargo/h2-0.3.26
Bump h2 from 0.3.24 to 0.3.26
2024-04-05 19:55:50 +02:00
a570c31634 Merge pull request #33 from flibusta-apps/dependabot/cargo/prisma-cli/h2-0.3.26
Bump h2 from 0.3.24 to 0.3.26 in /prisma-cli
2024-04-05 19:49:42 +02:00
dependabot[bot]
8260aa7512 Bump h2 from 0.3.24 to 0.3.26 in /prisma-cli
Bumps [h2](https://github.com/hyperium/h2) from 0.3.24 to 0.3.26.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.26/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.24...v0.3.26)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-05 16:18:16 +00:00
dependabot[bot]
e44a26e49e Bump h2 from 0.3.24 to 0.3.26
Bumps [h2](https://github.com/hyperium/h2) from 0.3.24 to 0.3.26.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.26/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.24...v0.3.26)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-05 15:08:42 +00:00
3d1f4cc9f2 Merge pull request #31 from flibusta-apps/dependabot/cargo/prisma-cli/mio-0.8.11
Bump mio from 0.8.8 to 0.8.11 in /prisma-cli
2024-03-04 22:43:58 +01:00
dependabot[bot]
27a14289cd Bump mio from 0.8.8 to 0.8.11 in /prisma-cli
Bumps [mio](https://github.com/tokio-rs/mio) from 0.8.8 to 0.8.11.
- [Release notes](https://github.com/tokio-rs/mio/releases)
- [Changelog](https://github.com/tokio-rs/mio/blob/master/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/mio/compare/v0.8.8...v0.8.11)

---
updated-dependencies:
- dependency-name: mio
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-04 21:36:36 +00:00
a09c95fc3d Merge pull request #30 from flibusta-apps/dependabot/cargo/mio-0.8.11
Bump mio from 0.8.10 to 0.8.11
2024-03-04 22:35:42 +01:00
dependabot[bot]
ab0afc1f4a Bump mio from 0.8.10 to 0.8.11
Bumps [mio](https://github.com/tokio-rs/mio) from 0.8.10 to 0.8.11.
- [Release notes](https://github.com/tokio-rs/mio/releases)
- [Changelog](https://github.com/tokio-rs/mio/blob/master/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/mio/compare/v0.8.10...v0.8.11)

---
updated-dependencies:
- dependency-name: mio
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-03-04 21:30:15 +00:00
ad4bd9e24c Update deps 2024-02-04 12:57:08 +01:00
4c6720c9ac Merge pull request #28 from flibusta-apps/dependabot/cargo/h2-0.3.24
Bump h2 from 0.3.22 to 0.3.24
2024-01-21 22:26:14 +01:00
03ce4b91a6 Merge pull request #29 from flibusta-apps/dependabot/cargo/prisma-cli/h2-0.3.24
Bump h2 from 0.3.20 to 0.3.24 in /prisma-cli
2024-01-21 22:26:07 +01:00
dependabot[bot]
06f99939f0 Bump h2 from 0.3.20 to 0.3.24 in /prisma-cli
Bumps [h2](https://github.com/hyperium/h2) from 0.3.20 to 0.3.24.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.24/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.20...v0.3.24)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-19 16:17:21 +00:00
dependabot[bot]
a574c7b149 Bump h2 from 0.3.22 to 0.3.24
Bumps [h2](https://github.com/hyperium/h2) from 0.3.22 to 0.3.24.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.24/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.22...v0.3.24)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-01-19 15:25:46 +00:00
98d6b486d6 Merge pull request #27 from flibusta-apps/dependabot/cargo/zerocopy-0.7.31
Bump zerocopy from 0.7.30 to 0.7.31
2023-12-15 18:30:30 +01:00
dependabot[bot]
89fb830b3a Bump zerocopy from 0.7.30 to 0.7.31
Bumps [zerocopy](https://github.com/google/zerocopy) from 0.7.30 to 0.7.31.
- [Release notes](https://github.com/google/zerocopy/releases)
- [Changelog](https://github.com/google/zerocopy/blob/main/CHANGELOG.md)
- [Commits](https://github.com/google/zerocopy/compare/v0.7.30...v0.7.31)

---
updated-dependencies:
- dependency-name: zerocopy
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-12-15 03:56:36 +00:00
e8032da027 Fix translator books ordering 2023-12-13 14:27:55 +01:00
32366e2f5a Update author books ordering 2023-12-13 13:52:56 +01:00
a6f5a5be95 Update deps 2023-12-10 22:31:43 +01:00
1cada2b695 Merge pull request #26 from flibusta-apps/dependabot/cargo/openssl-0.10.60
Bump openssl from 0.10.59 to 0.10.60
2023-11-29 00:37:50 +01:00
3ac748b349 Merge pull request #25 from flibusta-apps/dependabot/cargo/prisma-cli/openssl-0.10.60
Bump openssl from 0.10.55 to 0.10.60 in /prisma-cli
2023-11-29 00:37:15 +01:00
dependabot[bot]
64b28fc0b3 Bump openssl from 0.10.59 to 0.10.60
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.59 to 0.10.60.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.59...openssl-v0.10.60)

---
updated-dependencies:
- dependency-name: openssl
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-11-28 21:46:09 +00:00
dependabot[bot]
0e63183242 Bump openssl from 0.10.55 to 0.10.60 in /prisma-cli
Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.55 to 0.10.60.
- [Release notes](https://github.com/sfackler/rust-openssl/releases)
- [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.55...openssl-v0.10.60)

---
updated-dependencies:
- dependency-name: openssl
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-11-28 21:41:19 +00:00
9bf0c9f313 Update deps 2023-11-21 12:09:48 +01:00
46d4a90ba8 Update deps 2023-11-08 18:04:50 +01:00
d0054335f6 Update deps 2023-11-07 13:53:15 +01:00
5d8da5f29a Add year to book model 2023-11-04 06:24:17 +03:00
8fe744920f Merge pull request #23 from flibusta-apps/dependabot/cargo/rustix-0.38.19
Bump rustix from 0.38.14 to 0.38.19
2023-10-26 10:36:58 +02:00
5c756fa2c4 Merge pull request #24 from flibusta-apps/dependabot/github_actions/ASzc/change-string-case-action-6
Bump ASzc/change-string-case-action from 5 to 6
2023-10-26 10:36:43 +02:00
dependabot[bot]
b0d147f4fc Bump ASzc/change-string-case-action from 5 to 6
Bumps [ASzc/change-string-case-action](https://github.com/aszc/change-string-case-action) from 5 to 6.
- [Release notes](https://github.com/aszc/change-string-case-action/releases)
- [Commits](https://github.com/aszc/change-string-case-action/compare/v5...v6)

---
updated-dependencies:
- dependency-name: ASzc/change-string-case-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-10-26 08:20:16 +00:00
dependabot[bot]
26d41b3c3a Bump rustix from 0.38.14 to 0.38.19
Bumps [rustix](https://github.com/bytecodealliance/rustix) from 0.38.14 to 0.38.19.
- [Release notes](https://github.com/bytecodealliance/rustix/releases)
- [Commits](https://github.com/bytecodealliance/rustix/compare/v0.38.14...v0.38.19)

---
updated-dependencies:
- dependency-name: rustix
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-10-18 18:44:41 +00:00
31ff4c1116 Add pre-commit config 2023-09-24 22:57:57 +02:00
7b68f443b8 Update deps 2023-09-22 23:01:43 +02:00
6a1b8a7191 Fixes 2023-09-14 21:57:12 +02:00
0627557c6d Merge pull request #22 from flibusta-apps/dependabot/github_actions/docker/setup-buildx-action-3
Bump docker/setup-buildx-action from 2 to 3
2023-09-12 12:21:53 +02:00
093a68f23d Merge pull request #21 from flibusta-apps/dependabot/github_actions/docker/build-push-action-5
Bump docker/build-push-action from 4 to 5
2023-09-12 12:12:29 +02:00
34dfc3c08f Merge pull request #20 from flibusta-apps/dependabot/github_actions/docker/login-action-3
Bump docker/login-action from 2 to 3
2023-09-12 12:00:12 +02:00
dependabot[bot]
2eb681fab9 Bump docker/setup-buildx-action from 2 to 3
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 2 to 3.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v2...v3)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-12 08:22:26 +00:00
dependabot[bot]
2d0b387560 Bump docker/build-push-action from 4 to 5
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 4 to 5.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v4...v5)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-12 08:22:23 +00:00
dependabot[bot]
834953f10d Bump docker/login-action from 2 to 3
Bumps [docker/login-action](https://github.com/docker/login-action) from 2 to 3.
- [Release notes](https://github.com/docker/login-action/releases)
- [Commits](https://github.com/docker/login-action/compare/v2...v3)

---
updated-dependencies:
- dependency-name: docker/login-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-12 08:22:20 +00:00
1f2ae4fb28 Merge pull request #19 from flibusta-apps/dependabot/github_actions/actions/checkout-4
Bump actions/checkout from 3 to 4
2023-09-05 12:24:29 +02:00
dependabot[bot]
15d3c3e25b Bump actions/checkout from 3 to 4
Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-05 08:12:27 +00:00
640a668b85 Update deps 2023-08-18 23:31:48 +02:00
895203bc0f Update deps 2023-08-16 22:44:02 +02:00
2003c1e474 Fix 2023-08-12 19:24:34 +02:00
63327ae64b Fix prisma client 2023-08-12 19:08:42 +02:00
cdb16b4ddb Add indexes 2023-08-12 19:08:22 +02:00
c4fa52adb3 Fix 2023-08-12 19:03:14 +02:00
2fef226bd6 Fix 2023-08-12 14:17:54 +02:00
d00faefd06 Fix 2023-08-12 14:02:20 +02:00
9014b88416 Fix 2023-08-12 00:31:23 +02:00
51b1523cfb Fix 2023-08-11 23:41:11 +02:00
e257138f4e Fix metas ordering 2023-08-11 23:31:13 +02:00
80321b9e69 Fix building 2023-08-11 22:25:10 +02:00
09c71b67b1 Merge pull request #18 from flibusta-apps/feature/rewrite-to-rust
Feature: rewrite to rust
2023-08-11 22:21:36 +02:00
66 changed files with 5344 additions and 25920 deletions

View File

@@ -1,2 +0,0 @@
[alias]
prisma = "run -p prisma-cli --"

View File

@@ -3,48 +3,43 @@ name: Build docker image
on:
push:
branches:
- 'main'
- "main"
jobs:
Build-Docker-Image:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v3
- name: Checkout
uses: actions/checkout@v4
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- id: repository_name
uses: ASzc/change-string-case-action@v5
uses: ASzc/change-string-case-action@v6
with:
string: ${{ github.repository }}
-
name: Login to ghcr.io
uses: docker/login-action@v2
- name: Login to ghcr.io
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
-
name: Build and push
- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
uses: docker/build-push-action@v6
env:
IMAGE: ${{ steps.repository_name.outputs.lowercase }}
with:
push: true
platforms: linux/amd64
tags: ghcr.io/${{ env.IMAGE }}:latest
tags: ghcr.io/${{ env.IMAGE }}:latest,ghcr.io/${{ env.IMAGE }}:${{ github.sha }}
context: .
file: ./docker/build.dockerfile
-
name: Invoke deployment hook
- name: Invoke deployment hook
uses: joelwmale/webhook-action@master
with:
url: ${{ secrets.WEBHOOK_URL }}
url: ${{ secrets.WEBHOOK_URL }}?BOOK_SERVER_TAG=${{ github.sha }}

1
.gitignore vendored
View File

@@ -2,3 +2,4 @@
.env
.vscode
.idea

7
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,7 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: fmt
- id: cargo-check
- id: clippy

View File

@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM book_sequences bs\n JOIN books b ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "078bb62c5139d159bc17d98480846591fe42a466b788e7c27e1a64a6549bfda3"
}

View File

@@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n aa.id,\n aa.title,\n aa.text,\n aa.file\n FROM author_annotations aa\n WHERE aa.author = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "file",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "1875c11e55b61fd58e916f7663e2649e0c09ae604e620274718dd465e7958f64"
}

View File

@@ -0,0 +1,29 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "191a5f0ddc1e4631b594f07710157cfdb3e002821f1ebb112fe772b274d08b1c"
}

View File

@@ -0,0 +1,66 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n ORDER BY genres.id ASC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "code",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "meta",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false,
false,
null
]
},
"hash": "1f78b5cbdae5f9732e3637fcfd1605477bafc12a443900276c46df644a7f6d26"
}

View File

@@ -0,0 +1,175 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n WHERE b.id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null
]
},
"hash": "2d44679efadfba5a350a1612b9cf1ba241c4ddb38babf0cac5bdd8049e894ac8"
}

View File

@@ -0,0 +1,29 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "4144af136af6f9a148a87030ce0f7c7625f4296bbee0ffdd24a81571e2afd54e"
}

View File

@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, name FROM sequences WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false
]
},
"hash": "57f37e885a05ace86e5768a8ad7ac04f5d48784885db7d04d04e277d8c51970c"
}

View File

@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "600cfd73a3a1c465c19d98dc4ba6381872d82f954b0733aa9518df2ee7701b6e"
}

View File

@@ -0,0 +1,47 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n ORDER BY a.id ASC\n OFFSET $1\n LIMIT $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "6e1d93e7773059ec3cb4fd29259f4f0250868c3f56e7b017ba8e5c20ccffb57d"
}

View File

@@ -0,0 +1,137 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n OFFSET $3\n LIMIT $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null
]
},
"hash": "71ddfa47ccbd71543a0ff402f9b077d7035ad35fb5e714f5d88357169b46b0fe"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
null
]
},
"hash": "78e41ab1e7ca7b6acaf21aec5015a75c2962f6085a5774773fba8acb5e166e2e"
}

View File

@@ -0,0 +1,276 @@
{
"db_name": "PostgreSQL",
"query": "\n        SELECT\n            b.id,\n            b.title,\n            b.lang,\n            b.file_type,\n            b.year,\n            CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n            b.uploaded,\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                authors.id,\n                                authors.first_name,\n                                authors.last_name,\n                                authors.middle_name,\n                                EXISTS(\n                                    SELECT * FROM author_annotations WHERE author = authors.id\n                                )\n                            )::author_type\n                        )\n                    FROM book_authors\n                    JOIN authors ON authors.id = book_authors.author\n                    WHERE book_authors.book = b.id\n                ),\n                ARRAY[]::author_type[]\n            ) AS \"authors!: Vec<Author>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                authors.id,\n                                authors.first_name,\n                                authors.last_name,\n                                authors.middle_name,\n                                EXISTS(\n                                    SELECT * FROM author_annotations WHERE author = authors.id\n                                )\n                            )::author_type\n                        )\n                    FROM translations\n                    JOIN authors ON authors.id = translations.author\n                    WHERE translations.book = b.id\n                ),\n                ARRAY[]::author_type[]\n            ) AS \"translators!: Vec<Author>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                sequences.id,\n                                sequences.name\n                            )::sequence_type\n                        )\n                    FROM book_sequences\n                    JOIN sequences ON sequences.id = book_sequences.sequence\n                    WHERE book_sequences.book = b.id\n                ),\n                ARRAY[]::sequence_type[]\n            ) AS \"sequences!: Vec<Sequence>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                genres.id,\n                                ROW(\n                                    sources.id,\n                                    sources.name\n                                )::source_type,\n                                genres.remote_id,\n                                genres.code,\n                                genres.description,\n                                genres.meta\n                            )::genre_type\n                        )\n                    FROM book_genres\n                    JOIN genres ON genres.id = book_genres.genre\n                    JOIN sources ON sources.id = genres.source\n                    WHERE book_genres.book = b.id\n                ),\n                ARRAY[]::genre_type[]\n            ) AS \"genres!: Vec<Genre>\",\n            EXISTS(\n                SELECT * FROM book_annotations WHERE book = b.id\n            ) AS \"annotation_exists!: bool\",\n            (\n                SELECT\n                    ROW(\n                        sources.id,\n                        sources.name\n                    )::source_type\n                FROM sources\n                WHERE sources.id = b.source\n            ) AS \"source!: Source\",\n            b.remote_id,\n            b.is_deleted,\n            b.pages\n        FROM books b\n        WHERE b.id = $1\n        ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "genres!: Vec<Genre>",
"type_info": {
"Custom": {
"name": "genre_type[]",
"kind": {
"Array": {
"Custom": {
"name": "genre_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"source",
{
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
],
[
"remote_id",
"Int4"
],
[
"code",
"Varchar"
],
[
"description",
"Varchar"
],
[
"meta",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 11,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 12,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 13,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 14,
"name": "is_deleted",
"type_info": "Bool"
},
{
"ordinal": 15,
"name": "pages",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
null,
false,
false,
true
]
},
"hash": "981703669c9152946a541f70a84ec5dbf481e7a28f3d5949fbc34588561104e5"
}

View File

@@ -0,0 +1,35 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4",
"Int8",
"Int8"
]
},
"nullable": [
false,
null
]
},
"hash": "9bb82eaa3dcf8ead767d5f9ac9dbe8d70f8e68b12f6b004a9e495aa8ebc6d250"
}

View File

@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*)\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE\n b.is_deleted = false\n AND ba.author = $1\n AND b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "9be35f43d7faa0c65c88ced8ee10347ae67e6a906461fb4858fc003824f4b260"
}

View File

@@ -0,0 +1,70 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n genres.id,\n genres.remote_id,\n genres.code,\n genres.description,\n genres.meta,\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = genres.source\n ) AS \"source!: Source\"\n FROM genres\n WHERE (meta = $1 OR $1 IS NULL)\n ORDER BY genres.id ASC\n LIMIT $2 OFFSET $3\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "code",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "meta",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
],
"parameters": {
"Left": [
"Text",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null
]
},
"hash": "a22bfa2e92bf4a3b0710388c6c5bbfa50f24864b183bb304d35cea18babd8ce3"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "PostgreSQL",
"query": "SELECT COUNT(*) FROM authors",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "b4733c7414c62520fb74e3302f9c01bc351153930117c58832981990db038e74"
}

View File

@@ -0,0 +1,29 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\"\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray"
]
},
"nullable": [
false,
null
]
},
"hash": "b4c8511c5b3c157a64e4783ff6acd469abb21c5fda9ed9728e36b5b1d02d9aba"
}

View File

@@ -0,0 +1,155 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n bs.position\n FROM books b\n JOIN book_sequences bs ON b.id = bs.book\n WHERE\n b.is_deleted = FALSE AND\n bs.sequence = $1 AND\n b.lang = ANY($2)\n ORDER BY bs.position\n LIMIT $3 OFFSET $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 10,
"name": "position",
"type_info": "Int2"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
false
]
},
"hash": "b6556c3bf60306517850e476d764c01e1e4538d6cf937096ad4a8e42a9657b9c"
}

View File

@@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id,\n title,\n text,\n file\n FROM book_annotations\n WHERE book = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "file",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "b83f6df4dea9bad87d0423ad307da8c72e2c343181afa0f5bce3e1a43dee7c8c"
}

View File

@@ -0,0 +1,210 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM book_authors\n JOIN authors ON authors.id = book_authors.author\n WHERE book_authors.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"authors!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\",\n (\n SELECT\n ROW(\n sources.id,\n sources.name\n )::source_type\n FROM sources\n WHERE sources.id = b.source\n ) AS \"source!: Source\",\n b.remote_id\n FROM books b\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ORDER BY b.id ASC\n OFFSET $7\n LIMIT $8\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 12,
"name": "remote_id",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
false
]
},
"hash": "bb036838069e57b6f88ec4dd3b53d6b44b1d9a4e01c5f80343e33e116e422bb5"
}

View File

@@ -0,0 +1,27 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) FROM books\n WHERE lang = ANY($1) AND\n ($2::boolean IS NULL OR is_deleted = $2) AND\n ($3::date IS NULL OR uploaded >= $3) AND\n ($4::date IS NULL OR uploaded <= $4) AND\n ($5::integer IS NULL OR id >= $5) AND\n ($6::integer IS NULL OR id <= $6)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"TextArray",
"Bool",
"Date",
"Date",
"Int4",
"Int4"
]
},
"nullable": [
null
]
},
"hash": "c0be89ba0ef10d97bb82401fed4196ffd2be48ce4e5586ba6da63c78793bb1db"
}

View File

@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, name FROM sequences WHERE id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false
]
},
"hash": "d14c08d4d25201d30178c2313650db1aaef355968970f7f0a75b88bba209dc20"
}

View File

@@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "d6584aea52bc3abcbb4d9f491ef357845b562cf83d2e135b7542ebca2024a3f3"
}

View File

@@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n a.id,\n a.first_name,\n a.last_name,\n COALESCE(a.middle_name, '') AS \"middle_name!: String\",\n CASE\n WHEN aa.id IS NOT NULL THEN true\n ELSE false\n END AS \"annotation_exists!: bool\"\n FROM authors a\n LEFT JOIN author_annotations aa ON a.id = aa.author\n WHERE a.id = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "first_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "last_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "middle_name!: String",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4Array"
]
},
"nullable": [
false,
false,
false,
null,
null
]
},
"hash": "eaeeab7481036b78b9323b5d9e99e9a14a39e4f6c1489fe564045e937c38769c"
}

View File

@@ -0,0 +1,277 @@
{
"db_name": "PostgreSQL",
"query": "\n        SELECT\n            b.id,\n            b.title,\n            b.lang,\n            b.file_type,\n            b.year,\n            CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n            b.uploaded,\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                authors.id,\n                                authors.first_name,\n                                authors.last_name,\n                                authors.middle_name,\n                                EXISTS(\n                                    SELECT * FROM author_annotations WHERE author = authors.id\n                                )\n                            )::author_type\n                        )\n                    FROM book_authors\n                    JOIN authors ON authors.id = book_authors.author\n                    WHERE book_authors.book = b.id\n                ),\n                ARRAY[]::author_type[]\n            ) AS \"authors!: Vec<Author>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                authors.id,\n                                authors.first_name,\n                                authors.last_name,\n                                authors.middle_name,\n                                EXISTS(\n                                    SELECT * FROM author_annotations WHERE author = authors.id\n                                )\n                            )::author_type\n                        )\n                    FROM translations\n                    JOIN authors ON authors.id = translations.author\n                    WHERE translations.book = b.id\n                ),\n                ARRAY[]::author_type[]\n            ) AS \"translators!: Vec<Author>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                sequences.id,\n                                sequences.name\n                            )::sequence_type\n                        )\n                    FROM book_sequences\n                    JOIN sequences ON sequences.id = book_sequences.sequence\n                    WHERE book_sequences.book = b.id\n                ),\n                ARRAY[]::sequence_type[]\n            ) AS \"sequences!: Vec<Sequence>\",\n            COALESCE(\n                (\n                    SELECT\n                        ARRAY_AGG(\n                            ROW(\n                                genres.id,\n                                ROW(\n                                    sources.id,\n                                    sources.name\n                                )::source_type,\n                                remote_id,\n                                genres.code,\n                                genres.description,\n                                genres.meta\n                            )::genre_type\n                        )\n                    FROM book_genres\n                    JOIN genres ON genres.id = book_genres.genre\n                    JOIN sources ON sources.id = genres.source\n                    WHERE book_genres.book = b.id\n                ),\n                ARRAY[]::genre_type[]\n            ) AS \"genres!: Vec<Genre>\",\n            EXISTS(\n                SELECT * FROM book_annotations WHERE book = b.id\n            ) AS \"annotation_exists!: bool\",\n            (\n                SELECT\n                    ROW(\n                        sources.id,\n                        sources.name\n                    )::source_type\n                FROM sources\n                WHERE sources.id = b.source\n            ) AS \"source!: Source\",\n            b.remote_id,\n            b.is_deleted,\n            b.pages\n        FROM books b\n        WHERE b.source = $1 AND b.remote_id = $2\n        ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "authors!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 10,
"name": "genres!: Vec<Genre>",
"type_info": {
"Custom": {
"name": "genre_type[]",
"kind": {
"Array": {
"Custom": {
"name": "genre_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"source",
{
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
],
[
"remote_id",
"Int4"
],
[
"code",
"Varchar"
],
[
"description",
"Varchar"
],
[
"meta",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 11,
"name": "annotation_exists!: bool",
"type_info": "Bool"
},
{
"ordinal": 12,
"name": "source!: Source",
"type_info": {
"Custom": {
"name": "source_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
},
{
"ordinal": 13,
"name": "remote_id",
"type_info": "Int4"
},
{
"ordinal": 14,
"name": "is_deleted",
"type_info": "Bool"
},
{
"ordinal": 15,
"name": "pages",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int2",
"Int4"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null,
null,
null,
null,
false,
false,
true
]
},
"hash": "fb0d1b13928611d566514fd103df973ad1c81fd60efada560e89a2b40a6d3fc1"
}

View File

@@ -0,0 +1,137 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n b.id,\n b.title,\n b.lang,\n b.file_type,\n b.year,\n CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS \"available_types!: Vec<String>\",\n b.uploaded,\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n authors.id,\n authors.first_name,\n authors.last_name,\n authors.middle_name,\n EXISTS(\n SELECT * FROM author_annotations WHERE author = authors.id\n )\n )::author_type\n )\n FROM translations\n JOIN authors ON authors.id = translations.author\n WHERE translations.book = b.id\n ),\n ARRAY[]::author_type[]\n ) AS \"translators!: Vec<Author>\",\n COALESCE(\n (\n SELECT\n ARRAY_AGG(\n ROW(\n sequences.id,\n sequences.name\n )::sequence_type\n )\n FROM book_sequences\n JOIN sequences ON sequences.id = book_sequences.sequence\n WHERE book_sequences.book = b.id\n ),\n ARRAY[]::sequence_type[]\n ) AS \"sequences!: Vec<Sequence>\",\n EXISTS(\n SELECT * FROM book_annotations WHERE book = b.id\n ) AS \"annotation_exists!: bool\"\n FROM books b\n JOIN book_authors ba ON b.id = ba.book\n WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)\n ORDER BY b.title ASC\n OFFSET $3\n LIMIT $4\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "lang",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "file_type",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "year",
"type_info": "Int2"
},
{
"ordinal": 5,
"name": "available_types!: Vec<String>",
"type_info": "TextArray"
},
{
"ordinal": 6,
"name": "uploaded",
"type_info": "Date"
},
{
"ordinal": 7,
"name": "translators!: Vec<Author>",
"type_info": {
"Custom": {
"name": "author_type[]",
"kind": {
"Array": {
"Custom": {
"name": "author_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"first_name",
"Varchar"
],
[
"last_name",
"Varchar"
],
[
"middle_name",
"Varchar"
],
[
"annotation_exists",
"Bool"
]
]
}
}
}
}
}
}
},
{
"ordinal": 8,
"name": "sequences!: Vec<Sequence>",
"type_info": {
"Custom": {
"name": "sequence_type[]",
"kind": {
"Array": {
"Custom": {
"name": "sequence_type",
"kind": {
"Composite": [
[
"id",
"Int4"
],
[
"name",
"Varchar"
]
]
}
}
}
}
}
}
},
{
"ordinal": 9,
"name": "annotation_exists!: bool",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4",
"TextArray",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
null,
false,
null,
null,
null
]
},
"hash": "ff9694275aad3c0cbb3bddb87a45550615d1996328ffba98a6d01aaa2b17ec2b"
}

5224
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -3,33 +3,29 @@ name = "book_library_server"
version = "0.1.0"
edition = "2021"
[workspace]
members = [
"prisma-cli"
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
once_cell = "1.18.0"
once_cell = "1.21.1"
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }
tokio = { version = "1.44.2", features = ["full"] }
tokio = { version = "1.28.2", features = ["full"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
sentry-tracing = "0.36.0"
tower-http = { version = "0.6.2", features = ["trace"] }
tracing = "0.1.37"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"]}
tower-http = { version = "0.4.3", features = ["trace"] }
axum = { version = "0.8.1", features = ["json"] }
axum-extra = { version ="0.10.0", features = ["query"] }
axum-prometheus = "0.8.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["raw_value"] }
axum = { version = "0.6.18", features = ["json"] }
axum-extra = { version ="0.7.7", features = ["query"] }
axum-prometheus = "0.4.0"
serde = { version = "1.0.163", features = ["derive"] }
sentry = { version = "0.36.0", features = ["debug-images"] }
sentry = { version = "0.31.3", features = ["debug-images"] }
meilisearch-sdk = "0.28.0"
meilisearch-sdk = "0.24.1"
rand = "0.9.0"
rand = "0.8.5"
chrono = { version = "0.4.40", features = ["serde"] }
chrono = "0.4.26"
sqlx = { version = "0.8.3", features = ["runtime-tokio", "postgres", "macros", "chrono", "json"] }

View File

@@ -1,4 +1,4 @@
FROM rust:bullseye AS builder
FROM rust:bookworm AS builder
WORKDIR /app
@@ -7,15 +7,18 @@ COPY . .
RUN cargo build --release --bin book_library_server
FROM debian:bullseye-slim
FROM debian:bookworm-slim
RUN apt-get update \
&& apt-get install -y openssl ca-certificates \
&& apt-get install -y openssl ca-certificates curl jq \
&& rm -rf /var/lib/apt/lists/*
RUN update-ca-certificates
COPY ./scripts/*.sh /
RUN chmod +x /*.sh
WORKDIR /app
COPY --from=builder /app/target/release/book_library_server /usr/local/bin
ENTRYPOINT ["/usr/local/bin/book_library_server"]
CMD ["/start.sh"]

View File

@@ -1,3 +0,0 @@
node_modules
# Keep environment variables out of version control
.env

4622
prisma-cli/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +0,0 @@
[package]
name = "prisma-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.8", features = ["postgresql"] }

View File

@@ -1,3 +0,0 @@
fn main() {
prisma_client_rust_cli::run();
}

View File

@@ -1,165 +0,0 @@
generator client {
provider = "cargo prisma"
output = "../src/prisma.rs"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
model AuthorAnnotation {
id Int @id @default(autoincrement())
author_id Int @unique @map("author")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
author Author @relation(fields: [author_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_author_annotations_authors_id_author")
@@map("author_annotations")
}
model Author {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
first_name String @db.VarChar(256)
last_name String @db.VarChar(256)
middle_name String? @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_authors_sources_id_source")
author_annotation AuthorAnnotation?
book_authors BookAuthor[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_authors_source_remote_id")
@@index([last_name(ops: raw("gin_trgm_ops"))], map: "tgrm_authors_l", type: Gin)
@@map("authors")
}
model BookAnnotation {
id Int @id @default(autoincrement())
book_id Int @unique @map("book")
title String @db.VarChar(256)
text String
file String? @db.VarChar(256)
book Book @relation(fields: [book_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_book_annotations_books_id_book")
@@map("book_annotations")
}
model BookAuthor {
id Int @id @default(autoincrement())
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_book_authors_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_authors_books_book_id")
@@unique([book_id, author_id], map: "uc_book_authors_book_author")
@@index([author_id], map: "book_authors_author")
@@index([book_id], map: "book_authors_book")
@@map("book_authors")
}
model BookGenre {
id Int @id @default(autoincrement())
genre_id Int @map("genre")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_genres_books_book_id")
genre Genre @relation(fields: [genre_id], references: [id], onDelete: Cascade, map: "fk_book_genres_genres_genre_id")
@@unique([book_id, genre_id], map: "uc_book_genres_book_genre")
@@index([book_id], map: "book_genres_book")
@@index([genre_id], map: "book_genres_genre")
@@map("book_genres")
}
model BookSequence {
id Int @id @default(autoincrement())
position Int @db.SmallInt
sequence_id Int @map("sequence")
book_id Int @map("book")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_books_book_id")
sequence Sequence @relation(fields: [sequence_id], references: [id], onDelete: Cascade, map: "fk_book_sequences_sequences_sequence_id")
@@unique([book_id, sequence_id], map: "uc_book_sequences_book_sequence")
@@index([book_id], map: "book_sequences_book")
@@index([sequence_id], map: "book_sequences_sequence")
@@map("book_sequences")
}
model Book {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
title String @db.VarChar(256)
lang String @db.VarChar(3)
file_type String @db.VarChar(4)
uploaded DateTime @db.Date
is_deleted Boolean @default(false)
pages Int?
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_books_sources_id_source")
book_annotation BookAnnotation?
book_authors BookAuthor[]
book_genres BookGenre[]
book_sequences BookSequence[]
translations Translator[]
@@unique([source_id, remote_id], map: "uc_books_source_remote_id")
@@index([file_type], map: "ix_books_file_type")
@@index([title], map: "ix_books_title")
@@index([title(ops: raw("gin_trgm_ops"))], map: "trgm_books_title", type: Gin)
@@map("books")
}
model Genre {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
code String @db.VarChar(45)
description String @db.VarChar(99)
meta String @db.VarChar(45)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_genres_sources_id_source")
book_genres BookGenre[]
@@unique([source_id, remote_id], map: "uc_genres_source_remote_id")
@@map("genres")
}
model Sequence {
id Int @id @default(autoincrement())
source_id Int @map("source") @db.SmallInt
remote_id Int
name String @db.VarChar(256)
source Source @relation(fields: [source_id], references: [id], onDelete: NoAction, onUpdate: NoAction, map: "fk_sequences_sources_id_source")
book_sequences BookSequence[]
@@unique([source_id, remote_id], map: "uc_sequences_source_remote_id")
@@index([name], map: "ix_sequences_name")
@@index([name(ops: raw("gin_trgm_ops"))], map: "tgrm_sequences_name", type: Gin)
@@map("sequences")
}
model Source {
id Int @id @default(autoincrement()) @db.SmallInt
name String @unique @db.VarChar(32)
authors Author[]
books Book[]
genres Genre[]
sequences Sequence[]
@@map("sources")
}
model Translator {
id Int @id @default(autoincrement())
position Int @db.SmallInt
author_id Int @map("author")
book_id Int @map("book")
author Author @relation(fields: [author_id], references: [id], onDelete: Cascade, map: "fk_translations_authors_author_id")
book Book @relation(fields: [book_id], references: [id], onDelete: Cascade, map: "fk_translations_books_book_id")
@@unique([book_id, author_id], map: "uc_translations_book_author")
@@index([author_id], map: "translations_author")
@@index([book_id], map: "translations_book")
@@map("translations")
}

12
scripts/env.sh Normal file
View File

@@ -0,0 +1,12 @@
#! /usr/bin/env sh
response=`curl -X 'GET' "https://$VAULT_HOST/v1/$VAULT_SECRET_PATH" -s \
-H 'accept: application/json' \
-H "X-Vault-Token: $VAULT_TOKEN"`
data=`echo $response | jq -r '.data.data'`
for key in $(echo "$data" | jq -r 'keys[]'); do
value=$(echo "$data" | jq -r ".\"$key\"") # Corrected syntax
echo "$key"="$value"
done

5
scripts/start.sh Normal file
View File

@@ -0,0 +1,5 @@
#! /usr/bin/env sh
export $(/env.sh)
exec /usr/local/bin/book_library_server

View File

@@ -16,7 +16,7 @@ pub struct Config {
pub meili_host: String,
pub meili_master_key: String,
pub sentry_dsn: String
pub sentry_dsn: String,
}
impl Config {
@@ -33,11 +33,9 @@ impl Config {
meili_host: get_env("MEILI_HOST"),
meili_master_key: get_env("MEILI_MASTER_KEY"),
sentry_dsn: get_env("SENTRY_DSN")
sentry_dsn: get_env("SENTRY_DSN"),
}
}
}
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
Config::load()
});
pub static CONFIG: Lazy<Config> = Lazy::new(Config::load);

View File

@@ -1,9 +1,10 @@
use crate::{prisma::PrismaClient, config::CONFIG};
use crate::config::CONFIG;
use sqlx::{postgres::PgPoolOptions, PgPool};
pub async fn get_prisma_client() -> PrismaClient {
pub async fn get_postgres_pool() -> PgPool {
let database_url: String = format!(
"postgresql://{}:{}@{}:{}/{}?connection_limit=4",
"postgresql://{}:{}@{}:{}/{}",
CONFIG.postgres_user,
CONFIG.postgres_password,
CONFIG.postgres_host,
@@ -11,9 +12,10 @@ pub async fn get_prisma_client() -> PrismaClient {
CONFIG.postgres_db
);
PrismaClient::_builder()
.with_url(database_url)
.build()
PgPoolOptions::new()
.max_connections(10)
.acquire_timeout(std::time::Duration::from_secs(300))
.connect(&database_url)
.await
.unwrap()
}

View File

@@ -1,17 +1,17 @@
pub mod config;
pub mod views;
pub mod prisma;
pub mod db;
pub mod serializers;
pub mod meilisearch;
pub mod serializers;
pub mod views;
use sentry::{integrations::debug_images::DebugImagesIntegration, types::Dsn, ClientOptions};
use sentry_tracing::EventFilter;
use std::{net::SocketAddr, str::FromStr};
use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
use tracing::info;
use tracing_subscriber::{filter, layer::SubscriberExt, util::SubscriberInitExt};
use crate::views::get_router;
#[tokio::main]
async fn main() {
let options = ClientOptions {
@@ -23,9 +23,15 @@ async fn main() {
let _guard = sentry::init(options);
tracing_subscriber::fmt()
.with_target(false)
.compact()
let sentry_layer = sentry_tracing::layer().event_filter(|md| match md.level() {
&tracing::Level::ERROR => EventFilter::Event,
_ => EventFilter::Ignore,
});
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer().with_target(false))
.with(filter::LevelFilter::INFO)
.with(sentry_layer)
.init();
let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
@@ -33,9 +39,7 @@ async fn main() {
let app = get_router().await;
info!("Start webserver...");
axum::Server::bind(&addr)
.serve(app.into_make_service())
.await
.unwrap();
let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
axum::serve(listener, app).await.unwrap();
info!("Webserver shutdown...")
}

View File

@@ -1,14 +1,14 @@
use meilisearch_sdk::Client;
use meilisearch_sdk::client::Client;
use serde::Deserialize;
use crate::config::CONFIG;
pub fn get_meili_client() -> Client {
Client::new(
&CONFIG.meili_host,
Some(CONFIG.meili_master_key.clone())
)
Client::new(&CONFIG.meili_host, Some(CONFIG.meili_master_key.clone())).unwrap()
}
pub trait GetId {
fn get_id(&self) -> i32;
}
#[derive(Deserialize)]
@@ -19,7 +19,13 @@ pub struct AuthorMeili {
pub middle_name: String,
pub author_langs: Vec<String>,
pub translator_langs: Vec<String>,
pub books_count: i32
pub books_count: i32,
}
impl GetId for AuthorMeili {
fn get_id(&self) -> i32 {
self.id
}
}
#[derive(Deserialize)]
@@ -27,7 +33,13 @@ pub struct BookMeili {
pub id: i32,
pub title: String,
pub lang: String,
pub genres: Vec<i32>
pub genres: Vec<i32>,
}
impl GetId for BookMeili {
fn get_id(&self) -> i32 {
self.id
}
}
#[derive(Deserialize)]
@@ -36,7 +48,13 @@ pub struct GenreMeili {
pub description: String,
pub meta: String,
pub langs: Vec<String>,
pub books_count: i32
pub books_count: i32,
}
impl GetId for GenreMeili {
fn get_id(&self) -> i32 {
self.id
}
}
#[derive(Deserialize)]
@@ -44,5 +62,11 @@ pub struct SequenceMeili {
pub id: i32,
pub name: String,
pub langs: Vec<String>,
pub books_count: i32
pub books_count: i32,
}
impl GetId for SequenceMeili {
fn get_id(&self) -> i32 {
self.id
}
}

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,11 @@
use serde::Deserialize;
fn default_langs() -> Vec<String> {
vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
}
#[derive(Deserialize)]
pub struct AllowedLangs {
pub allowed_langs: Vec<String>
#[serde(default = "default_langs")]
pub allowed_langs: Vec<String>,
}

View File

@@ -1,10 +1,11 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::{author, book};
use super::date::naive_date_serializer;
use super::sequence::Sequence;
use super::{sequence::Sequence, utils::{get_available_types, get_translators, get_sequences}};
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
#[sqlx(type_name = "author_type")]
pub struct Author {
pub id: i32,
pub first_name: String,
@@ -13,66 +14,17 @@ pub struct Author {
pub annotation_exists: bool,
}
impl From<author::Data> for Author {
fn from(val: author::Data) -> Self {
let author::Data {
id,
first_name,
last_name,
middle_name,
author_annotation,
..
} = val;
Author {
id,
first_name,
last_name,
middle_name: middle_name.unwrap_or("".to_string()),
annotation_exists: author_annotation.unwrap().is_some(),
}
}
}
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct AuthorBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for AuthorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
translations,
book_sequences,
book_annotation,
source,
..
} = val;
AuthorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,24 +1,9 @@
use serde::Serialize;
use crate::prisma::author_annotation;
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct AuthorAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>
}
impl From<author_annotation::Data> for AuthorAnnotation {
fn from(val: author_annotation::Data) -> Self {
let author_annotation::Data { id, title, text, file, .. } = val;
AuthorAnnotation {
id,
title,
text,
file
}
}
pub file: Option<String>,
}

View File

@@ -1,78 +1,35 @@
use chrono::{DateTime, Utc};
use serde::{Serialize, Deserialize};
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
use crate::prisma::book::{self};
use super::date::naive_date_serializer;
use super::{source::Source, utils::{get_available_types, get_translators, get_sequences, get_authors, get_genres}, author::Author, sequence::Sequence, genre::Genre};
use super::{author::Author, genre::Genre, sequence::Sequence, source::Source};
fn default_langs() -> Vec<String> {
vec!["ru".to_string(), "be".to_string(), "uk".to_string()]
}
#[derive(Deserialize)]
pub struct BookFilter {
#[serde(default = "default_langs")]
pub allowed_langs: Vec<String>,
pub is_deleted: Option<bool>,
pub uploaded_gte: Option<DateTime<Utc>>,
pub uploaded_lte: Option<DateTime<Utc>>,
pub uploaded_gte: Option<NaiveDate>,
pub uploaded_lte: Option<NaiveDate>,
pub id_gte: Option<i32>,
pub id_lte: Option<i32>,
}
impl BookFilter {
pub fn get_filter_vec(self) -> Vec<book::WhereParam> {
let mut result = vec![];
result.push(
book::lang::in_vec(self.allowed_langs)
);
match self.is_deleted {
Some(v) => {
result.push(
book::is_deleted::equals(v)
);
},
None => {
result.push(
book::is_deleted::equals(false)
);
},
};
if let Some(uploaded_gte) = self.uploaded_gte {
result.push(
book::uploaded::gte(uploaded_gte.into())
);
};
if let Some(uploaded_lte) = self.uploaded_lte {
result.push(
book::uploaded::lte(uploaded_lte.into())
);
};
if let Some(id_gte) = self.id_gte {
result.push(
book::id::gte(id_gte)
);
};
if let Some(id_lte) = self.id_lte {
result.push(
book::id::lte(id_lte)
);
};
result
}
}
#[derive(Serialize)]
pub struct RemoteBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
@@ -81,70 +38,22 @@ pub struct RemoteBook {
pub remote_id: i32,
}
impl From<book::Data> for RemoteBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id
}
}
}
#[derive(Serialize)]
pub struct BaseBook {
pub id: i32,
pub available_types: Vec<String>,
}
impl From<book::Data> for BaseBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
file_type,
source,
..
} = value;
Self {
id,
available_types: get_available_types(file_type, source.clone().unwrap().name),
}
}
}
#[derive(Serialize)]
pub struct DetailBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
@@ -153,53 +62,13 @@ pub struct DetailBook {
pub remote_id: i32,
pub genres: Vec<Genre>,
pub is_deleted: bool,
pub pages: Option<i32>
}
impl From<book::Data> for DetailBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
remote_id,
book_genres,
is_deleted,
pages,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
source: source.unwrap().as_ref().clone().into(),
remote_id,
genres: get_genres(book_genres),
is_deleted,
pages,
}
}
pub pages: Option<i32>,
}
#[derive(Deserialize)]
pub struct RandomBookFilter {
pub allowed_langs: Vec<String>,
pub genre: Option<i32>
pub genre: Option<i32>,
}
#[derive(Serialize)]
@@ -208,41 +77,12 @@ pub struct Book {
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for Book {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_sequences,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,31 +1,9 @@
use serde::Serialize;
use crate::prisma::book_annotation;
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct BookAnnotation {
pub id: i32,
pub title: String,
pub text: String,
pub file: Option<String>
}
impl From<book_annotation::Data> for BookAnnotation {
fn from(value: book_annotation::Data) -> Self {
let book_annotation::Data {
id,
title,
text,
file,
..
} = value;
Self {
id,
title,
text,
file
}
}
pub file: Option<String>,
}

16
src/serializers/date.rs Normal file
View File

@@ -0,0 +1,16 @@
use chrono::NaiveDate;
use serde::Serializer;
const FORMAT: &str = "%Y-%m-%d";
pub mod naive_date_serializer {
use super::*;
pub fn serialize<S>(date: &NaiveDate, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let formatted_date = date.format(FORMAT).to_string();
serializer.serialize_str(&formatted_date)
}
}

View File

@@ -1,43 +1,18 @@
use serde::{Serialize, Deserialize};
use crate::prisma::genre;
use serde::{Deserialize, Serialize};
use super::source::Source;
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
#[sqlx(type_name = "genre_type")]
pub struct Genre {
pub id: i32,
pub source: Source,
pub remote_id: i32,
pub code: String,
pub description: String,
pub meta: String
pub meta: String,
}
impl From<genre::Data> for Genre {
fn from(val: genre::Data) -> Self {
let genre::Data {
id,
remote_id,
code,
description,
meta,
source,
..
} = val;
Genre {
id,
remote_id,
code,
description,
meta,
source: source.unwrap().as_ref().clone().into()
}
}
}
#[derive(Deserialize)]
pub struct GenreFilter {
pub meta: Option<String>,

View File

@@ -1,11 +1,12 @@
pub mod pagination;
pub mod allowed_langs;
pub mod author;
pub mod author_annotation;
pub mod genre;
pub mod source;
pub mod book;
pub mod sequence;
pub mod utils;
pub mod translator;
pub mod allowed_langs;
pub mod book_annotation;
pub mod date;
pub mod genre;
pub mod pagination;
pub mod sequence;
pub mod source;
pub mod translator;
pub mod utils;

View File

@@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};
fn default_page() -> i64 {
1
}
@@ -14,17 +13,16 @@ pub struct Pagination {
#[serde(default = "default_page")]
pub page: i64,
#[serde(default = "default_size")]
pub size: i64
pub size: i64,
}
#[derive(Serialize)]
pub struct Page<T> {
pub items: Vec<T>,
pub total: i64,
pub page: i64,
pub size: i64,
pub pages: i64
pub pages: i64,
}
#[derive(Serialize)]
@@ -34,7 +32,7 @@ pub struct PageWithParent<T, P> {
pub page: i64,
pub size: i64,
pub pages: i64,
pub parent_item: P
pub parent_item: P,
}
impl<T> Page<T> {
@@ -44,7 +42,7 @@ impl<T> Page<T> {
total,
page: pagination.page,
size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size
pages: (total + pagination.size - 1) / pagination.size,
}
}
}
@@ -57,7 +55,7 @@ impl<T, P> PageWithParent<T, P> {
page: pagination.page,
size: pagination.size,
pages: (total + pagination.size - 1) / pagination.size,
parent_item
parent_item,
}
}
}

View File

@@ -1,62 +1,28 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::{sequence, book};
use super::author::Author;
use super::date::naive_date_serializer;
use super::{author::Author, utils::{get_available_types, get_authors, get_translators}};
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
#[sqlx(type_name = "sequence_type")]
pub struct Sequence {
pub id: i32,
pub name: String,
}
impl From<sequence::Data> for Sequence {
fn from(val: sequence::Data) -> Self {
let sequence::Data { id, name, .. } = val;
Sequence { id, name }
}
}
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct SequenceBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub translators: Vec<Author>,
pub annotation_exists: bool,
}
impl From<book::Data> for SequenceBook {
fn from(value: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
translations,
book_annotation,
source,
..
} = value;
Self {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type, source.clone().unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
translators: get_translators(translations),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
pub position: i32,
}

View File

@@ -1,25 +1,8 @@
use serde::Serialize;
use crate::prisma::source;
#[derive(Serialize)]
#[derive(sqlx::FromRow, sqlx::Type, Serialize)]
#[sqlx(type_name = "source_type")]
pub struct Source {
pub id: i32,
pub name: String
pub name: String,
}
impl From<source::Data> for Source
{
fn from(val: source::Data) -> Self {
let source::Data {
id,
name,
..
} = val;
Source {
id,
name
}
}
}

View File

@@ -1,47 +1,21 @@
use chrono::NaiveDate;
use serde::Serialize;
use crate::prisma::book;
use super::date::naive_date_serializer;
use super::{author::Author, sequence::Sequence, utils::{get_available_types, get_authors, get_sequences}};
use super::{author::Author, sequence::Sequence};
#[derive(Serialize)]
#[derive(sqlx::FromRow, Serialize)]
pub struct TranslatorBook {
pub id: i32,
pub title: String,
pub lang: String,
pub file_type: String,
pub year: i32,
pub available_types: Vec<String>,
pub uploaded: String,
#[serde(serialize_with = "naive_date_serializer::serialize")]
pub uploaded: NaiveDate,
pub authors: Vec<Author>,
pub sequences: Vec<Sequence>,
pub annotation_exists: bool,
}
impl From<book::Data> for TranslatorBook {
fn from(val: book::Data) -> Self {
let book::Data {
id,
title,
lang,
file_type,
uploaded,
book_authors,
book_sequences,
book_annotation,
source,
..
} = val;
TranslatorBook {
id,
title,
lang,
file_type: file_type.clone(),
available_types: get_available_types(file_type.clone(), source.unwrap().name),
uploaded: uploaded.format("%Y-%m-%d").to_string(),
authors: get_authors(book_authors),
sequences: get_sequences(book_sequences),
annotation_exists: book_annotation.unwrap().is_some(),
}
}
}

View File

@@ -1,7 +1,3 @@
use crate::prisma::{translator, book_sequence, book_author, book_genre};
use super::{author::Author, sequence::Sequence, genre::Genre};
pub fn get_available_types(file_type: String, source_name: String) -> Vec<String> {
if file_type == "fb2" && source_name == "flibusta" {
vec![
@@ -14,43 +10,3 @@ pub fn get_available_types(file_type: String, source_name: String) -> Vec<String
vec![file_type]
}
}
pub fn get_authors(
book_authors: Option<Vec<book_author::Data>>
) -> Vec<Author> {
book_authors
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_translators(
translations: Option<Vec<translator::Data>>
) -> Vec<Author> {
translations
.unwrap()
.iter()
.map(|item| item.author.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_sequences(
book_sequences: Option<Vec<book_sequence::Data>>
) -> Vec<Sequence> {
book_sequences
.unwrap()
.iter()
.map(|item| item.sequence.clone().unwrap().as_ref().clone().into())
.collect()
}
pub fn get_genres(
book_genres: Option<Vec<book_genre::Data>>
) -> Vec<Genre> {
book_genres
.unwrap()
.iter()
.map(|item| item.genre.clone().unwrap().as_ref().clone().into())
.collect()
}

View File

@@ -1,279 +1,339 @@
use std::collections::HashSet;
use axum::{Router, extract::{Query, Path}, Json, response::IntoResponse, routing::get, http::StatusCode};
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use rand::Rng;
use crate::{
meilisearch::{get_meili_client, AuthorMeili},
serializers::{
allowed_langs::AllowedLangs,
author::{Author, AuthorBook},
author_annotation::AuthorAnnotation,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
},
};
use crate::{prisma::{author, author_annotation::{self}, book, book_author, translator, book_sequence}, serializers::{pagination::{Pagination, Page, PageWithParent}, author::{Author, AuthorBook}, author_annotation::AuthorAnnotation, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}};
use super::{common::get_random_item::get_random_item, Database};
use super::Database;
async fn get_authors(
db: Database,
pagination: Query<Pagination>
) -> impl IntoResponse {
let authors_count = db
.author()
.count(vec![])
.exec()
.await
.unwrap();
let authors = db
.author()
.find_many(vec![])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
authors_count,
&pagination
);
Json(page)
}
async fn get_random_author(
db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_filter(&filter)
.execute::<AuthorMeili>()
.await
.unwrap();
let author_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<AuthorMeili>()
.await
.unwrap();
let author = &result.hits.get(0).unwrap().result;
author.id
};
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
async fn get_authors(db: Database, pagination: Query<Pagination>) -> impl IntoResponse {
let authors_count = sqlx::query_scalar!("SELECT COUNT(*) FROM authors",)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
Json::<Author>(author.into())
let authors = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
ORDER BY a.id ASC
OFFSET $1
LIMIT $2
"#,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: Page<Author> = Page::new(authors, authors_count, &pagination);
Json(page)
}
async fn get_author(
async fn get_random_author(
db: Database,
Path(author_id): Path<i32>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let author_id = {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));
get_random_item::<AuthorMeili>(authors_index, filter).await
};
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_one(&db.0)
.await
.unwrap();
Json::<Author>(author)
}
async fn get_author(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match author {
Some(author) => Json::<Author>(author.into()).into_response(),
Some(author) => Json::<Author>(author).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_author_annotation(
db: Database,
Path(author_id): Path<i32>,
) -> impl IntoResponse {
let author_annotation = db
.author_annotation()
.find_unique(
author_annotation::author_id::equals(author_id)
)
.exec()
.await
.unwrap();
async fn get_author_annotation(db: Database, Path(author_id): Path<i32>) -> impl IntoResponse {
let author_annotation = sqlx::query_as!(
AuthorAnnotation,
r#"
SELECT
aa.id,
aa.title,
aa.text,
aa.file
FROM author_annotations aa
WHERE aa.author = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match author_annotation {
Some(annotation) => Json::<AuthorAnnotation>(annotation.into()).into_response(),
Some(annotation) => Json::<AuthorAnnotation>(annotation).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
async fn get_author_books(
db: Database,
Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let author = db
.author()
.find_unique(
author::id::equals(author_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let author = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
author_id
)
.fetch_optional(&db.0)
.await
.unwrap();
let author = match author {
Some(author) => author,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*)
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
"#,
author_id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = db
.book()
.find_many(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
let books = sqlx::query_as!(
AuthorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
),
ARRAY[]::author_type[]
) AS "translators!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
sequences.id,
sequences.name
)::sequence_type
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
),
ARRAY[]::sequence_type[]
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
ORDER BY b.title ASC
OFFSET $3
LIMIT $4
"#,
author_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: PageWithParent<AuthorBook, Author> = PageWithParent::new(
author.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
let page: PageWithParent<AuthorBook, Author> =
PageWithParent::new(author, books, books_count, &pagination);
Json(page).into_response()
}
async fn get_author_books_available_types(
db: Database,
Path(author_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::book_authors::some(vec![
book_author::author_id::equals(author_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE b.is_deleted = false AND ba.author = $1 AND b.lang = ANY($2)
"#,
author_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
for file_type in book.available_types {
file_types.insert(file_type);
}
}
Json::<Vec<String>>(file_types.into_iter().collect())
}
async fn search_authors(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"author_langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("author_langs IN [{}]", allowed_langs.join(", "));
let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
@@ -282,18 +342,27 @@ async fn search_authors(
let total = result.estimated_total_hits.unwrap();
let author_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut authors = db
.author()
.find_many(vec![
author::id::in_vec(author_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
let mut authors = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&author_ids
)
.fetch_all(&db.0)
.await
.unwrap();
authors.sort_by(|a, b| {
let a_pos = author_ids.iter().position(|i| *i == a.id).unwrap();
@@ -302,23 +371,21 @@ async fn search_authors(
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
authors.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Author> = Page::new(authors, total.try_into().unwrap(), &pagination);
Json(page)
}
pub async fn get_authors_router() -> Router {
Router::new()
.route("/", get(get_authors))
.route("/random", get(get_random_author))
.route("/:author_id", get(get_author))
.route("/:author_id/annotation", get(get_author_annotation))
.route("/:author_id/books", get(get_author_books))
.route("/:author_id/available_types", get(get_author_books_available_types))
.route("/search/:query", get(search_authors))
.route("/{author_id}", get(get_author))
.route("/{author_id}/annotation", get(get_author_annotation))
.route("/{author_id}/books", get(get_author_books))
.route(
"/{author_id}/available_types",
get(get_author_books_available_types),
)
.route("/search/{query}", get(search_authors))
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
use meilisearch_sdk::indexes::Index;
use rand::Rng;
use serde::de::DeserializeOwned;
use crate::meilisearch::GetId;
/// Picks a uniformly random item id from a Meilisearch index, restricted to
/// documents matching `filter` (e.g. a language filter like `langs IN [..]`).
///
/// Strategy: one filtered search to learn the filtered hit count, then a
/// second filtered search with `limit = 1` at a random offset inside that
/// count.
///
/// # Panics
/// Panics if either search fails, if the index reports no estimated total
/// (or zero matching documents, making the random range empty), or if the
/// offset query returns no hit.
pub async fn get_random_item<T>(index: Index, filter: String) -> i32
where
    T: DeserializeOwned + GetId + 'static + Send + Sync,
{
    // First query: only used to learn how many documents match the filter.
    let count_result = index
        .search()
        .with_filter(&filter)
        .execute::<T>()
        .await
        .unwrap();

    let total = count_result
        .estimated_total_hits
        .expect("index did not report estimated_total_hits");

    // Random offset within the *filtered* result set.
    let offset: usize = rand::thread_rng().gen_range(0..total);

    // Second query: must re-apply the same filter. Without it the offset —
    // computed against the filtered count — would index into the unfiltered
    // collection and could return a document that does not match `filter`
    // (this was a bug in the original implementation, which dropped the
    // filter here).
    let pick_result = index
        .search()
        .with_filter(&filter)
        .with_limit(1)
        .with_offset(offset)
        .execute::<T>()
        .await
        .unwrap();

    let item = &pick_result
        .hits
        .first()
        .expect("offset search returned no hits")
        .result;
    item.get_id()
}

1
src/views/common/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod get_random_item;

View File

@@ -1,67 +1,95 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::Query, Json, response::IntoResponse};
use prisma_client_rust::Direction;
use axum::{extract::Query, response::IntoResponse, routing::get, Json, Router};
use crate::{serializers::{pagination::{Pagination, Page}, genre::{Genre, GenreFilter}}, prisma::genre};
use crate::serializers::{
genre::{Genre, GenreFilter},
pagination::{Page, Pagination},
};
use crate::serializers::source::Source;
use super::Database;
pub async fn get_genres(
db: Database,
pagination: Query<Pagination>,
Query(GenreFilter { meta }): Query<GenreFilter>
Query(GenreFilter { meta }): Query<GenreFilter>,
) -> impl IntoResponse {
let filter = {
match meta {
Some(meta) => vec![
genre::meta::equals(meta)
],
None => vec![],
}
};
let genres_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*) FROM genres
WHERE (meta = $1 OR $1 IS NULL)
"#,
meta
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let genres_count = db
.genre()
.count(filter.clone())
.exec()
.await
.unwrap();
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
WHERE (meta = $1 OR $1 IS NULL)
ORDER BY genres.id ASC
LIMIT $2 OFFSET $3
"#,
meta,
pagination.size,
(pagination.page - 1) * pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let genres = db
.genre()
.find_many(filter)
.with(
genre::source::fetch()
)
.order_by(genre::id::order(Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
let page: Page<Genre> = Page::new(
genres.iter().map(|item| item.clone().into()).collect(),
genres_count,
&pagination
);
let page: Page<Genre> = Page::new(genres, genres_count, &pagination);
Json(page)
}
pub async fn get_genre_metas(
db: Database
) -> impl IntoResponse {
let genres = db
.genre()
.find_many(vec![])
.order_by(genre::id::order(Direction::Asc))
.exec()
.await
.unwrap();
pub async fn get_genre_metas(db: Database) -> impl IntoResponse {
let genres = sqlx::query_as!(
Genre,
r#"
SELECT
genres.id,
genres.remote_id,
genres.code,
genres.description,
genres.meta,
(
SELECT
ROW(
sources.id,
sources.name
)::source_type
FROM sources
WHERE sources.id = genres.source
) AS "source!: Source"
FROM genres
ORDER BY genres.id ASC
"#
)
.fetch_all(&db.0)
.await
.unwrap();
let mut metas: HashSet<String> = HashSet::new();
@@ -69,9 +97,11 @@ pub async fn get_genre_metas(
metas.insert(genre.meta.clone());
}
Json::<Vec<String>>(metas.into_iter().collect())
}
let mut metas: Vec<String> = metas.into_iter().collect();
metas.sort();
Json::<Vec<String>>(metas)
}
pub async fn get_genres_router() -> Router {
Router::new()

View File

@@ -1,27 +1,35 @@
use std::sync::Arc;
use axum::{Router, routing::get, middleware::{self, Next}, Extension, http::{Request, StatusCode, self}, response::Response};
use axum::{
http::{self, Request, StatusCode},
middleware::{self, Next},
response::Response,
routing::get,
Extension, Router,
};
use axum_prometheus::PrometheusMetricLayer;
use tower_http::trace::{TraceLayer, self};
use sqlx::PgPool;
use tower_http::trace::{self, TraceLayer};
use tracing::Level;
use crate::{config::CONFIG, db::get_prisma_client, prisma::PrismaClient};
use crate::{config::CONFIG, db::get_postgres_pool};
use self::{authors::get_authors_router, genres::get_genres_router, books::get_books_router, sequences::get_sequences_router};
use self::translators::get_translators_router;
use self::{
authors::get_authors_router, books::get_books_router, genres::get_genres_router,
sequences::get_sequences_router,
};
pub mod authors;
pub mod books;
pub mod common;
pub mod genres;
pub mod sequences;
pub mod translators;
pub type Database = Extension<PgPool>;
pub type Database = Extension<Arc<PrismaClient>>;
async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
let auth_header = req.headers()
async fn auth(req: Request<axum::body::Body>, next: Next) -> Result<Response, StatusCode> {
let auth_header = req
.headers()
.get(http::header::AUTHORIZATION)
.and_then(|header| header.to_str().ok());
@@ -38,35 +46,27 @@ async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode>
Ok(next.run(req).await)
}
pub async fn get_router() -> Router {
let client = Arc::new(get_prisma_client().await);
let client = get_postgres_pool().await;
let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();
let app_router = Router::new()
.nest("/api/v1/authors", get_authors_router().await)
.nest("/api/v1/translators", get_translators_router().await)
.nest("/api/v1/genres", get_genres_router().await)
.nest("/api/v1/books", get_books_router().await)
.nest("/api/v1/sequences", get_sequences_router().await)
.layer(middleware::from_fn(auth))
.layer(Extension(client))
.layer(prometheus_layer);
let metric_router = Router::new()
.route("/metrics", get(|| async move { metric_handle.render() }));
let metric_router =
Router::new().route("/metrics", get(|| async move { metric_handle.render() }));
Router::new()
.nest("/", app_router)
.nest("/", metric_router)
.layer(
TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new()
.level(Level::INFO))
.on_response(trace::DefaultOnResponse::new()
.level(Level::INFO)),
)
Router::new().merge(app_router).merge(metric_router).layer(
TraceLayer::new_for_http()
.make_span_with(trace::DefaultMakeSpan::new().level(Level::INFO))
.on_response(trace::DefaultOnResponse::new().level(Level::INFO)),
)
}

View File

@@ -1,82 +1,79 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::{Path, Query}, http::StatusCode, response::IntoResponse, Json};
use rand::Rng;
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::{prisma::{sequence, book_sequence, book, book_author, author, translator}, serializers::{sequence::{Sequence, SequenceBook}, allowed_langs::AllowedLangs, pagination::{PageWithParent, Pagination, Page}}, meilisearch::{get_meili_client, SequenceMeili}};
use super::Database;
use crate::{
meilisearch::{get_meili_client, SequenceMeili},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::{Sequence, SequenceBook},
},
};
use super::{common::get_random_item::get_random_item, Database};
async fn get_random_sequence(
db: Database,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("sequences");
let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let result = authors_index
.search()
.with_filter(&filter)
.execute::<SequenceMeili>()
.await
.unwrap();
let sequence_id = {
let offset: usize = rand::thread_rng().gen_range(0..result.estimated_total_hits.unwrap().try_into().unwrap());
let client = get_meili_client();
let result = authors_index
.search()
.with_limit(1)
.with_offset(offset)
.execute::<SequenceMeili>()
.await
.unwrap();
let authors_index = client.index("sequences");
let sequence = &result.hits.get(0).unwrap().result;
let filter = format!("langs IN [{}]", allowed_langs.join(", "));
sequence.id
get_random_item::<SequenceMeili>(authors_index, filter).await
};
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap()
.unwrap();
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_one(&db.0)
.await
.unwrap();
Json::<Sequence>(sequence.into())
Json::<Sequence>(sequence)
}
async fn search_sequence(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();
let sequence_index = client.index("sequences");
let filter = format!(
"langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("langs IN [{}]", allowed_langs.join(", "));
let result = sequence_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<SequenceMeili>()
.await
@@ -85,14 +82,16 @@ async fn search_sequence(
let total = result.estimated_total_hits.unwrap();
let sequence_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut sequences = db
.sequence()
.find_many(vec![
sequence::id::in_vec(sequence_ids.clone())
])
.exec()
.await
.unwrap();
let mut sequences = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = ANY($1)
"#,
&sequence_ids
)
.fetch_all(&db.0)
.await
.unwrap();
sequences.sort_by(|a, b| {
let a_pos = sequence_ids.iter().position(|i| *i == a.id).unwrap();
@@ -101,30 +100,25 @@ async fn search_sequence(
a_pos.cmp(&b_pos)
});
let page: Page<Sequence> = Page::new(
sequences.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Sequence> = Page::new(sequences, total.try_into().unwrap(), &pagination);
Json(page)
}
async fn get_sequence(
db: Database,
Path(sequence_id): Path<i32>
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
async fn get_sequence(db: Database, Path(sequence_id): Path<i32>) -> impl IntoResponse {
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();
match sequence {
Some(sequence) => Json::<Sequence>(sequence.into()).into_response(),
Some(sequence) => Json::<Sequence>(sequence).into_response(),
None => StatusCode::NOT_FOUND.into_response(),
}
}
@@ -132,30 +126,38 @@ async fn get_sequence(
async fn get_sequence_available_types(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
"#,
sequence_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
for file_type in book.available_types {
file_types.insert(file_type);
}
}
Json::<Vec<String>>(file_types.into_iter().collect())
@@ -164,90 +166,131 @@ async fn get_sequence_available_types(
async fn get_sequence_books(
db: Database,
Path(sequence_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let sequence = db
.sequence()
.find_unique(
sequence::id::equals(sequence_id)
)
.exec()
.await
.unwrap();
let sequence = sqlx::query_as!(
Sequence,
r#"
SELECT id, name FROM sequences WHERE id = $1
"#,
sequence_id
)
.fetch_optional(&db.0)
.await
.unwrap();
let sequence = match sequence {
Some(v) => v,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
let books_count = sqlx::query_scalar!(
"SELECT COUNT(*) FROM book_sequences bs
JOIN books b ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)",
sequence.id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let mut books = sqlx::query_as!(
SequenceBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
FROM translations
JOIN authors ON authors.id = translations.author
WHERE translations.book = b.id
),
ARRAY[]::author_type[]
) AS "translators!: Vec<Author>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool",
bs.position
FROM books b
JOIN book_sequences bs ON b.id = bs.book
WHERE
b.is_deleted = FALSE AND
bs.sequence = $1 AND
b.lang = ANY($2)
ORDER BY bs.position
LIMIT $3 OFFSET $4
"#,
sequence.id,
&allowed_langs,
pagination.size,
(pagination.page - 1) * pagination.size,
)
.fetch_all(&db.0)
.await
.unwrap();
let books = db
.book()
.find_many(vec![
book::book_sequences::some(vec![
book_sequence::sequence_id::equals(sequence_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.with(
book::translations::fetch(vec![])
.with(
translator::author::fetch()
.with(
author::author_annotation::fetch()
)
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
.await
.unwrap();
books.sort_by(|a, b| a.position.cmp(&b.position));
let page: PageWithParent<SequenceBook, Sequence> = PageWithParent::new(
sequence.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
let page: PageWithParent<SequenceBook, Sequence> =
PageWithParent::new(sequence, books, books_count, &pagination);
Json(page).into_response()
}
pub async fn get_sequences_router() -> Router {
Router::new()
.route("/random", get(get_random_sequence))
.route("/search/:query", get(search_sequence))
.route("/:sequence_id", get(get_sequence))
.route("/:sequence_id/available_types", get(get_sequence_available_types))
.route("/:sequence_id/books", get(get_sequence_books))
.route("/search/{query}", get(search_sequence))
.route("/{sequence_id}", get(get_sequence))
.route(
"/{sequence_id}/available_types",
get(get_sequence_available_types),
)
.route("/{sequence_id}/books", get(get_sequence_books))
}

View File

@@ -1,147 +1,216 @@
use std::collections::HashSet;
use axum::{Router, routing::get, extract::{Path, Query}, response::IntoResponse, Json, http::StatusCode};
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::{serializers::{pagination::{Pagination, Page, PageWithParent}, author::Author, translator::TranslatorBook, allowed_langs::AllowedLangs}, meilisearch::{get_meili_client, AuthorMeili}, prisma::{author, book::{self}, translator, book_author, book_sequence}};
use crate::{
meilisearch::{get_meili_client, AuthorMeili},
serializers::{
allowed_langs::AllowedLangs,
author::Author,
book::BaseBook,
pagination::{Page, PageWithParent, Pagination},
sequence::Sequence,
translator::TranslatorBook,
},
};
use super::Database;
async fn get_translated_books(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let translator = db
.author()
.find_unique(
author::id::equals(translator_id)
)
.with(
author::author_annotation::fetch()
)
.exec()
.await
.unwrap();
let translator = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = $1
"#,
translator_id
)
.fetch_optional(&db.0)
.await
.unwrap();
let translator = match translator {
Some(translator) => translator,
None => return StatusCode::NOT_FOUND.into_response(),
};
let books_count = db
.book()
.count(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs.clone())
])
.exec()
.await
.unwrap();
let books_count = sqlx::query_scalar!(
r#"
SELECT COUNT(*)
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_one(&db.0)
.await
.unwrap()
.unwrap();
let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.with(
book::source::fetch()
)
.with(
book::book_annotation::fetch()
)
.with(
book::book_authors::fetch(vec![])
.with(
book_author::author::fetch()
.with(
author::author_annotation::fetch()
let books = sqlx::query_as!(
TranslatorBook,
r#"
SELECT
b.id,
b.title,
b.lang,
b.file_type,
b.year,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>",
b.uploaded,
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
authors.id,
authors.first_name,
authors.last_name,
authors.middle_name,
EXISTS(
SELECT * FROM author_annotations WHERE author = authors.id
)
)::author_type
)
)
)
.with(
book::book_sequences::fetch(vec![])
.with(
book_sequence::sequence::fetch()
)
)
.order_by(book::id::order(prisma_client_rust::Direction::Asc))
.skip((pagination.page - 1) * pagination.size)
.take(pagination.size)
.exec()
FROM book_authors
JOIN authors ON authors.id = book_authors.author
WHERE book_authors.book = b.id
),
ARRAY[]::author_type[]
) AS "authors!: Vec<Author>",
COALESCE(
(
SELECT
ARRAY_AGG(
ROW(
sequences.id,
sequences.name
)::sequence_type
)
FROM book_sequences
JOIN sequences ON sequences.id = book_sequences.sequence
WHERE book_sequences.book = b.id
),
ARRAY[]::sequence_type[]
) AS "sequences!: Vec<Sequence>",
EXISTS(
SELECT * FROM book_annotations WHERE book = b.id
) AS "annotation_exists!: bool"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
OFFSET $3
LIMIT $4
"#,
translator_id,
&allowed_langs,
(pagination.page - 1) * pagination.size,
pagination.size
)
.fetch_all(&db.0)
.await
.unwrap();
let page: PageWithParent<TranslatorBook, Author> = PageWithParent::new(
translator.into(),
books.iter().map(|item| item.clone().into()).collect(),
books_count,
&pagination
);
let page: PageWithParent<TranslatorBook, Author> =
PageWithParent::new(translator, books, books_count, &pagination);
Json(page).into_response()
}
async fn get_translated_books_available_types(
db: Database,
Path(translator_id): Path<i32>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
) -> impl IntoResponse {
let books = db
.book()
.find_many(vec![
book::translations::some(vec![
translator::author_id::equals(translator_id)
]),
book::lang::in_vec(allowed_langs)
])
.exec()
// TODO: refactor
let books = sqlx::query_as!(
BaseBook,
r#"
SELECT
b.id,
CASE WHEN b.file_type = 'fb2' THEN ARRAY['fb2', 'epub', 'mobi', 'fb2zip']::text[] ELSE ARRAY[b.file_type]::text[] END AS "available_types!: Vec<String>"
FROM books b
JOIN book_authors ba ON b.id = ba.book
WHERE
b.is_deleted = false
AND ba.author = $1
AND b.lang = ANY($2)
"#,
translator_id,
&allowed_langs
)
.fetch_all(&db.0)
.await
.unwrap();
let mut file_types: HashSet<String> = HashSet::new();
for book in books {
file_types.insert(book.file_type.clone());
}
if file_types.contains(&"fb2".to_string()) {
file_types.insert("epub".to_string());
file_types.insert("mobi".to_string());
file_types.insert("fb2zip".to_string());
for file_type in book.available_types {
file_types.insert(file_type);
}
}
Json::<Vec<String>>(file_types.into_iter().collect())
}
async fn search_translators(
db: Database,
Path(query): Path<String>,
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<AllowedLangs>,
pagination: Query<Pagination>
axum_extra::extract::Query(AllowedLangs { allowed_langs }): axum_extra::extract::Query<
AllowedLangs,
>,
pagination: Query<Pagination>,
) -> impl IntoResponse {
let client = get_meili_client();
let authors_index = client.index("authors");
let filter = format!(
"translator_langs IN [{}]",
allowed_langs.join(", ")
);
let filter = format!("translator_langs IN [{}]", allowed_langs.join(", "));
let result = authors_index
.search()
.with_query(&query)
.with_filter(&filter)
.with_offset(((pagination.page - 1) * pagination.size).try_into().unwrap())
.with_offset(
((pagination.page - 1) * pagination.size)
.try_into()
.unwrap(),
)
.with_limit(pagination.size.try_into().unwrap())
.execute::<AuthorMeili>()
.await
@@ -150,18 +219,27 @@ async fn search_translators(
let total = result.estimated_total_hits.unwrap();
let translator_ids: Vec<i32> = result.hits.iter().map(|a| a.result.id).collect();
let mut translators = db
.author()
.find_many(vec![
author::id::in_vec(translator_ids.clone())
])
.with(
author::author_annotation::fetch()
)
.order_by(author::id::order(prisma_client_rust::Direction::Asc))
.exec()
.await
.unwrap();
let mut translators = sqlx::query_as!(
Author,
r#"
SELECT
a.id,
a.first_name,
a.last_name,
COALESCE(a.middle_name, '') AS "middle_name!: String",
CASE
WHEN aa.id IS NOT NULL THEN true
ELSE false
END AS "annotation_exists!: bool"
FROM authors a
LEFT JOIN author_annotations aa ON a.id = aa.author
WHERE a.id = ANY($1)
"#,
&translator_ids
)
.fetch_all(&db.0)
.await
.unwrap();
translators.sort_by(|a, b| {
let a_pos = translator_ids.iter().position(|i| *i == a.id).unwrap();
@@ -170,19 +248,17 @@ async fn search_translators(
a_pos.cmp(&b_pos)
});
let page: Page<Author> = Page::new(
translators.iter().map(|item| item.clone().into()).collect(),
total.try_into().unwrap(),
&pagination
);
let page: Page<Author> = Page::new(translators, total.try_into().unwrap(), &pagination);
Json(page)
}
pub async fn get_translators_router() -> Router {
Router::new()
.route("/:translator_id/books", get(get_translated_books))
.route("/:translator_id/available_types", get(get_translated_books_available_types))
.route("/search/:query", get(search_translators))
.route("/{translator_id}/books", get(get_translated_books))
.route(
"/{translator_id}/available_types",
get(get_translated_books_available_types),
)
.route("/search/{query}", get(search_translators))
}