diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md index 336ff67e6..bedfa127e 100644 --- a/.github/CHANGELOG.md +++ b/.github/CHANGELOG.md @@ -1,5 +1,46 @@ # Changelog + +## next + +### Added + +- ✅ Improve coverage testing and filesystem coverage ([#351](https://github.com/stumpapp/stump/issues/351)) [[efbae49](https://github.com/stumpapp/stump/commit/efbae490993ca691b3e960612b513cec6e646efb)] +- ✨ Delete job-associated logs and small QOL changes ([#348](https://github.com/stumpapp/stump/issues/348)) [[5d00c20](https://github.com/stumpapp/stump/commit/5d00c207ea819fd6ef5f06f7752f691c8dbcc419)] +- ✨ Image analysis job ([#307](https://github.com/stumpapp/stump/issues/307)) [[8a8bd86](https://github.com/stumpapp/stump/commit/8a8bd867e740640745ec464d848d9ff9b0c312fb)] +- ✨ Support grid and table layouts ([#322](https://github.com/stumpapp/stump/issues/322)) [[2f1b085](https://github.com/stumpapp/stump/commit/2f1b085cad3cedec30d421b11b4d2b7ac9d0d1dd)] +- ✨ Email to device ([#296](https://github.com/stumpapp/stump/issues/296)) [[f5e5a09](https://github.com/stumpapp/stump/commit/f5e5a09a5e7be2e32ad725ba81efc8ef41fc14a8)] + +### Changed + +- 💄 Fix large button [[32532c0](https://github.com/stumpapp/stump/commit/32532c0bd181652a76f8fca9c8fd95723c7434e5)] +- 🚨 Fix type mismatch [[b049668](https://github.com/stumpapp/stump/commit/b049668af549af5a3190b78c8553a6d697ae7f8d)] +- 🚨 Fix compilation error in test [[71a4cd0](https://github.com/stumpapp/stump/commit/71a4cd065d61537f2837ab0e79188ae100aa501a)] + +### Fixed + +- 🐛 Focus EPUB.js iframe each location change ([#359](https://github.com/stumpapp/stump/issues/359)) [[1ce33e8](https://github.com/stumpapp/stump/commit/1ce33e8f1754005c2a3f4c5157e3b438b8a426a8)] +- 🐛 Fix first visit API URL determination ([#358](https://github.com/stumpapp/stump/issues/358)) [[d2b8ed4](https://github.com/stumpapp/stump/commit/d2b8ed47992de88dd1267478e0adf1f85d5d6aba)] +- 🐛 Fix DB error during media analysis 
([#354](https://github.com/stumpapp/stump/issues/354)) [[73efc83](https://github.com/stumpapp/stump/commit/73efc8308f3ce14a390f35233a8aed0dec462ea5)] +- 🐛 Fix nested file structure support for ZIP/RAR format ([#353](https://github.com/stumpapp/stump/issues/353)) [[3aa02a7](https://github.com/stumpapp/stump/commit/3aa02a7ae51f521bcfde0e46aa76353432ed79af)] +- 🐛 Fix OPDS path detection regression ([#350](https://github.com/stumpapp/stump/issues/350)) [[52092c6](https://github.com/stumpapp/stump/commit/52092c636619719f9168e0989faeb52783ffd384)] + +### Miscellaneous + +- 🌐 Update translations ([#352](https://github.com/stumpapp/stump/issues/352)) [[09cae5b](https://github.com/stumpapp/stump/commit/09cae5b875a00ce00d64354dcae04c0e093100de)] +- 🌐 Update translations ([#345](https://github.com/stumpapp/stump/issues/345)) [[755403e](https://github.com/stumpapp/stump/commit/755403e01113c54a09aee9a9b514d7261d31eec8)] +- Merge pull request [#342](https://github.com/stumpapp/stump/issues/342) from stumpapp/experimental [[2d5fac5](https://github.com/stumpapp/stump/commit/2d5fac55be4d82fe56d1ea99c98471d9b98771c4)] +- Merge branch 'develop' into experimental [[3c0f2d4](https://github.com/stumpapp/stump/commit/3c0f2d4d87f2249d05fb23b308f63e138f3f3d3d)] +- Merge remote-tracking branch 'origin/develop' into experimental [[8c48560](https://github.com/stumpapp/stump/commit/8c485602a97d2c2f996dba455d2e83b07100b34f)] +- Merge remote-tracking branch 'origin/develop' into experimental [[707b9d0](https://github.com/stumpapp/stump/commit/707b9d0c0e6e62463369834e519a45d1ac3311cd)] +- Merge remote-tracking branch 'origin/develop' into experimental [[1108406](https://github.com/stumpapp/stump/commit/1108406c92a97e823ffbca4f4d263033bc3ec033)] +- Merge remote-tracking branch 'origin/develop' into experimental [[6c52759](https://github.com/stumpapp/stump/commit/6c52759da9a9f2d7ad05608dcdbe7747eafd236b)] +- Merge remote-tracking branch 'origin/develop' into experimental 
[[8b4ffaf](https://github.com/stumpapp/stump/commit/8b4ffaf75d2a00d1727f61ad8324f378af47495c)] +- Merge remote-tracking branch 'origin/develop' into experimental [[8c431e8](https://github.com/stumpapp/stump/commit/8c431e8dfdc9292e37ae072d7045627c6be460c8)] +- Merge remote-tracking branch 'origin/develop' into experimental [[aada56e](https://github.com/stumpapp/stump/commit/aada56e6f942125d46decac3c04d1fc2e2b8dc21)] +- Merge remote-tracking branch 'origin/develop' into experimental [[ddb5e16](https://github.com/stumpapp/stump/commit/ddb5e16473c1de8d7149abe4e4a959980c049373)] + + ## 0.0.3 (2024-05-13) diff --git a/.gitignore b/.gitignore index a9e13c577..0a7ee8b96 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ logs/ .vscode/**/* !.vscode/settings.json !.vscode/extensions.json +!.vscode/tasks.json !.vscode/*.todo build/ @@ -46,4 +47,7 @@ apps/server/client/* # nix .envrc -.direnv \ No newline at end of file +.direnv + +#Code coverage +lcov.info diff --git a/.prettierignore b/.prettierignore index 3e7b5e914..8d45e60ff 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,7 @@ dist build .next .expo +*.hbs packages/i18n/src/locales/*.json diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 93d94f4b8..709e4f3a9 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -6,6 +6,7 @@ "bradlc.vscode-tailwindcss", "Gruntfuggly.todo-tree", "mike-co.import-sorter", - "aaron-bond.better-comments" + "aaron-bond.better-comments", + "ryanluker.vscode-coverage-gutters" ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 396c09d41..2f7463492 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -12,6 +12,6 @@ ["cva\\(([^)]*)\\)", "[\"'`]([^\"'`]*).*?[\"'`]"], ["cx\\(([^)]*)\\)", "(?:'|\"|`)([^']*)(?:'|\"|`)"] ], - "tailwindCSS.classAttributes": ["class", "className", ".*CLASSES", ".*VARIANTS"], + "tailwindCSS.classAttributes": ["class", "className", ".*ClassName", ".*CLASSES", ".*VARIANTS"], 
"typescript.tsdk": "node_modules/typescript/lib" } diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 000000000..3f7afeb40 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Setup", + "type": "shell", + "command": "yarn setup", + "group": "build", + "problemMatcher": [], + "presentation": { + "panel": "shared", + "showReuseMessage": true, + "clear": false + } + }, + { + "label": "Run tests", + "type": "shell", + "command": "cargo test", + "group": "test", + "problemMatcher": ["$rustc"], + "presentation": { + "panel": "shared", + "showReuseMessage": true, + "clear": false + } + }, + { + "label": "Generate coverage", + "type": "shell", + "command": "cargo llvm-cov --lcov --output-path lcov.info", + "group": "test", + "problemMatcher": ["$rustc"], + "presentation": { + "panel": "shared", + "showReuseMessage": true, + "clear": false + } + } + ] +} diff --git a/Cargo.lock b/Cargo.lock index 01a3591e9..dbbb0e03f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -29,17 +29,42 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + [[package]] name = "aes" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher", "cpufeatures", ] +[[package]] +name = "aes-gcm-siv" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ae0784134ba9375416d469ec31e7c5f9fa94405049cf08c5ce5b4698be673e0d" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "polyval", + "subtle", + "zeroize", +] + [[package]] name = "ahash" version = "0.7.6" @@ -96,6 +121,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + [[package]] name = "alphanumeric-sort" version = "1.5.3" @@ -177,6 +208,27 @@ version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +[[package]] +name = "arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + [[package]] name = "ascii" version = "0.9.3" @@ -392,7 +444,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.6.2", "object", "rustc-demangle", ] @@ -437,12 +489,6 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - [[package]] name = "bcrypt" version = "0.15.1" @@ -468,6 +514,15 @@ dependencies = [ 
"serde", ] +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bindgen" version = "0.68.1" @@ -509,6 +564,17 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +[[package]] +name = "blake2b_simd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + [[package]] name = "block" version = "0.1.6" @@ -600,9 +666,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.11.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" @@ -777,6 +843,16 @@ dependencies = [ "windows-targets 0.52.0", ] +[[package]] +name = "chumsky" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" +dependencies = [ + "hashbrown 0.14.3", + "stacker", +] + [[package]] name = "ciborium" version = "0.2.2" @@ -913,7 +989,7 @@ dependencies = [ [[package]] name = "codegen" -version = "0.0.3" +version = "0.0.4" [[package]] name = "codespan-reporting" @@ -1025,9 +1101,9 @@ dependencies = [ [[package]] name = "constant_time_eq" -version = "0.1.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" 
[[package]] name = "convert_case" @@ -1120,11 +1196,26 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] @@ -1213,12 +1304,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" -dependencies = [ - "cfg-if", -] +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crunchy" @@ -1233,6 +1321,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", + "rand_core 0.6.4", "typenum", ] @@ -1294,6 +1383,15 @@ dependencies = [ "syn 1.0.107", ] +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + [[package]] name = "cty" version = "0.2.2" @@ -1475,6 +1573,12 @@ dependencies = [ "adler32", ] +[[package]] +name = "deflate64" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"83ace6c86376be0b6cdcf3fb41882e81d94b31587573d1cfa9d01cd06bba210d" + [[package]] name = "deranged" version = "0.3.10" @@ -1485,6 +1589,17 @@ dependencies = [ "serde", ] +[[package]] +name = "derive_arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.58", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -1630,6 +1745,17 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" +[[package]] +name = "displaydoc" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.58", +] + [[package]] name = "dml" version = "0.1.0" @@ -1711,6 +1837,36 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +[[package]] +name = "email" +version = "0.0.4" +dependencies = [ + "handlebars", + "lettre", + "serde", + "serde_json", + "specta", + "thiserror", + "tracing", + "utoipa", +] + +[[package]] +name = "email-encoding" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbfb21b9878cf7a348dcb8559109aabc0ec40d69924bd706fa5149846c4fef75" +dependencies = [ + "base64 0.21.5", + "memchr", +] + +[[package]] +name = "email_address" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2153bd83ebc09db15bcbdc3e2194d901804952e3dc96967e1cd3b0c5c32d112" + [[package]] name = "embed_plist" version = "1.2.2" @@ -1773,7 +1929,7 @@ dependencies = [ "regex", "thiserror", "xml-rs", - "zip", + "zip 0.6.6", ] [[package]] @@ -1832,7 +1988,7 @@ 
dependencies = [ "flume", "half", "lebe", - "miniz_oxide", + "miniz_oxide 0.6.2", "smallvec", "threadpool", ] @@ -1933,12 +2089,12 @@ checksum = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" [[package]] name = "flate2" -version = "1.0.25" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.7.3", ] [[package]] @@ -2446,6 +2602,20 @@ dependencies = [ "crunchy", ] +[[package]] +name = "handlebars" +version = "5.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab283476b99e66691dee3f1640fea91487a8d81f50fb5ecc75538f8f8879a1e4" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "hashbrown" version = "0.11.2" @@ -2475,9 +2645,13 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.1" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +dependencies = [ + "ahash 0.8.6", + "allocator-api2", +] [[package]] name = "hashlink" @@ -2894,7 +3068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "serde", ] @@ -2976,10 +3150,11 @@ dependencies = [ [[package]] name = "integrations" -version = "0.0.3" +version = "0.0.4" dependencies = [ "async-trait", "dotenv", + "lettre", "reqwest 0.12.3", "serde_json", "thiserror", @@ -3247,6 +3422,37 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" +[[package]] +name = "lettre" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357ff5edb6d8326473a64c82cf41ddf78ab116f89668c50c4fac1b321e5e80f4" +dependencies = [ + "async-trait", + "base64 0.21.5", + "chumsky", + "email-encoding", + "email_address", + "fastrand", + "futures-io", + "futures-util", + "hostname", + "httpdate", + "idna", + "mime", + "nom", + "percent-encoding", + "quoted_printable", + "rustls", + "rustls-pemfile", + "socket2 0.5.5", + "tokio", + "tokio-rustls", + "tracing", + "url", + "webpki-roots", +] + [[package]] name = "libc" version = "0.2.152" @@ -3391,11 +3597,17 @@ dependencies = [ "serde", ] +[[package]] +name = "lockfree-object-pool" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" + [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "loom" @@ -3434,6 +3646,16 @@ dependencies = [ "url", ] +[[package]] +name = "lzma-rs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" +dependencies = [ + "byteorder", + "crc", +] + [[package]] name = "mac" version = "0.1.1" @@ -3526,9 +3748,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.6.3" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] 
name = "memoffset" @@ -3704,6 +3926,15 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +dependencies = [ + "adler", +] + [[package]] name = "mio" version = "0.8.10" @@ -4309,17 +4540,6 @@ dependencies = [ "schema-ast", ] -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "paste" version = "1.0.11" @@ -4334,14 +4554,12 @@ checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" [[package]] name = "pbkdf2" -version = "0.11.0" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest 0.10.7", "hmac", - "password-hash", - "sha2 0.10.6", ] [[package]] @@ -4677,7 +4895,19 @@ dependencies = [ "bitflags 1.3.2", "crc32fast", "flate2", - "miniz_oxide", + "miniz_oxide 0.6.2", +] + +[[package]] +name = "polyval" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", ] [[package]] @@ -4934,6 +5164,15 @@ dependencies = [ "url", ] +[[package]] +name = "psm" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +dependencies = [ + "cc", +] + [[package]] name = "qoi" version = "0.4.1" @@ -5086,6 +5325,12 @@ dependencies = [ "proc-macro2", ] 
+[[package]] +name = "quoted_printable" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79ec282e887b434b68c18fe5c121d38e72a5cf35119b59e54ec5b992ea9c8eb0" + [[package]] name = "radix_trie" version = "0.2.1" @@ -5487,6 +5732,18 @@ dependencies = [ "smallvec", ] +[[package]] +name = "rust-argon2" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5885493fdf0be6cdff808d1533ce878d21cfa49c7086fa00c66355cd9141bfc" +dependencies = [ + "base64 0.21.5", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + [[package]] name = "rust-embed" version = "6.8.1" @@ -5567,9 +5824,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.22.3" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" dependencies = [ "log", "ring", @@ -5950,9 +6207,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -6042,6 +6299,12 @@ dependencies = [ "libc", ] +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "simple_asn1" version = "0.6.2" @@ -6054,6 +6317,22 @@ dependencies = [ "time", ] +[[package]] +name = "simple_crypt" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a335d088ffc07695a1aee7b94b72a70ba438bed139cf3f3397fcc6c102d113" +dependencies = [ + "aes-gcm-siv", + "anyhow", + 
"bincode", + "log", + "rust-argon2", + "serde", + "serde_derive", + "tar", +] + [[package]] name = "siphasher" version = "0.3.10" @@ -6315,6 +6594,19 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "stacker" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "winapi", +] + [[package]] name = "state" version = "0.5.3" @@ -6396,7 +6688,7 @@ dependencies = [ [[package]] name = "stump_core" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alphanumeric-sort", "async-channel", @@ -6406,6 +6698,7 @@ dependencies = [ "cuid", "data-encoding", "dirs 5.0.1", + "email", "epub", "futures", "globset", @@ -6416,12 +6709,14 @@ dependencies = [ "pdf", "pdfium-render", "prisma-client-rust", + "rand 0.8.5", "rayon", "regex", "ring", "serde", "serde-xml-rs", "serde_json", + "simple_crypt", "specta", "temp-env", "tempfile", @@ -6439,7 +6734,7 @@ dependencies = [ "walkdir", "webp", "xml-rs", - "zip", + "zip 2.1.3", ] [[package]] @@ -6457,7 +6752,7 @@ dependencies = [ [[package]] name = "stump_server" -version = "0.0.3" +version = "0.0.4" dependencies = [ "async-stream", "async-trait", @@ -6906,18 +7201,18 @@ checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", @@ -7478,6 +7773,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + [[package]] name = "unrar" version = "0.5.3" @@ -7617,7 +7922,7 @@ dependencies = [ "serde", "serde_json", "utoipa", - "zip", + "zip 0.6.6", ] [[package]] @@ -8431,9 +8736,23 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.6.0" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.58", +] [[package]] name = "zip" @@ -8441,44 +8760,78 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" dependencies = [ - "aes", "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", +] + +[[package]] +name = "zip" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775a2b471036342aa69bc5a602bc889cb0a06cda00477d0c69566757d5553d39" +dependencies = [ + "aes", + "arbitrary", "bzip2", "constant_time_eq", "crc32fast", 
"crossbeam-utils", + "deflate64", + "displaydoc", "flate2", "hmac", + "indexmap 2.2.6", + "lzma-rs", + "memchr", "pbkdf2", + "rand 0.8.5", "sha1", + "thiserror", "time", + "zeroize", + "zopfli", "zstd", ] +[[package]] +name = "zopfli" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946" +dependencies = [ + "bumpalo", + "crc32fast", + "lockfree-object-pool", + "log", + "once_cell", + "simd-adler32", +] + [[package]] name = "zstd" -version = "0.11.2+zstd.1.5.2" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" +checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "5.0.2+zstd.1.5.2" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" +checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" dependencies = [ - "libc", "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.9+zstd.1.5.5" +version = "2.0.10+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" +checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 11b667fed..981757997 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,7 @@ members = [ ] [workspace.package] -version = "0.0.3" +version = "0.0.4" rust-version = "1.77.2" [workspace.dependencies] @@ -17,6 +17,13 @@ async-stream = "0.3.5" bcrypt = "0.15.1" futures = "0.3.30" futures-util = "0.3.30" +lettre = { version = "0.11.4", default-features = false, features = [ + "builder", + "hostname", + 
"smtp-transport", + "tracing", + "tokio1-rustls-tls", +] } prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.6.11", features = [ "sqlite-create-many", "migrations", @@ -29,9 +36,11 @@ prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client- "sqlite", "mocking" ], default-features = false } +rand = "0.8.5" reqwest = { version = "0.12.3", default-features = false, features = [ "json", "rustls-tls" ] } serde = { version = "1.0.197", features = ["derive"] } serde_json = "1.0.115" +simple_crypt = "0.2.3" specta = "1.0.5" tempfile = "3.10.1" thiserror = "1.0.58" diff --git a/apps/desktop/package.json b/apps/desktop/package.json index 8709924d1..cfa8d51a9 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -1,6 +1,6 @@ { "name": "@stump/desktop", - "version": "0.0.3", + "version": "0.0.4", "description": "", "license": "MIT", "scripts": { diff --git a/apps/expo/package.json b/apps/expo/package.json index 16897e8c1..5eda3b99c 100644 --- a/apps/expo/package.json +++ b/apps/expo/package.json @@ -51,6 +51,6 @@ "tailwindcss": "3.3.2" }, "name": "@stump/mobile", - "version": "0.0.3", + "version": "0.0.4", "private": true } diff --git a/apps/expo/src/App.tsx b/apps/expo/src/App.tsx index 0f5fac98c..2d21c7c11 100644 --- a/apps/expo/src/App.tsx +++ b/apps/expo/src/App.tsx @@ -55,8 +55,8 @@ export default function AppWrapper() { // TODO: remove, just debugging stuff useEffect(() => { // setBaseUrl('https://demo.stumpapp.dev') - // setBaseUrl('http://localhost:10801') - setBaseUrl('http://192.168.0.202:10801') + setBaseUrl('http://localhost:10801') + // setBaseUrl('http://192.168.0.202:10801') }, [setBaseUrl]) useEffect(() => { @@ -65,8 +65,6 @@ export default function AppWrapper() { } }, [isReady]) - // console.log({ baseUrl, isConnectedToServer, isReady, storeUser }) - /** * An effect that will verify the baseUrl is accessible to the app. 
*/ diff --git a/apps/server/package.json b/apps/server/package.json index 3f596a389..f23c35aec 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -1,7 +1,7 @@ { "name": "@stump/server", "private": true, - "version": "0.0.3", + "version": "0.0.4", "scripts": { "lint": "cargo clippy --package stump_server -- -D warnings", "format": "cargo fmt --package stump_server", diff --git a/apps/server/src/filter/basic_filter.rs b/apps/server/src/filter/basic_filter.rs index 1dbd15615..df15ad43a 100644 --- a/apps/server/src/filter/basic_filter.rs +++ b/apps/server/src/filter/basic_filter.rs @@ -318,6 +318,18 @@ pub struct MediaFilter { pub relation_filter: MediaRelationFilter, } +impl MediaFilter { + pub fn ids(ids: Vec) -> Self { + Self { + base_filter: MediaBaseFilter { + id: ids, + ..Default::default() + }, + ..Default::default() + } + } +} + #[derive(Default, Debug, Clone, Deserialize, Serialize, ToSchema)] pub struct LogFilter { pub level: Option, diff --git a/apps/server/src/http_server.rs b/apps/server/src/http_server.rs index 2220495a1..362562f33 100644 --- a/apps/server/src/http_server.rs +++ b/apps/server/src/http_server.rs @@ -35,6 +35,11 @@ pub async fn run_http_server(config: StumpConfig) -> ServerResult<()> { .await .map_err(|e| ServerError::ServerStartError(e.to_string()))?; + // Initialize the encryption key, if it doesn't exist + core.init_encryption() + .await + .map_err(|e| ServerError::ServerStartError(e.to_string()))?; + core.init_journal_mode() .await .map_err(|e| ServerError::ServerStartError(e.to_string()))?; diff --git a/apps/server/src/main.rs b/apps/server/src/main.rs index 4d4a88df1..2769a36e5 100644 --- a/apps/server/src/main.rs +++ b/apps/server/src/main.rs @@ -18,6 +18,10 @@ fn debug_setup() { "STUMP_CLIENT_DIR", env!("CARGO_MANIFEST_DIR").to_string() + "/../web/dist", ); + std::env::set_var( + "EMAIL_TEMPLATES_DIR", + env!("CARGO_MANIFEST_DIR").to_string() + "/../../crates/email/templates", + ); 
std::env::set_var("STUMP_PROFILE", "debug"); } diff --git a/apps/server/src/middleware/auth.rs b/apps/server/src/middleware/auth.rs index dfde5f66f..1216fcfe2 100644 --- a/apps/server/src/middleware/auth.rs +++ b/apps/server/src/middleware/auth.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; use axum::{ body::BoxBody, - extract::{FromRef, FromRequestParts}, + extract::{FromRef, FromRequestParts, OriginalUri}, http::{header, request::Parts, Method, StatusCode}, response::{IntoResponse, Redirect, Response}, }; @@ -18,6 +18,7 @@ use tower_sessions::Session; use crate::{ config::{session::SESSION_USER_KEY, state::AppState}, + routers::enforce_max_sessions, utils::{decode_base64_credentials, verify_password}, }; @@ -63,17 +64,24 @@ where return Ok(Self); } } else { - tracing::trace!("No session found, checking for auth header"); + tracing::trace!("No existing session found"); } + let request_uri = + if let Ok(path) = OriginalUri::from_request_parts(parts, &state).await { + path.0.path().to_owned() + } else { + parts.uri.path().to_owned() + }; + let is_opds = request_uri.starts_with("/opds"); + let is_swagger = request_uri.starts_with("/swagger-ui"); + let auth_header = parts .headers .get(header::AUTHORIZATION) .and_then(|value| value.to_str().ok()); - - let is_opds = parts.uri.path().starts_with("/opds"); - let is_swagger = parts.uri.path().starts_with("/swagger-ui"); let has_auth_header = auth_header.is_some(); + tracing::trace!(is_opds, has_auth_header, uri = ?parts.uri, "Checking auth header"); let Some(auth_header) = auth_header else { @@ -139,6 +147,7 @@ async fn handle_basic_auth( let is_match = verify_password(&user.hashed_password, &decoded_credentials.password) .map_err(|e| e.into_response())?; + // TODO: restrict session count here if is_match && user.is_locked { tracing::error!( username = &user.username, @@ -150,6 +159,10 @@ async fn handle_basic_auth( username = &user.username, "Basic authentication sucessful. 
Creating session for user" ); + enforce_max_sessions(&user, client).await.map_err(|e| { + tracing::error!("Failed to enforce max sessions: {}", e); + (StatusCode::INTERNAL_SERVER_ERROR).into_response() + })?; let user = User::from(user); session.insert(SESSION_USER_KEY, user).map_err(|e| { tracing::error!("Failed to insert user into session: {}", e); diff --git a/apps/server/src/routers/api/mod.rs b/apps/server/src/routers/api/mod.rs index f4d97ac3d..c476f78d8 100644 --- a/apps/server/src/routers/api/mod.rs +++ b/apps/server/src/routers/api/mod.rs @@ -18,8 +18,9 @@ mod tests { }; use super::v1::{ - auth::*, book_club::*, epub::*, job::*, library::*, media::*, metadata::*, - series::*, smart_list::*, user::*, ClaimResponse, StumpVersion, UpdateCheck, + auth::*, book_club::*, emailer::*, epub::*, job::*, library::*, media::*, + metadata::*, series::*, smart_list::*, user::*, ClaimResponse, StumpVersion, + UpdateCheck, }; #[allow(dead_code)] @@ -54,6 +55,7 @@ mod tests { file.write_all(b"// SERVER TYPE GENERATION\n\n")?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all( @@ -65,7 +67,26 @@ mod tests { format!("{}\n\n", ts_export::()?).as_bytes(), )?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; - file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", 
ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; diff --git a/apps/server/src/routers/api/v1/auth.rs b/apps/server/src/routers/api/v1/auth.rs index 06632992f..a1f515e03 100644 --- a/apps/server/src/routers/api/v1/auth.rs +++ b/apps/server/src/routers/api/v1/auth.rs @@ -36,6 +36,35 @@ pub(crate) fn mount() -> Router { ) } +pub async fn enforce_max_sessions( + for_user: &user::Data, + db: &PrismaClient, +) -> APIResult<()> { + let existing_sessions = for_user + .sessions() + .cloned() + .unwrap_or_else(|error| { + tracing::error!(?error, "Failed to load user's existing session(s)"); + Vec::default() + }) + .to_owned(); + let existing_login_sessions_count = existing_sessions.len() as i32; + + match (for_user.max_sessions_allowed, existing_login_sessions_count) { + (Some(max_login_sessions), count) if count >= max_login_sessions => { + let oldest_session_id = existing_sessions + .iter() + .min_by_key(|session| session.expires_at) + .map(|session| session.id.clone()); + handle_remove_earliest_session(db, for_user.id.clone(), oldest_session_id) + .await?; + }, + _ => (), + } + + Ok(()) +} + #[derive(Deserialize, Type, ToSchema)] pub struct LoginOrRegisterArgs { pub username: String, @@ -88,11 +117,12 @@ async fn handle_remove_earliest_session( session_id: Option, ) -> APIResult { if let Some(oldest_session_id) = session_id { - let _deleted_session = client + let deleted_session = client .session() .delete(session::id::equals(oldest_session_id)) .exec() .await?; + tracing::trace!(?deleted_session, "Removed oldest session for user"); Ok(1) } else { tracing::warn!("No existing session ID was provided for enforcing the maximum number of sessions. 
Deleting all sessions for user instead."); @@ -204,23 +234,7 @@ async fn login( return Err(APIError::Unauthorized); } - let existing_sessions = db_user - .sessions() - .cloned() - .unwrap_or_else(|error| { - tracing::error!(?error, "Failed to load user's existing session(s)"); - Vec::default() - }) - .to_owned(); - let existing_login_sessions_count = existing_sessions.len() as i32; - - match (db_user.max_sessions_allowed, existing_login_sessions_count) { - (Some(max_login_sessions), count) if count >= max_login_sessions => { - let oldest_session_id = existing_sessions.iter().min_by_key(|session| session.expires_at).map(|session| session.id.clone()); - handle_remove_earliest_session(&state.db, db_user.id.clone(), oldest_session_id).await?; - }, - _ => (), - } + enforce_max_sessions(&db_user, &client).await?; let updated_user = state .db @@ -338,8 +352,7 @@ pub async fn register( .exec() .await?; - // FIXME: these next two queries will be removed once nested create statements are - // supported on the prisma client. Until then, this ugly mess is necessary. 
+ // TODO(prisma 0.7.0): Nested create let _user_preferences = db .user_preferences() .create(vec![ diff --git a/apps/server/src/routers/api/v1/book_club.rs b/apps/server/src/routers/api/v1/book_club.rs index 72fa6688f..492d85f41 100644 --- a/apps/server/src/routers/api/v1/book_club.rs +++ b/apps/server/src/routers/api/v1/book_club.rs @@ -231,7 +231,7 @@ async fn create_book_club( let viewer = get_user_and_enforce_permission(&session, UserPermission::CreateBookClub)?; - // TODO: refactor when nested create is supported + // TODO(prisma 0.7.0): Nested create let (book_club, _) = db ._transaction() .run(|client| async move { diff --git a/apps/server/src/routers/api/v1/emailer.rs b/apps/server/src/routers/api/v1/emailer.rs new file mode 100644 index 000000000..e3eab8164 --- /dev/null +++ b/apps/server/src/routers/api/v1/emailer.rs @@ -0,0 +1,868 @@ +use std::path::PathBuf; + +use axum::{ + extract::{Path, State}, + middleware::from_extractor_with_state, + routing::{get, post}, + Json, Router, +}; +use prisma_client_rust::{chrono::Utc, Direction}; +use serde::{Deserialize, Serialize}; +use serde_qs::axum::QsQuery; +use specta::Type; +use stump_core::{ + db::entity::{ + AttachmentMeta, EmailerConfig, EmailerConfigInput, EmailerSendRecord, + EmailerSendTo, Media, RegisteredEmailDevice, SMTPEmailer, User, UserPermission, + }, + filesystem::{read_entire_file, ContentType, FileParts, PathUtils}, + prisma::{emailer, emailer_send_record, registered_email_device, user, PrismaClient}, + AttachmentPayload, EmailContentType, +}; +use tower_sessions::Session; +use utoipa::ToSchema; + +use crate::{ + config::state::AppState, + errors::{APIError, APIResult}, + filter::{chain_optional_iter, MediaFilter}, + middleware::auth::Auth, + routers::api::v1::media::apply_media_filters_for_user, + utils::enforce_session_permissions, +}; + +pub(crate) fn mount(app_state: AppState) -> Router { + Router::new() + .nest( + "/emailers", + Router::new() + .route("/", 
get(get_emailers).post(create_emailer)) + .nest( + "/:id", + Router::new() + .route( + "/", + get(get_emailer_by_id) + .put(update_emailer) + // .patch(patch_emailer) + .delete(delete_emailer), + ) + .nest( + "/send-history", + Router::new().route("/", get(get_emailer_send_history)), + ), + ) + .route("/send-attachment", post(send_attachment_email)), + ) + .nest( + "/email-devices", + Router::new() + .route("/", get(get_email_devices).post(create_email_device)) + .nest( + "/:id", + Router::new().route( + "/", + get(get_email_device_by_id) + .put(update_email_device) + .patch(patch_email_device) + .delete(delete_email_device), + ), + ), + ) + .layer(from_extractor_with_state::(app_state)) +} + +#[derive(Deserialize, ToSchema, Type)] +pub struct EmailerIncludeParams { + #[serde(default)] + pub include_send_history: bool, +} + +#[utoipa::path( + get, + path = "/api/v1/emailers", + tag = "emailer", + responses( + (status = 200, description = "Successfully retrieved emailers", body = Vec), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Bad request"), + (status = 500, description = "Internal server error") + ) +)] +async fn get_emailers( + State(ctx): State, + QsQuery(include_params): QsQuery, + session: Session, +) -> APIResult>> { + enforce_session_permissions(&session, &[UserPermission::EmailerRead])?; + + let client = &ctx.db; + + let mut query = client.emailer().find_many(vec![]); + + // TODO: consider auto truncating? + if include_params.include_send_history { + query = query.with(emailer::send_history::fetch(vec![])) + } + + let emailers = query + .exec() + .await? 
+ .into_iter() + .map(SMTPEmailer::try_from) + .collect::>>(); + let emailers = emailers.into_iter().collect::, _>>()?; + + Ok(Json(emailers)) +} + +#[utoipa::path( + get, + path = "/api/v1/emailers/:id", + tag = "emailer", + params( + ("id" = i32, Path, description = "The emailer ID") + ), + responses( + (status = 200, description = "Successfully retrieved emailer", body = Notifier), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Notifier not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn get_emailer_by_id( + State(ctx): State, + Path(id): Path, + session: Session, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerRead])?; + + let client = &ctx.db; + + let emailer = client + .emailer() + .find_first(vec![emailer::id::equals(id)]) + .exec() + .await? + .ok_or(APIError::NotFound("Emailer not found".to_string()))?; + + Ok(Json(SMTPEmailer::try_from(emailer)?)) +} + +/// Input object for creating or updating an emailer +#[derive(Deserialize, ToSchema, Type)] +pub struct CreateOrUpdateEmailer { + /// The friendly name for the emailer + name: String, + /// Whether the emailer is the primary emailer + is_primary: bool, + /// The emailer configuration + config: EmailerConfigInput, +} + +/// Create a new emailer +#[utoipa::path( + post, + path = "/api/v1/emailers", + tag = "emailer", + request_body = CreateOrUpdateEmailer, + responses( + (status = 200, description = "Successfully created emailer"), + (status = 400, description = "Bad request"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 500, description = "Internal server error") + ) +)] +async fn create_emailer( + State(ctx): State, + session: Session, + Json(payload): Json, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerCreate])?; + + let client = &ctx.db; + + let config = EmailerConfig::from_client_config(payload.config, 
&ctx).await?; + let emailer = client + .emailer() + .create( + payload.name, + config.sender_email, + config.sender_display_name, + config.username, + config.encrypted_password, + config.smtp_host.to_string(), + config.smtp_port.into(), + vec![ + emailer::is_primary::set(payload.is_primary), + emailer::max_attachment_size_bytes::set(config.max_attachment_size_bytes), + ], + ) + .exec() + .await?; + Ok(Json(SMTPEmailer::try_from(emailer)?)) +} + +/// Update an existing emailer by ID +#[utoipa::path( + put, + path = "/api/v1/emailers/:id", + tag = "emailer", + request_body = CreateOrUpdateEmailer, + params( + ("id" = i32, Path, description = "The id of the emailer to update") + ), + responses( + (status = 200, description = "Successfully updated emailer"), + (status = 400, description = "Bad request"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn update_emailer( + State(ctx): State, + Path(id): Path, + session: Session, + Json(payload): Json, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + let config = EmailerConfig::from_client_config(payload.config, &ctx).await?; + let updated_emailer = client + .emailer() + .update( + emailer::id::equals(id), + vec![ + emailer::name::set(payload.name), + emailer::sender_email::set(config.sender_email), + emailer::sender_display_name::set(config.sender_display_name), + emailer::username::set(config.username), + emailer::encrypted_password::set(config.encrypted_password), + emailer::smtp_host::set(config.smtp_host.to_string()), + emailer::smtp_port::set(config.smtp_port.into()), + emailer::max_attachment_size_bytes::set(config.max_attachment_size_bytes), + ], + ) + .exec() + .await?; + Ok(Json(SMTPEmailer::try_from(updated_emailer)?)) +} + +// #[derive(Deserialize, ToSchema, Type)] +// pub 
struct PatchEmailer {} + +// #[utoipa::path( +// patch, +// path = "/api/v1/emailers/:id/", +// tag = "emailer", +// params( +// ("id" = i32, Path, description = "The ID of the emailer") +// ), +// responses( +// (status = 200, description = "Successfully updated emailer"), +// (status = 401, description = "Unauthorized"), +// (status = 403, description = "Forbidden"), +// (status = 404, description = "Notifier not found"), +// (status = 500, description = "Internal server error"), +// ) +// )] +// async fn patch_emailer( +// State(ctx): State, +// Path(id): Path, +// session: Session, +// Json(payload): Json, +// ) -> APIResult> { +// // enforce_session_permissions(&session, &[UserPermission::ManageNotifier])?; + +// let client = &ctx.db; + +// unimplemented!() + +// // Ok(Json(SMTPEmailer::try_from(patched_emailer)?)) +// } + +/// Delete an emailer by ID +#[utoipa::path( + delete, + path = "/api/v1/emailers/:id/", + tag = "emailer", + params( + ("id" = i32, Path, description = "The emailer ID"), + ), + responses( + (status = 200, description = "Successfully deleted emailer"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Notifier not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn delete_emailer( + State(ctx): State, + Path(id): Path, + session: Session, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + + let deleted_emailer = client + .emailer() + .delete(emailer::id::equals(id)) + .exec() + .await?; + + Ok(Json(SMTPEmailer::try_from(deleted_emailer)?)) +} + +#[derive(Debug, Deserialize, ToSchema, Type)] +pub struct EmailerSendRecordIncludeParams { + #[serde(default)] + include_sent_by: bool, +} + +#[utoipa::path( + get, + path = "/api/v1/emailers/:id/send-history", + tag = "emailer", + params( + ("id" = i32, Path, description = "The ID of the emailer") + ), + responses( + (status = 200, description = "Successfully 
retrieved emailer send history"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 500, description = "Internal server error") + ) +)] +async fn get_emailer_send_history( + State(ctx): State, + Path(emailer_id): Path, + QsQuery(include_params): QsQuery, + session: Session, +) -> APIResult>> { + tracing::trace!(?emailer_id, ?include_params, "get_emailer_send_history"); + enforce_session_permissions(&session, &[UserPermission::EmailerRead])?; + + let client = &ctx.db; + + let mut query = client + .emailer_send_record() + .find_many(vec![emailer_send_record::emailer_id::equals(emailer_id)]); + + if include_params.include_sent_by { + query = query.with(emailer_send_record::sent_by::fetch()); + } + + let history = query + .order_by(emailer_send_record::sent_at::order(Direction::Desc)) + .exec() + .await?; + + Ok(Json( + history + .into_iter() + .map(EmailerSendRecord::try_from) + .collect::>>() + .into_iter() + .collect::, _>>()?, + )) +} + +#[derive(Deserialize, ToSchema, Type)] +pub struct SendAttachmentEmailsPayload { + media_ids: Vec, + send_to: Vec, +} + +#[derive(Serialize, ToSchema, Type)] +pub struct SendAttachmentEmailResponse { + sent_emails_count: i32, + errors: Vec, +} + +async fn get_and_validate_recipients( + user: &User, + client: &PrismaClient, + send_to: &[EmailerSendTo], +) -> APIResult> { + let mut recipients = Vec::new(); + for to in send_to { + let recipient = match to { + EmailerSendTo::Device { device_id } => { + let device = client + .registered_email_device() + .find_first(vec![registered_email_device::id::equals(*device_id)]) + .exec() + .await? 
+ .ok_or(APIError::NotFound("Device not found".to_string()))?; + device.email + }, + EmailerSendTo::Anonymous { email } => email.clone(), + }; + recipients.push(recipient); + } + + let forbidden_devices = client + .registered_email_device() + .find_many(vec![registered_email_device::forbidden::equals(true)]) + .exec() + .await?; + let forbidden_recipients = recipients + .iter() + .filter(|r| forbidden_devices.iter().any(|d| d.email == **r)) + .cloned() + .collect::>(); + let has_forbidden_recipients = !forbidden_recipients.is_empty(); + + if has_forbidden_recipients { + tracing::error!( + ?user, + ?forbidden_recipients, + "User attempted to send an email to unauthorized recipient(s)!" + ); + return Err(APIError::forbidden_discreet()); + } + + Ok(recipients) +} + +async fn send_attachment_email( + State(ctx): State, + session: Session, + Json(payload): Json, +) -> APIResult> { + let by_user = enforce_session_permissions( + &session, + &chain_optional_iter( + [UserPermission::EmailSend], + [payload + .send_to + .iter() + .any(|to| matches!(to, EmailerSendTo::Anonymous { .. })) + .then_some(UserPermission::EmailArbitrarySend)], + ), + )?; + + let client = &ctx.db; + + let emailer = client + .emailer() + .find_first(vec![emailer::is_primary::equals(true)]) + .exec() + .await? + .ok_or(APIError::NotFound("Primary emailer not found".to_string()))?; + let emailer = SMTPEmailer::try_from(emailer)?; + let emailer_id = emailer.id; + let max_attachment_size_bytes = emailer.config.max_attachment_size_bytes; + + let expected_books_len = payload.media_ids.len(); + let books = client + .media() + .find_many(apply_media_filters_for_user( + MediaFilter::ids(payload.media_ids), + &by_user, + )) + .exec() + .await? 
+ .into_iter() + .map(Media::from) + .collect::>(); + + if books.len() != expected_books_len { + tracing::error!(?books, ?expected_books_len, "Some media IDs were not found"); + return Err(APIError::BadRequest( + "Some media IDs were not found".to_string(), + )); + } + + let (tx, tx_client) = client._transaction().begin().await?; + let recipients = + match get_and_validate_recipients(&by_user, &tx_client, &payload.send_to).await { + Ok(r) => { + tx.commit(tx_client).await?; + r + }, + Err(e) => { + tx.rollback(tx_client).await?; + return Err(e); + }, + }; + + let emailer_client = emailer.into_client(&ctx).await?; + let mut record_creates = + Vec::<(i32, String, Vec)>::new(); + let mut errors = Vec::new(); + + // TODO: Refactor this to chunk the books and send them in batches according to + // the max attachments per email limit + + for book in books { + let FileParts { + file_name, + extension, + .. + } = PathBuf::from(&book.path).file_parts(); + let content = read_entire_file(book.path)?; + + // TODO: should error? 
+ match (content.len(), max_attachment_size_bytes) { + (_, Some(max_size)) if content.len() as i32 > max_size => { + tracing::warn!("Attachment too large: {} > {}", content.len(), max_size); + continue; + }, + (_, _) if content.len() < 5 => { + tracing::warn!("Attachment too small: {} < 5", content.len()); + continue; + }, + _ => {}, + } + + let content_type = + ContentType::from_bytes_with_fallback(&content[..5], &extension) + .mime_type() + .parse::() + .map_err(|_| { + APIError::InternalServerError( + "Failed to parse content type".to_string(), + ) + })?; + + let attachment_meta = AttachmentMeta::new( + file_name.clone(), + Some(book.id.clone()), + content.len() as i32, + ) + .into_data() + .map_or_else( + |e| { + tracing::error!(?e, "Failed to serialize attachment meta"); + None + }, + Some, + ); + + for recipient in recipients.iter() { + let send_result = emailer_client + .send_attachment( + "Attachment from Stump", + recipient, + AttachmentPayload { + name: file_name.clone(), + content: content.clone(), + content_type: content_type.clone(), + }, + ) + .await; + + match send_result { + Ok(_) => { + record_creates.push(( + emailer_id, + recipient.clone(), + vec![ + emailer_send_record::sent_by::connect(user::id::equals( + by_user.id.clone(), + )), + emailer_send_record::attachment_meta::set( + attachment_meta.clone(), + ), + ], + )); + }, + Err(e) => { + tracing::error!(?e, "Failed to send email"); + errors.push(format!( + "Failed to send {} to {}: {}", + file_name, recipient, e + )); + continue; + }, + } + } + } + + let sent_emails_count = record_creates.len(); + // Note: create_many threw a strange error... 
+ let audit_result = client + ._batch(record_creates.into_iter().map(|(eid, recipient, params)| { + client.emailer_send_record().create( + emailer::id::equals(eid), + recipient, + params, + ) + })) + .await; + if let Err(error) = audit_result { + tracing::error!(?error, "Failed to create emailer send records!"); + errors.push(format!("Failed to create emailer send records: {}", error)); + } + + let updated_emailer_result = client + .emailer() + .update( + emailer::id::equals(emailer_id), + vec![emailer::last_used_at::set(Some(Utc::now().into()))], + ) + .exec() + .await; + if let Err(error) = updated_emailer_result { + tracing::error!(?error, "Failed to update emailer last used at!"); + errors.push(format!("Failed to update emailer last used at: {}", error)); + } + + Ok(Json(SendAttachmentEmailResponse { + sent_emails_count: sent_emails_count as i32, + errors, + })) +} + +/// Get all email devices on the server +#[utoipa::path( + get, + path = "/api/v1/email-devices", + tag = "email-devices", + responses( + (status = 200, description = "Successfully retrieved email devices"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 500, description = "Internal server error") + ) +)] +async fn get_email_devices( + State(ctx): State, + session: Session, +) -> APIResult>> { + enforce_session_permissions(&session, &[UserPermission::EmailSend])?; + + let client = &ctx.db; + + let devices = client + .registered_email_device() + .find_many(vec![]) + .exec() + .await?; + + Ok(Json( + devices + .into_iter() + .map(RegisteredEmailDevice::from) + .collect(), + )) +} + +/// Get an email device by its ID +#[utoipa::path( + get, + path = "/api/v1/email-devices/:id", + tag = "email-devices", + params( + ("id" = i32, Path, description = "The ID of the email device") + ), + responses( + (status = 200, description = "Successfully retrieved email device"), + (status = 401, description = "Unauthorized"), + (status = 403, description = 
"Forbidden"), + (status = 404, description = "Device not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn get_email_device_by_id( + State(ctx): State, + Path(id): Path, + session: Session, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailSend])?; + + let client = &ctx.db; + + let device = client + .registered_email_device() + .find_unique(registered_email_device::id::equals(id)) + .exec() + .await? + .ok_or(APIError::NotFound("Device not found".to_string()))?; + + Ok(Json(RegisteredEmailDevice::from(device))) +} + +/// Input object for creating or updating an email device +#[derive(Deserialize, ToSchema, Type)] +pub struct CreateOrUpdateEmailDevice { + /// The friendly name of the email device, e.g. "Aaron's Kobo" + name: String, + /// The email address of the device + email: String, + /// Whether the device is forbidden from receiving emails from the server. + forbidden: bool, +} + +/// Create a new email device +#[utoipa::path( + post, + path = "/api/v1/email-devices", + tag = "email-devices", + responses( + (status = 200, description = "Successfully created email device"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 500, description = "Internal server error") + ) +)] +async fn create_email_device( + State(ctx): State, + session: Session, + Json(payload): Json, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + + let device = client + .registered_email_device() + .create( + payload.name, + payload.email, + vec![registered_email_device::forbidden::set(payload.forbidden)], + ) + .exec() + .await?; + + Ok(Json(RegisteredEmailDevice::from(device))) +} + +/// Update an existing email device by its ID +#[utoipa::path( + put, + path = "/api/v1/email-devices/:id", + tag = "email-devices", + params( + ("id" = i32, Path, description = "The ID of the email device") + ), + 
responses( + (status = 200, description = "Successfully updated email device"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Device not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn update_email_device( + State(ctx): State, + Path(id): Path, + session: Session, + Json(payload): Json, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + + let device = client + .registered_email_device() + .update( + registered_email_device::id::equals(id), + vec![ + registered_email_device::name::set(payload.name), + registered_email_device::email::set(payload.email), + registered_email_device::forbidden::set(payload.forbidden), + ], + ) + .exec() + .await?; + + Ok(Json(RegisteredEmailDevice::from(device))) +} + +/// Patch an existing email device by its ID +#[derive(Deserialize, ToSchema, Type)] +pub struct PatchEmailDevice { + /// The friendly name of the email device, e.g. "Aaron's Kobo" + pub name: Option, + /// The email address of the device + pub email: Option, + /// Whether the device is forbidden from receiving emails from the server. 
+ pub forbidden: Option, +} + +#[utoipa::path( + patch, + path = "/api/v1/email-devices/:id", + tag = "email-devices", + params( + ("id" = i32, Path, description = "The ID of the email device") + ), + responses( + (status = 200, description = "Successfully patched email device"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Device not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn patch_email_device( + State(ctx): State, + Path(id): Path, + session: Session, + Json(payload): Json, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + + let device = client + .registered_email_device() + .update( + registered_email_device::id::equals(id), + chain_optional_iter( + [], + [ + payload.name.map(registered_email_device::name::set), + payload.email.map(registered_email_device::email::set), + payload + .forbidden + .map(registered_email_device::forbidden::set), + ], + ), + ) + .exec() + .await?; + + Ok(Json(RegisteredEmailDevice::from(device))) +} + +/// Delete an email device by its ID +#[utoipa::path( + delete, + path = "/api/v1/email-devices/:id", + tag = "email-devices", + params( + ("id" = i32, Path, description = "The ID of the email device") + ), + responses( + (status = 200, description = "Successfully deleted email device"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Device not found"), + (status = 500, description = "Internal server error") + ) +)] +async fn delete_email_device( + State(ctx): State, + Path(id): Path, + session: Session, +) -> APIResult> { + enforce_session_permissions(&session, &[UserPermission::EmailerManage])?; + + let client = &ctx.db; + + let device = client + .registered_email_device() + .delete(registered_email_device::id::equals(id)) + .exec() + .await?; + + 
Ok(Json(RegisteredEmailDevice::from(device))) +} diff --git a/apps/server/src/routers/api/v1/library.rs b/apps/server/src/routers/api/v1/library.rs index 23bf40a45..cf6fa1d70 100644 --- a/apps/server/src/routers/api/v1/library.rs +++ b/apps/server/src/routers/api/v1/library.rs @@ -25,6 +25,7 @@ use stump_core::{ PrismaCountTrait, }, filesystem::{ + analyze_media_job::{AnalyzeMediaJob, AnalyzeMediaJobVariant}, get_unknown_thumnail, image::{ self, generate_thumbnail, place_thumbnail, remove_thumbnails, @@ -96,6 +97,7 @@ pub(crate) fn mount(app_state: AppState) -> Router { .route("/clean", put(clean_library)) .route("/series", get(get_library_series)) .route("/media", get(get_library_media)) + .route("/analyze", post(start_media_analysis)) .nest( "/thumbnail", Router::new() @@ -1335,7 +1337,7 @@ async fn create_library( ))); } - // TODO: refactor once nested create is supported + // TODO(prisma 0.7.0): Nested create // https://github.com/Brendonovich/prisma-client-rust/issues/44 let library_options_arg = input.library_options.unwrap_or_default(); let transaction_result: Result = db @@ -1726,3 +1728,38 @@ async fn get_library_stats( Ok(Json(stats)) } + +#[utoipa::path( + post, + path = "/api/v1/libraries/:id/analyze", + tag = "library", + params( + ("id" = String, Path, description = "The ID of the library to analyze") + ), + responses( + (status = 200, description = "Successfully started library media analysis"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Library not found"), + (status = 500, description = "Internal server error"), + ) +)] +async fn start_media_analysis( + Path(id): Path, + State(ctx): State, + session: Session, +) -> APIResult<()> { + let _ = enforce_session_permissions(&session, &[UserPermission::ManageLibrary])?; + + // Start analysis job + ctx.enqueue_job(AnalyzeMediaJob::new( + AnalyzeMediaJobVariant::AnalyzeLibrary(id), + )) + .map_err(|e| { + let err = "Failed to 
enqueue analyze library media job"; + error!(?e, err); + APIError::InternalServerError(err.to_string()) + })?; + + APIResult::Ok(()) +} diff --git a/apps/server/src/routers/api/v1/log.rs b/apps/server/src/routers/api/v1/log.rs index 7e182e555..4f19cef45 100644 --- a/apps/server/src/routers/api/v1/log.rs +++ b/apps/server/src/routers/api/v1/log.rs @@ -33,7 +33,7 @@ use crate::{ pub(crate) fn mount(app_state: AppState) -> Router { Router::new() - .route("/logs", get(get_logs)) + .route("/logs", get(get_logs).delete(delete_logs)) .nest( "/logs/file", Router::new() @@ -141,6 +141,32 @@ async fn get_logs( Ok(Json(Pageable::from(logs))) } +#[utoipa::path( + delete, + path = "/api/v1/logs", + tag = "log", + responses( + (status = 200, description = "Successfully deleted logs."), + (status = 401, description = "Unauthorized."), + (status = 403, description = "Forbidden."), + (status = 500, description = "Internal server error."), + ) +)] +async fn delete_logs( + State(ctx): State, + filters: QsQuery, + session: Session, +) -> APIResult<()> { + enforce_session_permissions(&session, &[UserPermission::ManageServer])?; + + let where_params = apply_log_filters(filters.0); + + let affected_records = ctx.db.log().delete_many(where_params).exec().await?; + tracing::debug!(affected_records, "Deleted logs"); + + Ok(()) +} + async fn tail_log_file( State(ctx): State, session: Session, diff --git a/apps/server/src/routers/api/v1/media.rs b/apps/server/src/routers/api/v1/media.rs index 66b72b8e4..3680e37ce 100644 --- a/apps/server/src/routers/api/v1/media.rs +++ b/apps/server/src/routers/api/v1/media.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use axum::{ extract::{DefaultBodyLimit, Multipart, Path, State}, middleware::from_extractor_with_state, - routing::{get, put}, + routing::{get, post, put}, Json, Router, }; use axum_extra::extract::Query; @@ -23,6 +23,7 @@ use stump_core::{ CountQueryReturn, }, filesystem::{ + analyze_media_job::{AnalyzeMediaJob, AnalyzeMediaJobVariant}, 
get_unknown_thumnail, image::{ generate_thumbnail, place_thumbnail, remove_thumbnails, ImageFormat, @@ -38,6 +39,7 @@ use stump_core::{ }, }; use tower_sessions::Session; +use tracing::error; use utoipa::ToSchema; use crate::{ @@ -81,6 +83,7 @@ pub(crate) fn mount(app_state: AppState) -> Router { // TODO: configurable max file size .layer(DefaultBodyLimit::max(20 * 1024 * 1024)), // 20MB ) + .route("/analyze", post(start_media_analysis)) .route("/page/:page", get(get_media_page)) .route( "/progress", @@ -1194,12 +1197,12 @@ async fn replace_media_thumbnail( // Note: I chose to *safely* attempt the removal as to not block the upload, however after some // user testing I'd like to see if this becomes a problem. We'll see! - match remove_thumbnails(&[book_id.clone()], ctx.config.get_thumbnails_dir()) { - Ok(count) => tracing::info!("Removed {} thumbnails!", count), - Err(e) => tracing::error!( + if let Err(e) = remove_thumbnails(&[book_id.clone()], ctx.config.get_thumbnails_dir()) + { + tracing::error!( ?e, "Failed to remove existing media thumbnail before replacing!" 
- ), + ); } let path_buf = place_thumbnail(&book_id, ext, &bytes, &ctx.config)?; @@ -1550,3 +1553,38 @@ async fn put_media_complete_status( completed_at: updated_or_created_rp.completed_at.map(|ca| ca.to_rfc3339()), })) } + +#[utoipa::path( + post, + path = "/api/v1/media/:id/analyze", + tag = "media", + params( + ("id" = String, Path, description = "The ID of the media to analyze") + ), + responses( + (status = 200, description = "Successfully started media analysis"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Media not found"), + (status = 500, description = "Internal server error"), + ) +)] +async fn start_media_analysis( + Path(id): Path, + State(ctx): State, + session: Session, +) -> APIResult<()> { + let _ = enforce_session_permissions(&session, &[UserPermission::ManageLibrary])?; + + // Start analysis job + ctx.enqueue_job(AnalyzeMediaJob::new( + AnalyzeMediaJobVariant::AnalyzeSingleItem(id), + )) + .map_err(|e| { + let err = "Failed to enqueue analyze media job"; + error!(?e, err); + APIError::InternalServerError(err.to_string()) + })?; + + APIResult::Ok(()) +} diff --git a/apps/server/src/routers/api/v1/mod.rs b/apps/server/src/routers/api/v1/mod.rs index 46be6710a..1d89c590f 100644 --- a/apps/server/src/routers/api/v1/mod.rs +++ b/apps/server/src/routers/api/v1/mod.rs @@ -15,6 +15,7 @@ use crate::{ pub(crate) mod auth; pub(crate) mod book_club; +pub(crate) mod emailer; pub(crate) mod epub; pub(crate) mod filesystem; pub(crate) mod job; @@ -33,6 +34,7 @@ pub(crate) fn mount(app_state: AppState) -> Router { Router::new() .merge(auth::mount()) .merge(epub::mount(app_state.clone())) + .merge(emailer::mount(app_state.clone())) .merge(library::mount(app_state.clone())) .merge(media::mount(app_state.clone())) .merge(metadata::mount(app_state.clone())) diff --git a/apps/server/src/routers/api/v1/notifier.rs b/apps/server/src/routers/api/v1/notifier.rs index 1776fe78a..ccf8d3667 
100644 --- a/apps/server/src/routers/api/v1/notifier.rs +++ b/apps/server/src/routers/api/v1/notifier.rs @@ -7,7 +7,7 @@ use axum::{ use serde::Deserialize; use specta::Type; use stump_core::{ - db::entity::{Notifier, NotifierConfig, NotifierType, UserPermission}, + db::entity::{Notifier, NotifierConfigInput, NotifierType, UserPermission}, prisma::notifier, }; use tower_sessions::Session; @@ -108,7 +108,7 @@ async fn get_notifier_by_id( pub struct CreateOrUpdateNotifier { #[serde(rename = "type")] _type: NotifierType, - config: NotifierConfig, + config: NotifierConfigInput, } #[utoipa::path( @@ -132,14 +132,10 @@ async fn create_notifier( enforce_session_permissions(&session, &[UserPermission::CreateNotifier])?; let client = &ctx.db; - + let config = payload.config.into_config(&ctx).await?.into_bytes()?; let notifier = client .notifier() - .create( - payload._type.to_string(), - payload.config.into_bytes()?, - vec![], - ) + .create(payload._type.to_string(), config, vec![]) .exec() .await?; @@ -172,13 +168,14 @@ async fn update_notifier( enforce_session_permissions(&session, &[UserPermission::ManageNotifier])?; let client = &ctx.db; + let config = payload.config.into_config(&ctx).await?.into_bytes()?; let notifier = client .notifier() .update( notifier::id::equals(id), vec![ notifier::r#type::set(payload._type.to_string()), - notifier::config::set(payload.config.into_bytes()?), + notifier::config::set(config), ], ) .exec() @@ -191,7 +188,7 @@ async fn update_notifier( pub struct PatchNotifier { #[serde(rename = "type")] _type: Option, - config: Option, + config: Option, } #[utoipa::path( @@ -219,10 +216,11 @@ async fn patch_notifier( let client = &ctx.db; - let config = payload - .config - .map(|config| config.into_bytes()) - .transpose()?; + let config = if let Some(config) = payload.config { + Some(config.into_config(&ctx).await?.into_bytes()?) 
+ } else { + None + }; let patched_notifier = client .notifier() diff --git a/apps/server/src/routers/api/v1/series.rs b/apps/server/src/routers/api/v1/series.rs index 915537778..38402bc26 100644 --- a/apps/server/src/routers/api/v1/series.rs +++ b/apps/server/src/routers/api/v1/series.rs @@ -19,6 +19,7 @@ use stump_core::{ PrismaCountTrait, SeriesDAO, DAO, }, filesystem::{ + analyze_media_job::{AnalyzeMediaJob, AnalyzeMediaJobVariant}, get_unknown_thumnail, image::{ generate_thumbnail, place_thumbnail, remove_thumbnails, ImageFormat, @@ -74,6 +75,7 @@ pub(crate) fn mount(app_state: AppState) -> Router { .route("/", get(get_series_by_id)) .route("/scan", post(scan_series)) .route("/media", get(get_series_media)) + .route("/analyze", post(start_media_analysis)) .route("/media/next", get(get_next_in_series)) .route( "/thumbnail", @@ -1059,3 +1061,38 @@ async fn get_series_is_complete( async fn put_series_is_complete() -> APIResult> { Err(APIError::NotImplemented) } + +#[utoipa::path( + post, + path = "/api/v1/series/:id/analyze", + tag = "series", + params( + ("id" = String, Path, description = "The ID of the series to analyze") + ), + responses( + (status = 200, description = "Successfully started series media analysis"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden"), + (status = 404, description = "Series not found"), + (status = 500, description = "Internal server error"), + ) +)] +async fn start_media_analysis( + Path(id): Path, + State(ctx): State, + session: Session, +) -> APIResult<()> { + let _ = enforce_session_permissions(&session, &[UserPermission::ManageLibrary])?; + + // Start analysis job + ctx.enqueue_job(AnalyzeMediaJob::new(AnalyzeMediaJobVariant::AnalyzeSeries( + id, + ))) + .map_err(|e| { + let err = "Failed to enqueue analyze series media job"; + error!(?e, err); + APIError::InternalServerError(err.to_string()) + })?; + + APIResult::Ok(()) +} diff --git a/apps/server/src/routers/mod.rs 
b/apps/server/src/routers/mod.rs index f631ee4ea..511890967 100644 --- a/apps/server/src/routers/mod.rs +++ b/apps/server/src/routers/mod.rs @@ -9,6 +9,8 @@ mod sse; mod utoipa; mod ws; +pub(crate) use api::v1::auth::enforce_max_sessions; + pub(crate) fn mount(app_state: AppState) -> Router { let mut app_router = Router::new(); diff --git a/apps/server/src/routers/opds/mod.rs b/apps/server/src/routers/opds/mod.rs new file mode 100644 index 000000000..f40977471 --- /dev/null +++ b/apps/server/src/routers/opds/mod.rs @@ -0,0 +1,9 @@ +use axum::Router; + +use crate::config::state::AppState; + +pub(crate) mod v1_2; + +pub(crate) fn mount(app_state: AppState) -> Router { + Router::new().nest("/opds", Router::new().merge(v1_2::mount(app_state))) +} diff --git a/apps/server/src/routers/opds.rs b/apps/server/src/routers/opds/v1_2.rs similarity index 99% rename from apps/server/src/routers/opds.rs rename to apps/server/src/routers/opds/v1_2.rs index d06b6553a..031c2fc4b 100644 --- a/apps/server/src/routers/opds.rs +++ b/apps/server/src/routers/opds/v1_2.rs @@ -12,7 +12,7 @@ use stump_core::{ media::get_page, ContentType, }, - opds::{ + opds::v1_2::{ entry::OpdsEntry, feed::OpdsFeed, link::{OpdsLink, OpdsLinkRel, OpdsLinkType}, @@ -33,7 +33,7 @@ use crate::{ }, }; -use super::api::v1::{ +use crate::routers::api::v1::{ media::{apply_in_progress_filter_for_user, apply_media_age_restriction}, series::apply_series_age_restriction, }; @@ -41,7 +41,7 @@ use super::api::v1::{ pub(crate) fn mount(app_state: AppState) -> Router { Router::new() .nest( - "/opds/v1.2", + "/v1.2", Router::new() .route("/catalog", get(catalog)) .route("/keep-reading", get(keep_reading)) diff --git a/apps/server/src/routers/utoipa.rs b/apps/server/src/routers/utoipa.rs index 4a032ca80..2a27fd6de 100644 --- a/apps/server/src/routers/utoipa.rs +++ b/apps/server/src/routers/utoipa.rs @@ -62,6 +62,8 @@ use super::api::{ api::v1::library::create_library, api::v1::library::update_library, 
api::v1::library::delete_library, + api::v1::log::get_logs, + api::v1::log::delete_logs, api::v1::media::get_media, api::v1::media::get_duplicate_media, api::v1::media::get_in_progress_media, @@ -145,7 +147,7 @@ use super::api::{ CreateTags, CleanLibraryResponse, MediaIsComplete, SeriesIsComplete, PutMediaCompletionStatus, SmartList, SmartListMeta, SmartListItems, SmartListView, CreateOrUpdateSmartList, CreateOrUpdateSmartListView, SmartListItemGrouping, SmartFilter, FilterJoin, EntityVisibility, - SmartListViewConfig, SmartListTableColumnSelection, SmartListTableSortingState, + SmartListViewConfig, ReactTableColumnSort, ReactTableGlobalSort, MediaSmartFilter, MediaMetadataSmartFilter, SeriesSmartFilter, SeriesMetadataSmartFilter, LibrarySmartFilter, Notifier, CreateOrUpdateNotifier, PatchNotifier ) diff --git a/apps/web/package.json b/apps/web/package.json index 6415179c2..db22771c1 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -1,6 +1,6 @@ { "name": "@stump/web", - "version": "0.0.3", + "version": "0.0.4", "description": "", "license": "MIT", "scripts": { @@ -14,8 +14,8 @@ "@stump/browser": "*", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router": "^6.22.3", - "react-router-dom": "^6.22.3" + "react-router": "^6.23.0", + "react-router-dom": "^6.23.0" }, "devDependencies": { "@types/react": "^18.2.78", diff --git a/apps/web/src/App.tsx b/apps/web/src/App.tsx index 71d3eba68..187502d66 100644 --- a/apps/web/src/App.tsx +++ b/apps/web/src/App.tsx @@ -5,7 +5,7 @@ const getDebugUrl = () => { return `http://${hostname}:10801` } -export const baseUrl = import.meta.env.PROD ? window.location.href : getDebugUrl() +export const baseUrl = import.meta.env.PROD ? 
window.location.origin : getDebugUrl() export default function App() { return diff --git a/core/Cargo.toml b/core/Cargo.toml index 6e5532327..d511c1406 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -4,52 +4,47 @@ version = { workspace = true } edition = "2021" [dependencies] -tokio = { workspace = true } -serde = { workspace = true } -prisma-client-rust = { workspace = true } -specta = { workspace = true } - -### Async Utils ### -rayon = "1.8.0" -futures = { workspace = true } -async-trait = { workspace = true } +alphanumeric-sort = "1.5.3" async-channel = "2.1.0" - -### Filesystem Utils ### -walkdir = "2.4.0" -globset = "0.4.14" +async-trait = { workspace = true } +cuid = "1.3.2" +data-encoding = "2.5.0" dirs = "5.0.1" +email = { path = "../crates/email" } +epub = { git = "https://github.com/stumpapp/epub-rs", rev = "38e091abe96875952556ab7dec195022d0230e14" } +futures = { workspace = true } +globset = "0.4.14" +image = "0.24.7" +infer = "0.15.0" +itertools = "0.12.1" +prisma-client-rust = { workspace = true } +rand = { workspace = true } +serde = { workspace = true } +serde-xml-rs = "0.6.0" # Support for XML serialization/deserialization +serde_json = { workspace = true } +simple_crypt = { workspace = true } +specta = { workspace = true } +tokio = { workspace = true } toml = "0.8.8" trash = "3.1.2" -infer = "0.15.0" -image = "0.24.7" -webp = "0.2.6" -zip = "0.6.6" -epub = { git = "https://github.com/stumpapp/epub-rs", rev = "38e091abe96875952556ab7dec195022d0230e14" } -unrar = { version = "0.5.2" } # pdf = "0.8.1" pdf = { git = "https://github.com/pdf-rs/pdf", rev = "3bc9e636d31b1846e51b58c7429914e640866f53" } # TODO: revert back to crates.io once fix(es) release pdfium-render = "0.8.16" -data-encoding = "2.5.0" +rayon = "1.8.0" +regex = "1.10.4" ring = "0.17.8" - -### Errors and Logging ### thiserror = { workspace = true } tracing = { workspace = true } tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } tracing-appender = "0.2.3" - 
-### Misc Utils ### +unrar = { version = "0.5.2" } urlencoding = { workspace = true } -cuid = "1.3.2" -xml-rs = "0.8.20" # XML reader/writer -serde-xml-rs = "0.6.0" # Support for XML serialization/deserialization -serde_json = { workspace = true } -itertools = "0.12.1" utoipa = { version = "3.5.0" } uuid = "1.8.0" -regex = "1.10.4" -alphanumeric-sort = "1.5.3" +walkdir = "2.4.0" +webp = "0.2.6" +xml-rs = "0.8.20" # XML reader/writer +zip = "2.1.3" [dev-dependencies] temp-env = "0.3.6" diff --git a/core/integration-tests/data/example.webp b/core/integration-tests/data/example.webp new file mode 100644 index 000000000..122741b60 Binary files /dev/null and b/core/integration-tests/data/example.webp differ diff --git a/core/integration-tests/data/nested-macos-compressed.cbz b/core/integration-tests/data/nested-macos-compressed.cbz new file mode 100644 index 000000000..f9521ce3a Binary files /dev/null and b/core/integration-tests/data/nested-macos-compressed.cbz differ diff --git a/core/integration-tests/data/rust_book.pdf b/core/integration-tests/data/rust_book.pdf new file mode 100644 index 000000000..7eec2bb92 Binary files /dev/null and b/core/integration-tests/data/rust_book.pdf differ diff --git a/core/integration-tests/data/science_comics_001.cbz b/core/integration-tests/data/science_comics_001.cbz new file mode 100644 index 000000000..3a719f93d Binary files /dev/null and b/core/integration-tests/data/science_comics_001.cbz differ diff --git a/core/prisma/migrations/20240412235240_emailer_and_encryption/migration.sql b/core/prisma/migrations/20240412235240_emailer_and_encryption/migration.sql new file mode 100644 index 000000000..a11777696 --- /dev/null +++ b/core/prisma/migrations/20240412235240_emailer_and_encryption/migration.sql @@ -0,0 +1,45 @@ +-- AlterTable +ALTER TABLE "server_config" ADD COLUMN "encryption_key" TEXT; + +-- CreateTable +CREATE TABLE "registered_email_devices" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "name" TEXT NOT NULL, + 
"email" TEXT NOT NULL, + "forbidden" BOOLEAN NOT NULL DEFAULT false +); + +-- CreateTable +CREATE TABLE "emailer_send_records" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "emailer_id" INTEGER NOT NULL, + "recipient_email" TEXT NOT NULL, + "attachment_meta" BLOB, + "sent_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "sent_by_user_id" TEXT, + CONSTRAINT "emailer_send_records_emailer_id_fkey" FOREIGN KEY ("emailer_id") REFERENCES "emailers" ("id") ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT "emailer_send_records_sent_by_user_id_fkey" FOREIGN KEY ("sent_by_user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE +); + +-- CreateTable +CREATE TABLE "emailers" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "name" TEXT NOT NULL, + "is_primary" BOOLEAN NOT NULL DEFAULT false, + "sender_email" TEXT NOT NULL, + "sender_display_name" TEXT NOT NULL, + "username" TEXT NOT NULL, + "encrypted_password" TEXT NOT NULL, + "smtp_host" TEXT NOT NULL, + "smtp_port" INTEGER NOT NULL, + "tls_enabled" BOOLEAN NOT NULL DEFAULT false, + "max_attachment_size_bytes" INTEGER, + "max_num_attachments" INTEGER, + "last_used_at" DATETIME +); + +-- CreateIndex +CREATE UNIQUE INDEX "registered_email_devices_name_key" ON "registered_email_devices"("name"); + +-- CreateIndex +CREATE UNIQUE INDEX "emailers_name_key" ON "emailers"("name"); diff --git a/core/prisma/schema.prisma b/core/prisma/schema.prisma index 4aa104e36..7f068471b 100644 --- a/core/prisma/schema.prisma +++ b/core/prisma/schema.prisma @@ -43,6 +43,7 @@ model User { library_visits LastLibraryVisit[] smart_lists SmartList[] smart_list_access_rules SmartListAccessRule[] + email_usage_history EmailerSendRecord[] @@map("users") } @@ -207,7 +208,7 @@ model Media { // TODO: determine what is optional and what is safe to make required model MediaMetadata { - // TODO: remove this once nested create is supported, just use media_id + // TODO(prisma 0.7.0): Nested create id String @id @default(cuid()) 
title String? @@ -694,7 +695,7 @@ model UserPreferences { model JobScheduleConfig { id String @id @default(cuid()) - // The interval (in seconds) in which to run the scheduled confu + // The interval (in seconds) in which to run the scheduled configuration interval_secs Int @default(86400) // The libraries to exclude from scheduled scans, if any @@ -708,13 +709,61 @@ model JobScheduleConfig { model Notifier { id Int @id @default(autoincrement()) - type String //DISCORD | TELEGRAM - config Bytes //too many variants to support concrete type + type String // DISCORD | TELEGRAM + config Bytes // There will be too many variants to support concrete type(s) @@map("notifiers") } -// TODO: notifier support +model RegisteredEmailDevice { + id Int @id @default(autoincrement()) + + name String @unique + email String + forbidden Boolean @default(false) + + @@map("registered_email_devices") +} + +model EmailerSendRecord { + id Int @id @default(autoincrement()) + + emailer_id Int + emailer Emailer @relation(fields: [emailer_id], references: [id], onDelete: Cascade) + + recipient_email String + attachment_meta Bytes? // { name: "...", size: ... } + sent_at DateTime @default(now()) + + sent_by_user_id String? + sent_by User? @relation(fields: [sent_by_user_id], references: [id], onDelete: Cascade) + + @@map("emailer_send_records") +} + +model Emailer { + id Int @id @default(autoincrement()) + + name String @unique + is_primary Boolean @default(false) + + sender_email String + sender_display_name String + username String + encrypted_password String + smtp_host String + smtp_port Int + tls_enabled Boolean @default(false) + max_attachment_size_bytes Int? // null = unlimited + max_num_attachments Int? // null = unlimited + + last_used_at DateTime? 
+ + send_history EmailerSendRecord[] + + @@map("emailers") +} + // An external invitation sent to a provided email for the user to join the server model ServerInvitation { id String @id @default(cuid()) @@ -725,8 +774,6 @@ model ServerInvitation { created_at DateTime @default(now()) expires_at DateTime - // notifier Notifier? @relation(fields: [notifier_id], references: [id]) - @@map("server_invitations") } @@ -736,6 +783,10 @@ model ServerConfig { public_url String? // The public URL of the server, if any initial_wal_setup_complete Boolean @default(false) // Whether the initial WAL setup has been completed + // TODO: For obvious reasons, this is severely insecure lol i.e. don't store an encryption key in the database... + // However, I don't have a better solution at the moment. This, at best, provides a small barrier to entry I guess + // for bad actors. I am not overly knowledgeable in cryptography, so I'm not sure what the best solution is here. + encryption_key String? // The encryption key used to encrypt sensitive data // TODO: make this an array, so we can support multiple job types and not assume it will only ever be scheduled scan // The schedule configuration. If not set, no scheduled scans will be run. diff --git a/core/src/config/stump_config.rs b/core/src/config/stump_config.rs index ce9607f89..32f4de675 100644 --- a/core/src/config/stump_config.rs +++ b/core/src/config/stump_config.rs @@ -28,8 +28,6 @@ pub mod env_keys { pub const SESSION_TTL_KEY: &str = "SESSION_TTL"; pub const SESSION_EXPIRY_INTERVAL_KEY: &str = "SESSION_EXPIRY_CLEANUP_INTERVAL"; pub const SCANNER_CHUNK_SIZE_KEY: &str = "STUMP_SCANNER_CHUNK_SIZE"; - pub const ENABLE_EXPERIMENTAL_CONCURRENCY_KEY: &str = - "ENABLE_EXPERIMENTAL_CONCURRENCY"; } use env_keys::*; @@ -81,6 +79,8 @@ pub struct StumpConfig { pub db_path: Option, /// The client directory. pub client_dir: String, + /// An optional custom path for the templates directory. 
+ pub custom_templates_dir: Option, /// The configuration root for the Stump application, cotains thumbnails, cache, and logs. pub config_dir: String, /// A list of origins for CORS. @@ -113,6 +113,7 @@ impl StumpConfig { db_path: None, client_dir: String::from("./dist"), config_dir, + custom_templates_dir: None, allowed_origins: vec![], pdfium_path: None, disable_swagger: false, @@ -135,6 +136,7 @@ impl StumpConfig { db_path: None, client_dir: env!("CARGO_MANIFEST_DIR").to_string() + "/../web/dist", config_dir: super::get_default_config_dir(), + custom_templates_dir: None, allowed_origins: vec![], pdfium_path: None, disable_swagger: false, @@ -232,6 +234,10 @@ impl StumpConfig { env_configs.pdfium_path = Some(pdfium_path); } + if let Ok(custom_templates_dir) = env::var("EMAIL_TEMPLATES_DIR") { + self.custom_templates_dir = Some(custom_templates_dir); + } + if let Ok(hash_cost) = env::var(HASH_COST_KEY) { if let Ok(val) = hash_cost.parse() { env_configs.password_hash_cost = Some(val); @@ -351,6 +357,14 @@ impl StumpConfig { PathBuf::from(&self.config_dir).join("thumbnails") } + /// Returns a `PathBuf` to the Stump templates directory. 
+ pub fn get_templates_dir(&self) -> PathBuf { + self.custom_templates_dir.clone().map_or_else( + || PathBuf::from(&self.config_dir).join("templates"), + PathBuf::from, + ) + } + /// Returns a `PathBuf` to the Stump avatars directory pub fn get_avatars_dir(&self) -> PathBuf { PathBuf::from(&self.config_dir).join("avatars") @@ -508,6 +522,7 @@ mod tests { db_path: Some("not_a_real_path".to_string()), client_dir: "not_a_real_dir".to_string(), config_dir: "also_not_a_real_dir".to_string(), + custom_templates_dir: None, allowed_origins: vec![ "origin1".to_string(), "origin2".to_string(), @@ -565,6 +580,7 @@ mod tests { session_ttl: 3600 * 24, expired_session_cleanup_interval: 60 * 60 * 8, scanner_chunk_size: DEFAULT_SCANNER_CHUNK_SIZE, + custom_templates_dir: None, } ); }, @@ -594,6 +610,7 @@ mod tests { db_path: Some("not_a_real_path".to_string()), client_dir: "not_a_real_dir".to_string(), config_dir: "also_not_a_real_dir".to_string(), + custom_templates_dir: None, allowed_origins: vec!["origin1".to_string(), "origin2".to_string()], pdfium_path: Some("not_a_path_to_pdfium".to_string()), disable_swagger: false, @@ -713,6 +730,7 @@ mod tests { expired_session_cleanup_interval: DEFAULT_SESSION_EXPIRY_CLEANUP_INTERVAL, scanner_chunk_size: DEFAULT_SCANNER_CHUNK_SIZE, + custom_templates_dir: None, } ); }, diff --git a/core/src/context.rs b/core/src/context.rs index 2097cbdf3..a6e8ec336 100644 --- a/core/src/context.rs +++ b/core/src/context.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use prisma_client_rust::not; use tokio::sync::{ broadcast::{channel, Receiver, Sender}, mpsc::error::SendError, @@ -10,7 +11,8 @@ use crate::{ db, event::CoreEvent, job::{Executor, JobController, JobControllerCommand}, - prisma, + prisma::{self, server_config}, + CoreError, CoreResult, }; type EventChannel = (Sender, Receiver); @@ -173,4 +175,19 @@ impl Ctx { tracing::trace!("Sent core event"); } } + + pub async fn get_encryption_key(&self) -> CoreResult { + let server_config = self + .db + 
.server_config() + .find_first(vec![not![server_config::encryption_key::equals(None)]]) + .exec() + .await?; + + let encryption_key = server_config + .and_then(|config| config.encryption_key) + .ok_or(CoreError::EncryptionKeyNotSet)?; + + Ok(encryption_key) + } } diff --git a/core/src/db/entity/common.rs b/core/src/db/entity/common.rs index 5ce111237..11ca921a1 100644 --- a/core/src/db/entity/common.rs +++ b/core/src/db/entity/common.rs @@ -13,8 +13,27 @@ pub trait Cursor { pub enum LayoutMode { #[serde(rename = "GRID")] Grid, - #[serde(rename = "LIST")] - List, + #[serde(rename = "TABLE")] + TABLE, +} + +/// A struct representing a sort order for a column using react-table (tanstack) +#[derive(Default, Clone, Debug, Deserialize, Serialize, Type, ToSchema)] +pub struct ReactTableColumnSort { + /// The ID of the column + id: String, + /// The position of the column in the table + position: u32, +} + +/// A struct representing a global sort order for a table using react-table (tanstack) +#[derive(Default, Clone, Debug, Deserialize, Serialize, Type, ToSchema)] +pub struct ReactTableGlobalSort { + /// Whether the sorting is descending + desc: bool, + /// The ID of the column that is sorted + #[serde(rename = "id")] + column_id: String, } #[derive(Debug, Deserialize, Serialize, Type, ToSchema, Clone, Copy, PartialEq, Eq)] diff --git a/core/src/db/entity/emailer/device.rs b/core/src/db/entity/emailer/device.rs new file mode 100644 index 000000000..851ea64de --- /dev/null +++ b/core/src/db/entity/emailer/device.rs @@ -0,0 +1,24 @@ +use serde::{Deserialize, Serialize}; +use specta::Type; +use utoipa::ToSchema; + +use crate::prisma::registered_email_device; + +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct RegisteredEmailDevice { + id: i32, + name: String, + email: String, + forbidden: bool, +} + +impl From for RegisteredEmailDevice { + fn from(data: registered_email_device::Data) -> Self { + Self { + id: data.id, + name: data.name, + email: data.email, + 
forbidden: data.forbidden, + } + } +} diff --git a/core/src/db/entity/emailer/entity.rs b/core/src/db/entity/emailer/entity.rs new file mode 100644 index 000000000..165fbb1fe --- /dev/null +++ b/core/src/db/entity/emailer/entity.rs @@ -0,0 +1,128 @@ +use email::{EmailerClient, EmailerClientConfig}; +use serde::{Deserialize, Serialize}; +use specta::Type; +use utoipa::ToSchema; + +use crate::{ + prisma::emailer, + utils::{decrypt_string, encrypt_string}, + CoreError, CoreResult, Ctx, +}; + +/// The config for an SMTP emailer +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct EmailerConfig { + /// The email address to send from + pub sender_email: String, + /// The display name to use for the sender + pub sender_display_name: String, + /// The username to use for the SMTP server, typically the same as the sender email + pub username: String, + /// The encrypted password to use for the SMTP server + #[serde(skip_serializing)] + pub encrypted_password: String, + /// The SMTP host to use + pub smtp_host: String, + /// The SMTP port to use + pub smtp_port: u16, + /// Whether to use TLS for the SMTP connection + pub tls_enabled: bool, + /// The maximum size of an attachment in bytes + pub max_attachment_size_bytes: Option, + /// The maximum number of attachments that can be sent in a single email + pub max_num_attachments: Option, +} + +impl EmailerConfig { + /// Convert the config into a client config, which is used for the actual sending of emails + pub async fn into_client_config(self, ctx: &Ctx) -> CoreResult { + let password = decrypt_string(&self.encrypted_password, ctx).await?; + Ok(EmailerClientConfig { + sender_email: self.sender_email, + sender_display_name: self.sender_display_name, + username: self.username, + password, + host: self.smtp_host, + port: self.smtp_port, + tls_enabled: self.tls_enabled, + max_attachment_size_bytes: self.max_attachment_size_bytes, + max_num_attachments: self.max_num_attachments, + }) + } + + pub async fn 
from_client_config( + config: EmailerClientConfig, + ctx: &Ctx, + ) -> CoreResult { + let encrypted_password = encrypt_string(&config.password, ctx).await?; + Ok(EmailerConfig { + sender_email: config.sender_email, + sender_display_name: config.sender_display_name, + username: config.username, + encrypted_password, + smtp_host: config.host, + smtp_port: config.port, + tls_enabled: config.tls_enabled, + max_attachment_size_bytes: config.max_attachment_size_bytes, + max_num_attachments: config.max_num_attachments, + }) + } +} + +pub type EmailerConfigInput = EmailerClientConfig; + +/// An SMTP emailer entity, which stores SMTP configuration data to be used for sending emails. +/// +// Stump supports multiple emailers, however for the initial POC of this feature only one emailer +/// will be configurable. This will be expanded in the future. +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct SMTPEmailer { + pub id: i32, + /// The friendly name for the emailer, used primarily to identify it in the UI + pub name: String, + /// Whether the emailer is the primary emailer for the system + pub is_primary: bool, + /// The configuration for the emailer + pub config: EmailerConfig, + /// The last time the emailer was used + pub last_used_at: Option, +} + +impl SMTPEmailer { + pub async fn into_client(self, ctx: &Ctx) -> CoreResult { + let config = self.config.into_client_config(ctx).await?; + let template_dir = ctx.config.get_templates_dir(); + Ok(EmailerClient::new(config, template_dir)) + } +} + +#[derive(Serialize, Deserialize, ToSchema, Type)] +#[serde(untagged)] +pub enum EmailerSendTo { + Device { device_id: i32 }, + Anonymous { email: String }, +} + +impl TryFrom for SMTPEmailer { + type Error = CoreError; + + fn try_from(data: emailer::Data) -> Result { + Ok(SMTPEmailer { + id: data.id, + name: data.name, + is_primary: data.is_primary, + config: EmailerConfig { + sender_email: data.sender_email, + sender_display_name: data.sender_display_name, + username: 
data.username, + encrypted_password: data.encrypted_password, + smtp_host: data.smtp_host, + smtp_port: data.smtp_port as u16, + tls_enabled: data.tls_enabled, + max_attachment_size_bytes: data.max_attachment_size_bytes, + max_num_attachments: data.max_num_attachments, + }, + last_used_at: data.last_used_at.map(|t| t.to_rfc3339()), + }) + } +} diff --git a/core/src/db/entity/emailer/history.rs b/core/src/db/entity/emailer/history.rs new file mode 100644 index 000000000..49787eb5b --- /dev/null +++ b/core/src/db/entity/emailer/history.rs @@ -0,0 +1,83 @@ +use crate::{db::entity::User, CoreError, CoreResult}; +use serde::{Deserialize, Serialize}; +use specta::Type; +use utoipa::ToSchema; + +use crate::prisma::emailer_send_record; + +/// The metadata of an attachment that was sent with an email +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct AttachmentMeta { + /// The filename of the attachment + pub filename: String, + /// The associated media ID of the attachment, if there is one + pub media_id: Option, + /// The size of the attachment in bytes + pub size: i32, +} + +impl AttachmentMeta { + /// Create a new attachment meta + pub fn new(filename: String, media_id: Option, size: i32) -> Self { + Self { + filename, + media_id, + size, + } + } + + // TODO: This is a little awkward, and will have to change once emails properly send + // multiple attachments at once + /// Convert the attachment meta into a byte array, wrapped in a vec + pub fn into_data(&self) -> CoreResult> { + serde_json::to_vec(&vec![self]).map_err(CoreError::from) + } +} + +/// A record of an email that was sent, used to keep track of emails that +/// were sent by specific emailer(s) +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct EmailerSendRecord { + /// The ID of this record + id: i32, + /// The ID of the emailer that sent this email + emailer_id: i32, + /// The email of the recipient of this email + recipient_email: String, + /// The metadata of the attachment, if 
there is one + attachment_meta: Option>, + /// The timestamp of when this email was sent + sent_at: String, + /// The user ID of the user that sent this email + sent_by_user_id: Option, + /// The user that sent this email + #[serde(skip_serializing_if = "Option::is_none")] + sent_by: Option, +} + +impl TryFrom for EmailerSendRecord { + type Error = CoreError; + + fn try_from(data: emailer_send_record::Data) -> Result { + let sent_by = data.sent_by().ok().flatten().cloned().map(User::from); + let attachment_meta = data.attachment_meta.as_deref().and_then(|data| { + serde_json::from_slice(data).map_or_else( + |error| { + tracing::error!(?error, "Failed to deserialize attachment meta"); + None + }, + Some, + ) + }); + + Ok(Self { + id: data.id, + emailer_id: data.emailer_id, + recipient_email: data.recipient_email, + attachment_meta, + sent_at: data.sent_at.to_rfc3339(), + sent_by_user_id: data.sent_by_user_id.map(|id| id.to_string()), + sent_by, + }) + } +} diff --git a/core/src/db/entity/emailer/mod.rs b/core/src/db/entity/emailer/mod.rs new file mode 100644 index 000000000..06d24ac70 --- /dev/null +++ b/core/src/db/entity/emailer/mod.rs @@ -0,0 +1,7 @@ +mod device; +mod entity; +mod history; + +pub use device::*; +pub use entity::*; +pub use history::*; diff --git a/core/src/db/entity/library.rs b/core/src/db/entity/library.rs index d826cfc49..babdeb0f9 100644 --- a/core/src/db/entity/library.rs +++ b/core/src/db/entity/library.rs @@ -115,7 +115,7 @@ pub struct LibraryOptions { pub hard_delete_conversions: bool, pub library_pattern: LibraryPattern, pub thumbnail_config: Option, - // TODO: don't make Option after pcr supports nested create + // TODO(prisma 0.7.0): Nested create // https://github.com/Brendonovich/prisma-client-rust/issues/44 pub library_id: Option, } diff --git a/core/src/db/entity/mod.rs b/core/src/db/entity/mod.rs index 3c6b2993c..60f9b02c2 100644 --- a/core/src/db/entity/mod.rs +++ b/core/src/db/entity/mod.rs @@ -1,5 +1,6 @@ mod book_club; 
pub(crate) mod common; +mod emailer; mod epub; mod job; mod library; @@ -18,6 +19,7 @@ pub use self::epub::*; pub use self::log::*; pub use book_club::*; +pub use emailer::*; pub use job::*; pub use library::*; pub use media::*; @@ -30,7 +32,10 @@ pub use smart_list::*; pub use tag::*; pub use user::*; -pub use common::{AccessRole, Cursor, EntityVisibility, FileStatus, LayoutMode}; +pub use common::{ + AccessRole, Cursor, EntityVisibility, FileStatus, LayoutMode, ReactTableColumnSort, + ReactTableGlobalSort, +}; pub mod macros { pub use super::book_club::prisma_macros::*; diff --git a/core/src/db/entity/notifier.rs b/core/src/db/entity/notifier.rs index bef07228b..ced452374 100644 --- a/core/src/db/entity/notifier.rs +++ b/core/src/db/entity/notifier.rs @@ -1,4 +1,4 @@ -use crate::{prisma::notifier, CoreError}; +use crate::{prisma::notifier, utils::encrypt_string, CoreError, CoreResult, Ctx}; use serde::{Deserialize, Serialize}; use specta::Type; use std::str::FromStr; @@ -6,24 +6,39 @@ use utoipa::ToSchema; #[derive(Serialize, Deserialize, ToSchema, Type)] pub struct Notifier { + /// The ID of the notifier id: i32, // Note: This isn't really needed, we could rely on tags. However, in order to have at least one // readable field in the DB (since the config is dumped to bytes) I left this in #[serde(rename = "type")] _type: NotifierType, + /// The config is stored as bytes in the DB, and is deserialized into the correct type when + /// needed. If there are sensitive fields, they should be encrypted before being stored. config: NotifierConfig, } +/// The config for a Discord notifier +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct DiscordConfig { + /// The webhook URL to send to + pub webhook_url: String, +} + +/// The config for a Telegram notifier +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct TelegramConfig { + /// The encrypted token to use for the Telegram bot. This is encrypted before being stored, + /// and decrypted when needed. 
+ pub encrypted_token: String, + /// The chat ID to send to + pub chat_id: String, +} + #[derive(Serialize, Deserialize, ToSchema, Type)] #[serde(untagged)] pub enum NotifierConfig { - Discord { - webhook_url: String, - }, - Telegram { - encrypted_token: String, - chat_id: String, - }, + Discord(DiscordConfig), + Telegram(TelegramConfig), } impl NotifierConfig { @@ -32,6 +47,34 @@ impl NotifierConfig { } } +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct TelegramConfigInput { + pub token: String, + pub chat_id: String, +} + +#[derive(Serialize, Deserialize, ToSchema, Type)] +#[serde(untagged)] +pub enum NotifierConfigInput { + Discord(DiscordConfig), + Telegram(TelegramConfigInput), +} + +impl NotifierConfigInput { + pub async fn into_config(self, ctx: &Ctx) -> CoreResult { + match self { + NotifierConfigInput::Discord(config) => Ok(NotifierConfig::Discord(config)), + NotifierConfigInput::Telegram(config) => { + let encrypted_token = encrypt_string(&config.token, ctx).await?; + Ok(NotifierConfig::Telegram(TelegramConfig { + encrypted_token, + chat_id: config.chat_id, + })) + }, + } + } +} + #[derive(Serialize, Deserialize, ToSchema, Type)] pub enum NotifierType { #[serde(rename = "DISCORD")] diff --git a/core/src/db/entity/series/entity.rs b/core/src/db/entity/series/entity.rs index c39f32577..b5764c31c 100644 --- a/core/src/db/entity/series/entity.rs +++ b/core/src/db/entity/series/entity.rs @@ -67,7 +67,7 @@ impl Series { self.media_count = Some(count); } - // TODO: change once nested creates are supported + // TODO(prisma 0.7.0): Nested created pub fn create_action( self, ) -> ( diff --git a/core/src/db/entity/smart_list/view.rs b/core/src/db/entity/smart_list/view.rs index 7449508e2..949ab283b 100644 --- a/core/src/db/entity/smart_list/view.rs +++ b/core/src/db/entity/smart_list/view.rs @@ -2,7 +2,11 @@ use serde::{Deserialize, Serialize}; use specta::Type; use utoipa::ToSchema; -use crate::{prisma::smart_list_view, CoreError}; +use crate::{ + 
db::entity::common::{ReactTableColumnSort, ReactTableGlobalSort}, + prisma::smart_list_view, + CoreError, +}; #[derive(Default, Clone, Debug, Deserialize, Serialize, Type, ToSchema)] pub struct SmartListView { @@ -19,15 +23,15 @@ pub struct SmartListView { pub struct SmartListViewConfig { /// The columns present in the book table(s) - book_columns: Vec, + book_columns: Vec, /// The columns present in the grouping entity table - group_columns: Vec, + group_columns: Vec, /// The sorting state of the book table(s) #[serde(rename = "book_sorting")] - book_sorting_state: Option>, + book_sorting_state: Option>, /// The sorting state of the grouping entity table view #[serde(rename = "group_sorting")] - group_sorting_state: Option>, + group_sorting_state: Option>, /// Whether the table view allows multi-sorting #[serde(default)] #[specta(optional)] @@ -38,24 +42,6 @@ pub struct SmartListViewConfig { search: Option, } -#[derive(Default, Clone, Debug, Deserialize, Serialize, Type, ToSchema)] - -pub struct SmartListTableSortingState { - /// Whether the sorting is descending - desc: bool, - /// The ID of the column that is sorted - #[serde(rename = "id")] - column_id: String, -} - -#[derive(Default, Clone, Debug, Deserialize, Serialize, Type, ToSchema)] -pub struct SmartListTableColumnSelection { - /// The ID of the column - id: String, - /// The position of the column in the table - position: u32, -} - impl TryFrom for SmartListView { type Error = CoreError; diff --git a/core/src/db/entity/user/entity.rs b/core/src/db/entity/user/entity.rs index e4bb8fcc5..439b26bde 100644 --- a/core/src/db/entity/user/entity.rs +++ b/core/src/db/entity/user/entity.rs @@ -7,7 +7,9 @@ use crate::{ prisma, }; -use super::{AgeRestriction, LoginActivity, UserPermission, UserPreferences}; +use super::{ + AgeRestriction, LoginActivity, PermissionSet, UserPermission, UserPreferences, +}; #[derive(Default, Debug, Clone, Serialize, Deserialize, Type, ToSchema)] pub struct User { @@ -71,19 +73,14 @@ 
impl From for User { let login_sessions_count = data.sessions().map(|sessions| sessions.len() as i32).ok(); + let permission_set = data.permissions.map(PermissionSet::from); + User { id: data.id, username: data.username, is_server_owner: data.is_server_owner, - permissions: data - .permissions - .map(|p| { - p.split(',') - .map(|p| p.trim()) - .filter(|p| !p.is_empty()) - .map(|p| p.into()) - .collect() - }) + permissions: permission_set + .map(|ps| ps.resolve_into_vec()) .unwrap_or_default(), max_sessions_allowed: data.max_sessions_allowed, user_preferences, diff --git a/core/src/db/entity/user/permissions.rs b/core/src/db/entity/user/permissions.rs index a2d06a529..dbd77a86a 100644 --- a/core/src/db/entity/user/permissions.rs +++ b/core/src/db/entity/user/permissions.rs @@ -1,3 +1,4 @@ +use itertools::Itertools; use serde::{Deserialize, Serialize}; use specta::Type; use utoipa::ToSchema; @@ -23,7 +24,9 @@ impl From for AgeRestriction { // TODO: consider adding self:update permission, useful for child accounts /// Permissions that can be granted to a user. Some permissions are implied by others, /// and will be automatically granted if the "parent" permission is granted. 
-#[derive(Debug, Clone, Copy, Serialize, Deserialize, Type, ToSchema, Eq, PartialEq)] +#[derive( + Debug, Clone, Copy, Serialize, Deserialize, Type, ToSchema, Eq, PartialEq, Hash, +)] pub enum UserPermission { ///TODO: Expand permissions for bookclub + smartlist /// Grant access to the book club feature @@ -32,6 +35,21 @@ pub enum UserPermission { /// Grant access to create a book club (access book club) #[serde(rename = "bookclub:create")] CreateBookClub, + /// Grant access to read any emailers in the system + #[serde(rename = "emailer:read")] + EmailerRead, + /// Grant access to create an emailer + #[serde(rename = "emailer:create")] + EmailerCreate, + /// Grant access to manage an emailer + #[serde(rename = "emailer:manage")] + EmailerManage, + /// Grant access to send an email + #[serde(rename = "email:send")] + EmailSend, + /// Grant access to send an arbitrary email, bypassing any registered device requirements + #[serde(rename = "email:arbitrary_send")] + EmailArbitrarySend, /// Grant access to access the smart list feature. 
This includes the ability to create and edit smart lists #[serde(rename = "smartlist:read")] AccessSmartList, @@ -91,6 +109,12 @@ impl UserPermission { pub fn associated(&self) -> Vec { match self { UserPermission::CreateBookClub => vec![UserPermission::AccessBookClub], + UserPermission::EmailerRead => vec![UserPermission::EmailSend], + UserPermission::EmailerCreate => vec![UserPermission::EmailerRead], + UserPermission::EmailerManage => { + vec![UserPermission::EmailerCreate, UserPermission::EmailerRead] + }, + UserPermission::EmailArbitrarySend => vec![UserPermission::EmailSend], UserPermission::CreateLibrary => { vec![UserPermission::EditLibrary, UserPermission::ScanLibrary] }, @@ -123,6 +147,11 @@ impl ToString for UserPermission { match self { UserPermission::AccessBookClub => "bookclub:read".to_string(), UserPermission::CreateBookClub => "bookclub:create".to_string(), + UserPermission::EmailerRead => "emailer:read".to_string(), + UserPermission::EmailerCreate => "emailer:create".to_string(), + UserPermission::EmailerManage => "emailer:manage".to_string(), + UserPermission::EmailSend => "email:send".to_string(), + UserPermission::EmailArbitrarySend => "email:arbitrary_send".to_string(), UserPermission::AccessSmartList => "smartlist:read".to_string(), UserPermission::FileExplorer => "file:explorer".to_string(), UserPermission::UploadFile => "file:upload".to_string(), @@ -149,6 +178,11 @@ impl From<&str> for UserPermission { match s { "bookclub:read" => UserPermission::AccessBookClub, "bookclub:create" => UserPermission::CreateBookClub, + "emailer:read" => UserPermission::EmailerRead, + "emailer:create" => UserPermission::EmailerCreate, + "emailer:manage" => UserPermission::EmailerManage, + "email:send" => UserPermission::EmailSend, + "email:arbitrary_send" => UserPermission::EmailArbitrarySend, "smartlist:read" => UserPermission::AccessSmartList, "file:explorer" => UserPermission::FileExplorer, "file:upload" => UserPermission::UploadFile, @@ -165,7 +199,39 @@ 
impl From<&str> for UserPermission { "notifier:manage" => UserPermission::ManageNotifier, "notifier:delete" => UserPermission::DeleteNotifier, "server:manage" => UserPermission::ManageServer, + // FIXME: Don't panic smh _ => panic!("Invalid user permission: {}", s), } } } + +/// A wrapper around a Vec used for including any associated permissions +/// from the underlying permissions +#[derive(Debug, Serialize, Deserialize, ToSchema, Type)] +pub struct PermissionSet(Vec); + +impl PermissionSet { + /// Unwrap the underlying Vec and include any associated permissions + pub fn resolve_into_vec(self) -> Vec { + self.0 + .into_iter() + .flat_map(|permission| { + let mut v = vec![permission]; + v.extend(permission.associated()); + v + }) + .unique() + .collect() + } +} + +impl From for PermissionSet { + fn from(s: String) -> PermissionSet { + let permissions = s + .split(',') + .map(|s| s.trim()) + .map(UserPermission::from) + .collect(); + PermissionSet(permissions) + } +} diff --git a/core/src/db/query/ordering.rs b/core/src/db/query/ordering.rs index 3d994b72e..85de05dc7 100644 --- a/core/src/db/query/ordering.rs +++ b/core/src/db/query/ordering.rs @@ -44,6 +44,8 @@ impl Default for QueryOrder { } } +// TODO(prisma 0.7.0): Support order by relation + impl TryInto for QueryOrder { type Error = CoreError; @@ -70,6 +72,8 @@ impl TryInto for QueryOrder { } } +// TODO(prisma 0.7.0): Support order by relation + impl TryInto for QueryOrder { type Error = CoreError; @@ -92,6 +96,8 @@ impl TryInto for QueryOrder { } } +// TODO(prisma 0.7.0): Support order by relation + impl TryInto for QueryOrder { type Error = CoreError; diff --git a/core/src/error.rs b/core/src/error.rs index e8ce28a6e..a28d86e12 100644 --- a/core/src/error.rs +++ b/core/src/error.rs @@ -7,12 +7,18 @@ pub type CoreResult = Result; #[derive(Error, Debug)] pub enum CoreError { - #[error("Failed to initialize Stump core: {0}")] - InitializationError(String), #[error( "Attempted to initialize StumpCore with a 
config dir that does not exist: {0}" )] ConfigDirDoesNotExist(String), + #[error("Encryption key must be set")] + EncryptionKeyNotSet, + #[error("Failed to encrypt: {0}")] + EncryptionFailed(String), + #[error("Failed to decrypt: {0}")] + DecryptionFailed(String), + #[error("Failed to initialize Stump core: {0}")] + InitializationError(String), #[error("Query error: {0}")] QueryError(#[from] prisma_client_rust::queries::QueryError), #[error("Invalid query error: {0}")] diff --git a/core/src/filesystem/archive.rs b/core/src/filesystem/archive.rs index d570570c5..c0ec1c300 100644 --- a/core/src/filesystem/archive.rs +++ b/core/src/filesystem/archive.rs @@ -5,7 +5,7 @@ use std::{ }; use tracing::{trace, warn}; use walkdir::WalkDir; -use zip::write::FileOptions; +use zip::{write::FileOptions, CompressionMethod}; /// Creates a new zip file at `destination` from the contents of the folder `unpacked_path`. pub(crate) fn zip_dir( @@ -17,8 +17,8 @@ pub(crate) fn zip_dir( let mut zip_writer = zip::ZipWriter::new(zip_file); - let options = FileOptions::default() - .compression_method(zip::CompressionMethod::Stored) + let options: FileOptions<'_, ()> = FileOptions::default() + .compression_method(CompressionMethod::Stored) .unix_permissions(0o755); trace!("Creating zip file at {:?}", destination); @@ -35,7 +35,6 @@ pub(crate) fn zip_dir( // Some unzip tools unzip files with directory paths correctly, some do not! if path.is_file() { trace!("Adding file to zip file: {:?} as {:?}", path, name); - #[allow(deprecated)] zip_writer.start_file_from_path(name, options)?; let mut f = File::open(path)?; diff --git a/core/src/filesystem/common.rs b/core/src/filesystem/common.rs index 0e25b2e0a..30fe3bdfa 100644 --- a/core/src/filesystem/common.rs +++ b/core/src/filesystem/common.rs @@ -71,14 +71,35 @@ pub struct FileParts { } pub trait PathUtils { + /// Returns the file name, file stem, and extension of the file. 
fn file_parts(&self) -> FileParts; + /// Returns the result of `infer::get_from_path`. fn infer_kind(&self) -> std::io::Result>; + /// Returns the content type of the file based on the extension. + fn naive_content_type(&self) -> ContentType; + /// Returns true if the file is hidden (i.e. starts with a dot). Also checks for + /// files within a __MACOSX directory. fn is_hidden_file(&self) -> bool; + /// Returns true if the file is supported by Stump. fn should_ignore(&self) -> bool; + /// Returns true if the file is an image. fn is_supported(&self) -> bool; + /// Returns true if the file is an image. fn is_img(&self) -> bool; + /// Returns true if the file is a thumbnail image. This calls the `is_img` function + /// from the same trait, and then checks if the file name is one of the following: + /// - cover + /// - thumbnail + /// - folder + /// + /// These will *potentially* be reserved filenames in the future... Not sure + /// if this functionality will be kept. fn is_thumbnail_img(&self) -> bool; + /// Returns true if the directory has any media files in it. This is a shallow + /// check, and will not check subdirectories. fn dir_has_media(&self) -> bool; + /// Returns true if the directory has any media files in it. This is a deep + /// check, and will check *all* subdirectories. fn dir_has_media_deep(&self) -> bool; } @@ -120,13 +141,33 @@ impl PathUtils for Path { infer::get_from_path(self) } + fn naive_content_type(&self) -> ContentType { + let extension = self + .extension() + .and_then(|e| e.to_str()) + .unwrap_or_default(); + + if extension.is_empty() { + return ContentType::UNKNOWN; + } + + ContentType::from_extension(extension) + } + /// Returns true if the file is hidden (i.e. starts with a dot). fn is_hidden_file(&self) -> bool { + // If the file is contained inside of a __MACOSX directory, assume it is hidden. + // We don't want to deal with these files. + if self.starts_with("__MACOSX") { + return true; + } + let FileParts { file_name, .. 
} = self.file_parts(); file_name.starts_with('.') } + // TODO(327): Remove infer usage /// Returns true if the file is a supported media file. This is a strict check when /// infer can determine the file type, and a loose extension-based check when infer cannot. fn is_supported(&self) -> bool { @@ -148,28 +189,11 @@ impl PathUtils for Path { !self.is_supported() } - /// Returns true if the file is an image. This is a strict check when infer - /// can determine the file type, and a loose extension-based check when infer cannot. + /// Returns true if the file is an image. This is a naive check based on the extension. fn is_img(&self) -> bool { - if let Ok(Some(file_type)) = infer::get_from_path(self) { - return file_type.mime_type().starts_with("image/"); - } - - let FileParts { extension, .. } = self.file_parts(); - - extension.eq_ignore_ascii_case("jpg") - || extension.eq_ignore_ascii_case("png") - || extension.eq_ignore_ascii_case("jpeg") + self.naive_content_type().is_image() } - /// Returns true if the file is a thumbnail image. This calls the `is_img` function - /// from the same trait, and then checks if the file name is one of the following: - /// - cover - /// - thumbnail - /// - folder - /// - /// These will *potentially* be reserved filenames in the future... Not sure - /// if this functionality will be kept. fn is_thumbnail_img(&self) -> bool { if !self.is_img() { return false; @@ -180,8 +204,6 @@ impl PathUtils for Path { is_accepted_cover_name(&file_stem) } - /// Returns true if the directory has any media files in it. This is a shallow - /// check, and will not check subdirectories. fn dir_has_media(&self) -> bool { if !self.is_dir() { return false; @@ -205,8 +227,6 @@ impl PathUtils for Path { } } - /// Returns true if the directory has any media files in it. This is a deep - /// check, and will check *all* subdirectories. 
fn dir_has_media_deep(&self) -> bool { if !self.is_dir() { return false; diff --git a/core/src/filesystem/image/thumbnail/generation_job.rs b/core/src/filesystem/image/thumbnail/generation_job.rs index 7453e9e48..0fc6e9a0d 100644 --- a/core/src/filesystem/image/thumbnail/generation_job.rs +++ b/core/src/filesystem/image/thumbnail/generation_job.rs @@ -3,8 +3,8 @@ use specta::Type; use crate::{ job::{ - error::JobError, JobExecuteLog, JobExt, JobOutputExt, JobTaskOutput, WorkerCtx, - WorkingState, WrappedJob, + error::JobError, JobExecuteLog, JobExt, JobOutputExt, JobProgress, JobTaskOutput, + WorkerCtx, WorkingState, WrappedJob, }, prisma::{media, series}, }; @@ -149,6 +149,7 @@ impl JobExt for ThumbnailGenerationJob { ThumbnailGenerationJobVariant::MediaGroup(media_ids) => media_ids.clone(), }; + // TODO Should find a way to keep the same ThumbnailManager around for the whole job execution let manager = ThumbnailManager::new(ctx.config.clone()) .map_err(|e| JobError::TaskFailed(e.to_string()))?; @@ -202,6 +203,10 @@ impl JobExt for ThumbnailGenerationJob { .filter(|m| manager.has_thumbnail(m.id.as_str())) .map(|m| m.id.clone()) .collect::>(); + ctx.report_progress(JobProgress::msg( + format!("Removing {} thumbnails", media_ids_to_remove.len()) + .as_str(), + )); let JobTaskOutput { output: sub_output, logs: sub_logs, @@ -220,6 +225,13 @@ impl JobExt for ThumbnailGenerationJob { .collect::>() }; + ctx.report_progress(JobProgress::msg( + format!( + "Generating {} thumbnails", + media_to_generate_thumbnails.len() + ) + .as_str(), + )); let JobTaskOutput { output: sub_output, logs: sub_logs, diff --git a/core/src/filesystem/image/thumbnail/mod.rs b/core/src/filesystem/image/thumbnail/mod.rs index 5936134e6..b464cb589 100644 --- a/core/src/filesystem/image/thumbnail/mod.rs +++ b/core/src/filesystem/image/thumbnail/mod.rs @@ -68,7 +68,6 @@ pub fn generate_thumbnail( Ok(thumbnail_path) } -// TODO: does this need to return a result? 
pub fn generate_thumbnails( media: &[Media], options: ImageProcessorOptions, diff --git a/core/src/filesystem/image/webp.rs b/core/src/filesystem/image/webp.rs index f237420a7..27b2f8d3a 100644 --- a/core/src/filesystem/image/webp.rs +++ b/core/src/filesystem/image/webp.rs @@ -1,4 +1,6 @@ -use image::{imageops, io::Reader, DynamicImage, EncodableLayout, GenericImageView}; +use std::fs; + +use image::{imageops, DynamicImage, EncodableLayout, GenericImageView}; use webp::Encoder; use crate::filesystem::{error::FileError, image::process::resized_dimensions}; @@ -12,7 +14,8 @@ impl ImageProcessor for WebpProcessor { buffer: &[u8], options: ImageProcessorOptions, ) -> Result, FileError> { - let mut image = image::load_from_memory(buffer)?; + let mut image = + image::load_from_memory_with_format(buffer, image::ImageFormat::WebP)?; if let Some(resize_options) = options.resize_options { let resized_image = WebpProcessor::resize_image(image, resize_options); @@ -30,8 +33,8 @@ impl ImageProcessor for WebpProcessor { path: &str, options: ImageProcessorOptions, ) -> Result, FileError> { - let image = Reader::open(path)?.with_guessed_format()?.decode()?; - Self::generate(image.as_bytes(), options) + let bytes = fs::read(path)?; + Self::generate(&bytes, options) } } @@ -53,3 +56,65 @@ impl WebpProcessor { )) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::image::{ImageFormat, ImageProcessorOptions, ImageResizeMode}; + use std::{fs, path::PathBuf}; + + #[test] + fn test_generate_webp_from_data() { + let bytes = get_test_webp_data(); + let options = ImageProcessorOptions { + resize_options: None, + format: ImageFormat::Webp, + quality: None, + page: None, + }; + + WebpProcessor::generate(&bytes, options).unwrap(); + } + + #[test] + fn test_generate_webp_from_path() { + let webp_path = get_test_webp_path(); + let options = ImageProcessorOptions { + resize_options: None, + format: ImageFormat::Webp, + quality: None, + page: None, + }; + + 
WebpProcessor::generate_from_path(&webp_path, options).unwrap(); + } + + #[test] + fn test_resize_webp() { + let webp_path = get_test_webp_path(); + let options = ImageProcessorOptions { + resize_options: Some(ImageResizeOptions { + mode: ImageResizeMode::Scaled, + height: 2.0, + width: 2.0, + }), + format: ImageFormat::Webp, + quality: None, + page: None, + }; + + WebpProcessor::generate_from_path(&webp_path, options).unwrap(); + } + + fn get_test_webp_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/example.webp") + .to_string_lossy() + .to_string() + } + + fn get_test_webp_data() -> Vec { + let path = get_test_webp_path(); + fs::read(path).expect("Failed to fetch example webp image") + } +} diff --git a/core/src/filesystem/media/analyze_media_job.rs b/core/src/filesystem/media/analyze_media_job.rs new file mode 100644 index 000000000..8b4c77fe2 --- /dev/null +++ b/core/src/filesystem/media/analyze_media_job.rs @@ -0,0 +1,245 @@ +use serde::{Deserialize, Serialize}; +use specta::Type; + +use crate::{ + db::entity::Media, + filesystem::media::process::get_page_count, + job::{ + error::JobError, JobExt, JobOutputExt, JobTaskOutput, WorkerCtx, WorkingState, + WrappedJob, + }, + prisma::{media, media_metadata, series}, +}; + +type MediaID = String; +type SeriesID = String; +type LibraryID = String; + +#[derive(Clone)] +pub enum AnalyzeMediaJobVariant { + /// Analyze an individual media item, specified by ID. + AnalyzeSingleItem(MediaID), + /// Analyze all media in a library, specified by library ID. + AnalyzeLibrary(LibraryID), + /// Analyze all media in a series, specified by series ID. + AnalyzeSeries(SeriesID), + /// Analyze all media in a media group, specified with a list of media IDs. + AnalyzeMediaGroup(Vec), +} + +#[derive(Serialize, Deserialize, Debug)] +pub enum AnalyzeMediaTask { + /// Analyze the image for an individual media item, specified by ID. 
+ AnalyzeImage(MediaID), +} + +#[derive(Clone, Serialize, Deserialize, Default, Debug, Type)] +pub struct AnalyzeMediaOutput { + /// The number of images analyzed + images_analyzed: u64, + /// The number of media items updated + media_updated: u64, +} + +impl JobOutputExt for AnalyzeMediaOutput { + fn update(&mut self, updated: Self) { + self.images_analyzed += updated.images_analyzed; + } +} + +/// A job that analyzes a media item and updates the database +/// with information from the analysis. +#[derive(Clone)] +pub struct AnalyzeMediaJob { + pub variant: AnalyzeMediaJobVariant, +} + +impl AnalyzeMediaJob { + /// Create a new [AnalyzeMediaJob] for the media specified by `id`. + pub fn new(variant: AnalyzeMediaJobVariant) -> Box> { + WrappedJob::new(Self { variant }) + } +} + +#[async_trait::async_trait] +impl JobExt for AnalyzeMediaJob { + const NAME: &'static str = "analyze_media"; + + type Output = AnalyzeMediaOutput; + type Task = AnalyzeMediaTask; + + fn description(&self) -> Option { + match &self.variant { + AnalyzeMediaJobVariant::AnalyzeSingleItem(id) => { + Some(format!("Analyze media item with id: {}", id)) + }, + AnalyzeMediaJobVariant::AnalyzeLibrary(id) => { + Some(format!("Analyze library with id: {}", id)) + }, + AnalyzeMediaJobVariant::AnalyzeSeries(id) => { + Some(format!("Analyze series with id: {}", id)) + }, + AnalyzeMediaJobVariant::AnalyzeMediaGroup(ids) => { + Some(format!("Analyze media group with ids: {:?}", ids)) + }, + } + } + + async fn init( + &mut self, + ctx: &WorkerCtx, + ) -> Result, JobError> { + let output = Self::Output::default(); + + // We match over the job variant to build a list of tasks to process + let tasks = match &self.variant { + // Single item is easy + AnalyzeMediaJobVariant::AnalyzeSingleItem(id) => { + vec![AnalyzeMediaTask::AnalyzeImage(id.clone())] + }, + // For libraries we need a list of ids + AnalyzeMediaJobVariant::AnalyzeLibrary(id) => { + let library_media = ctx + .db + .media() + 
.find_many(vec![media::series::is(vec![series::library_id::equals( + Some(id.clone()), + )])]) + .select(media::select!({ id })) + .exec() + .await + .map_err(|e| JobError::InitFailed(e.to_string()))?; + + library_media + .into_iter() + .map(|media| AnalyzeMediaTask::AnalyzeImage(media.id)) + .collect() + }, + // We also need a list for series + AnalyzeMediaJobVariant::AnalyzeSeries(id) => { + let series_media = ctx + .db + .media() + .find_many(vec![media::series_id::equals(Some(id.clone()))]) + .select(media::select!({ id })) + .exec() + .await + .map_err(|e| JobError::InitFailed(e.to_string()))?; + + series_media + .into_iter() + .map(|media| AnalyzeMediaTask::AnalyzeImage(media.id)) + .collect() + }, + // Media groups already include a vector of ids + AnalyzeMediaJobVariant::AnalyzeMediaGroup(ids) => ids + .iter() + .map(|id| AnalyzeMediaTask::AnalyzeImage(id.clone())) + .collect(), + }; + + Ok(WorkingState { + output: Some(output), + tasks: tasks.into(), + completed_tasks: 0, + logs: vec![], + }) + } + + async fn execute_task( + &self, + ctx: &WorkerCtx, + task: Self::Task, + ) -> Result, JobError> { + let mut output = Self::Output::default(); + + match task { + AnalyzeMediaTask::AnalyzeImage(id) => { + // Get media by id from the database + let media_item: Media = ctx + .db + .media() + .find_unique(media::id::equals(id.clone())) + .with(media::metadata::fetch()) + .exec() + .await + .map_err(|e: prisma_client_rust::QueryError| { + JobError::TaskFailed(e.to_string()) + })? + .ok_or_else(|| { + JobError::TaskFailed(format!( + "Unable to find media item with id: {}", + id + )) + })? + .into(); + + let path = media_item.path; + let page_count = get_page_count(&path, &ctx.config)?; + output.images_analyzed += 1; + + // Check if a metadata update is neded + if let Some(metadata) = media_item.metadata { + // Great, there's already metadata! + // Check if the value matches the currently recorded one, update if not. 
+ if let Some(meta_page_count) = metadata.page_count { + if meta_page_count != page_count { + ctx.db + .media_metadata() + .update( + media_metadata::media_id::equals(media_item.id), + vec![media_metadata::page_count::set(Some( + page_count, + ))], + ) + .exec() + .await?; + output.media_updated += 1; + } + } else { + // Page count was `None` so we update it. + ctx.db + .media_metadata() + .update( + media_metadata::media_id::equals(media_item.id), + vec![media_metadata::page_count::set(Some(page_count))], + ) + .exec() + .await?; + output.media_updated += 1; + } + } else { + // Metadata doesn't exist, create it + let new_metadata = ctx + .db + .media_metadata() + .create(vec![ + media_metadata::id::set(media_item.id.clone()), + media_metadata::page_count::set(Some(page_count)), + ]) + .exec() + .await?; + + // And link it to the media item + ctx.db + .media() + .update( + media::id::equals(media_item.id), + vec![media::metadata::connect(media_metadata::id::equals( + new_metadata.id, + ))], + ) + .exec() + .await?; + output.media_updated += 1; + } + }, + } + + Ok(JobTaskOutput { + output, + subtasks: vec![], + logs: vec![], + }) + } +} diff --git a/core/src/filesystem/media/builder.rs b/core/src/filesystem/media/builder.rs index 3fc3590ef..bb451ea39 100644 --- a/core/src/filesystem/media/builder.rs +++ b/core/src/filesystem/media/builder.rs @@ -141,3 +141,58 @@ impl SeriesBuilder { }) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::media::tests::{ + get_test_cbz_path, get_test_epub_path, get_test_pdf_path, get_test_rar_path, + get_test_zip_path, + }; + + #[test] + fn test_build_media() { + // Test with zip + let media = build_media_test_helper(get_test_zip_path()); + assert!(media.is_ok()); + let media = media.unwrap(); + assert_eq!(media.extension, "zip"); + + // Test with cbz + let media = build_media_test_helper(get_test_cbz_path()); + assert!(media.is_ok()); + let media = media.unwrap(); + assert_eq!(media.extension, "cbz"); + + // 
Test with rar + let media = build_media_test_helper(get_test_rar_path()); + assert!(media.is_ok()); + let media = media.unwrap(); + assert_eq!(media.extension, "rar"); + + // Test with epub + let media = build_media_test_helper(get_test_epub_path()); + assert!(media.is_ok()); + let media = media.unwrap(); + assert_eq!(media.extension, "epub"); + + // Test with pdf + let media = build_media_test_helper(get_test_pdf_path()); + assert!(media.is_ok()); + let media = media.unwrap(); + assert_eq!(media.extension, "pdf"); + } + + fn build_media_test_helper(path: String) -> Result { + let path = Path::new(&path); + let library_options = LibraryOptions { + convert_rar_to_zip: false, + hard_delete_conversions: false, + ..Default::default() + }; + let series_id = "series_id"; + let config = Arc::new(StumpConfig::debug()); + + MediaBuilder::new(&path, series_id, library_options, &config).build() + } +} diff --git a/core/src/filesystem/media/common.rs b/core/src/filesystem/media/common.rs index 63c07d5b3..f30ac715c 100644 --- a/core/src/filesystem/media/common.rs +++ b/core/src/filesystem/media/common.rs @@ -36,11 +36,13 @@ where #[cfg(test)] mod tests { + use super::*; + #[test] fn test_is_accepted_cover_name() { let cover_file_names = ["cover", "thumbnail", "folder"]; for cover_name in cover_file_names { - assert!(super::is_accepted_cover_name(cover_name)); + assert!(is_accepted_cover_name(cover_name)); } } @@ -48,14 +50,14 @@ mod tests { fn test_is_not_accepted_cover_name() { let cover_file_names = vec!["cover1", "thumbnail1", "folder1"]; for cover_name in cover_file_names { - assert!(!super::is_accepted_cover_name(cover_name)); + assert!(!is_accepted_cover_name(cover_name)); } } #[test] fn test_sort_numeric_file_names() { let mut names = ["3.jpg", "1.jpg", "5.jpg", "2.jpg", "4.jpg"]; - super::sort_file_names(&mut names); + sort_file_names(&mut names); let expected = ["1.jpg", "2.jpg", "3.jpg", "4.jpg", "5.jpg"]; assert_eq!(names, expected); } @@ -63,18 +65,31 @@ mod tests { 
#[test] fn test_sort_alphanumeric_file_names() { let mut names = ["shot-2", "shot-1", "shot-11", "shot-10", "shot-3"]; - super::sort_file_names(&mut names); + sort_file_names(&mut names); let expected = ["shot-1", "shot-2", "shot-3", "shot-10", "shot-11"]; assert_eq!(names, expected); } #[test] - fn should_parse_incomplete_metadata() { + fn test_should_parse_incomplete_metadata() { let contents = "\n\n Delete\n 1\n 2016\n In the near future, where science can implant or remove human memories and the government uses brain scan technology in criminal investigations, a mute girl witnesses a multiple murder and must turn to a handyman for protection from the police and an army of killers. From the Harley Quinn team of writers Jimmy Palmiotti and Justin Grey and artist John Timms, with covers by Amanda Conner.\n\n\nNote: The digital edition (3/2/2016) for this issue was released before the print edition.\n Tagged with ComicTagger 1.3.0-alpha.0 using info from Comic Vine on 2021-12-01 20:34:52. 
[Issue ID 517895]\n 2016\n 03\n 31\n Jimmy Palmiotti, Justin Gray\n John Timms, John Timms\n John Timms, John Timms\n David Curiel, Paul Mounts\n Bill Tortolini\n Amanda Conner, Paul Mounts\n Alex Wald, Joanne Starer\n 1First Comics\n https://comicvine.gamespot.com/delete-1/4000-517895/\n 27\n (digital) (Son of Ultron-Empire)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n"; - let metadata = super::metadata_from_buf(contents.to_string()).unwrap(); + let metadata = metadata_from_buf(contents.to_string()).unwrap(); assert_eq!(metadata.series, Some("Delete".to_string())); assert_eq!(metadata.number, Some(1f64)); assert_eq!(metadata.volume, Some(2016)); } + + #[test] + fn test_malformed_media_xml() { + // An empty string + let contents = String::from(""); + let metadata = metadata_from_buf(contents); + assert!(metadata.is_none()); + + // Something JSON-ish instead of xml + let contents = String::from("metadata: { contents: oops }"); + let metadata = metadata_from_buf(contents); + assert!(metadata.is_none()); + } } diff --git a/core/src/filesystem/media/epub.rs b/core/src/filesystem/media/epub.rs index 4d0094676..9e0c80401 100644 --- a/core/src/filesystem/media/epub.rs +++ b/core/src/filesystem/media/epub.rs @@ -97,6 +97,15 @@ impl FileProcessor for EpubProcessor { } } + fn get_page_count(path: &str, _: &StumpConfig) -> Result { + // TODO At present, this likely does not return the correct count of + // pages. It should be updated when a better method is determined. + let epub_file = Self::open(path)?; + let pages = epub_file.get_num_pages() as i32; + + Ok(pages) + } + fn get_page_content_types( path: &str, pages: Vec, @@ -108,8 +117,8 @@ impl FileProcessor for EpubProcessor { for chapter in pages { if chapter == 1 { // Assume this is the cover page - // FIXME: This is wrong. I just don't want to deal with it right now... 
- content_types.insert(chapter, ContentType::JPEG); + let (content_type, _) = Self::get_cover_internal(&mut epub_file)?; + content_types.insert(chapter, content_type); continue; } @@ -144,20 +153,9 @@ impl EpubProcessor { EpubDoc::new(path).map_err(|e| FileError::EpubOpenError(e.to_string())) } - /// Returns the cover image for the epub file. If a cover image cannot be extracted via the - /// metadata, it will go through two rounds of fallback methods: - /// - /// 1. Attempt to find a resource with the default ID of "cover" - /// 2. Attempt to find a resource with a mime type of "image/jpeg" or "image/png", and weight the - /// results based on how likely they are to be the cover. For example, if the cover is named - /// "cover.jpg", it's probably the cover. The entry with the heighest weight, if any, will be - /// returned. - pub fn get_cover(path: &str) -> Result<(ContentType, Vec), FileError> { - let mut epub_file = EpubDoc::new(path).map_err(|e| { - tracing::error!("Failed to open epub file: {}", e); - FileError::EpubOpenError(e.to_string()) - })?; - + fn get_cover_internal( + epub_file: &mut EpubDoc>, + ) -> Result<(ContentType, Vec), FileError> { let cover_id = epub_file.get_cover_id().unwrap_or_else(|| { tracing::debug!("Epub file does not contain cover metadata"); DEFAULT_EPUB_COVER_ID.to_string() @@ -205,6 +203,23 @@ impl EpubProcessor { )) } + /// Returns the cover image for the epub file. If a cover image cannot be extracted via the + /// metadata, it will go through two rounds of fallback methods: + /// + /// 1. Attempt to find a resource with the default ID of "cover" + /// 2. Attempt to find a resource with a mime type of "image/jpeg" or "image/png", and weight the + /// results based on how likely they are to be the cover. For example, if the cover is named + /// "cover.jpg", it's probably the cover. The entry with the highest weight, if any, will be + /// returned. 
+ pub fn get_cover(path: &str) -> Result<(ContentType, Vec), FileError> { + let mut epub_file = EpubDoc::new(path).map_err(|e| { + tracing::error!("Failed to open epub file: {}", e); + FileError::EpubOpenError(e.to_string()) + })?; + + EpubProcessor::get_cover_internal(&mut epub_file) + } + pub fn get_chapter( path: &str, chapter: usize, @@ -357,3 +372,49 @@ pub(crate) fn normalize_resource_path(path: PathBuf, root: &str) -> PathBuf { PathBuf::from(adjusted_str) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::media::tests::get_test_epub_path; + + #[test] + fn test_process() { + let path = get_test_epub_path(); + let config = StumpConfig::debug(); + + let processed_file = EpubProcessor::process( + &path, + FileProcessorOptions { + convert_rar_to_zip: false, + delete_conversion_source: false, + }, + &config, + ); + assert!(processed_file.is_ok()); + } + + #[test] + fn test_get_page_content_types() { + let path = get_test_epub_path(); + + let cover = EpubProcessor::get_page_content_types(&path, vec![1]); + assert!(cover.is_ok()); + } + + #[test] + fn test_get_cover() { + let path = get_test_epub_path(); + + let cover = EpubProcessor::get_cover(&path); + assert!(cover.is_ok()); + } + + #[test] + fn test_get_chapter() { + let path = get_test_epub_path(); + + let chapter = EpubProcessor::get_chapter(&path, 1); + assert!(chapter.is_ok()); + } +} diff --git a/core/src/filesystem/media/mod.rs b/core/src/filesystem/media/mod.rs index d5f83fa66..592dd9263 100644 --- a/core/src/filesystem/media/mod.rs +++ b/core/src/filesystem/media/mod.rs @@ -1,3 +1,4 @@ +pub mod analyze_media_job; mod builder; mod common; pub(crate) mod epub; @@ -13,3 +14,58 @@ pub use process::{ get_content_types_for_pages, get_page, process, FileProcessor, FileProcessorOptions, ProcessedFile, SeriesJson, }; + +#[cfg(test)] +mod tests { + use std::{fs, path::PathBuf}; + + pub fn get_test_zip_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + 
.join("integration-tests/data/book.zip") + .to_string_lossy() + .to_string() + } + + pub fn get_test_rar_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/book.rar") + .to_string_lossy() + .to_string() + } + + pub fn get_test_rar_file_data() -> Vec { + let test_rar_path = get_test_rar_path(); + + fs::read(test_rar_path).expect("Failed to fetch test rar file") + } + + pub fn get_test_epub_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/book.epub") + .to_string_lossy() + .to_string() + } + + pub fn get_test_pdf_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/rust_book.pdf") + .to_string_lossy() + .to_string() + } + + pub fn get_test_cbz_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/science_comics_001.cbz") + .to_string_lossy() + .to_string() + } + + // Note: each page should be 96623 bytes. The macOS metadata files should be 220 bytes, but + // ignored by the processor. Commenting the sizes for posterity. 
+ pub fn get_nested_macos_compressed_cbz_path() -> String { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("integration-tests/data/nested-macos-compressed.cbz") + .to_string_lossy() + .to_string() + } +} diff --git a/core/src/filesystem/media/pdf.rs b/core/src/filesystem/media/pdf.rs index e595f0892..f49d6f60f 100644 --- a/core/src/filesystem/media/pdf.rs +++ b/core/src/filesystem/media/pdf.rs @@ -118,6 +118,13 @@ impl FileProcessor for PdfProcessor { } } + fn get_page_count(path: &str, config: &StumpConfig) -> Result { + let pdfium = PdfProcessor::renderer(&config.pdfium_path)?; + let document = pdfium.load_pdf_from_file(path, None)?; + + Ok(document.pages().len() as i32) + } + fn get_page_content_types( _: &str, pages: Vec, @@ -251,3 +258,33 @@ impl FileConverter for PdfProcessor { Ok(zip_path) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::media::tests::get_test_pdf_path; + + #[test] + fn test_process() { + let path = get_test_pdf_path(); + let config = StumpConfig::debug(); + + let processed_file = PdfProcessor::process( + &path, + FileProcessorOptions { + convert_rar_to_zip: false, + delete_conversion_source: false, + }, + &config, + ); + assert!(processed_file.is_ok()); + } + + #[test] + fn test_get_page_content_types() { + let path = get_test_pdf_path(); + + let content_types = PdfProcessor::get_page_content_types(&path, vec![1]); + assert!(content_types.is_ok()); + } +} diff --git a/core/src/filesystem/media/process.rs b/core/src/filesystem/media/process.rs index 9b59b0db2..dd6d51ce7 100644 --- a/core/src/filesystem/media/process.rs +++ b/core/src/filesystem/media/process.rs @@ -69,6 +69,9 @@ pub trait FileProcessor { config: &StumpConfig, ) -> Result<(ContentType, Vec), FileError>; + /// Get the number of pages in the file. + fn get_page_count(path: &str, config: &StumpConfig) -> Result; + /// Get the content types of a list of pages of the file. 
This should determine content /// types by actually testing the bytes for each page. fn get_page_content_types( @@ -160,6 +163,22 @@ pub fn get_page( } } +pub fn get_page_count(path: &str, config: &StumpConfig) -> Result { + let mime = ContentType::from_file(path).mime_type(); + + match mime.as_str() { + "application/zip" | "application/vnd.comicbook+zip" => { + ZipProcessor::get_page_count(path, config) + }, + "application/vnd.rar" | "application/vnd.comicbook-rar" => { + RarProcessor::get_page_count(path, config) + }, + "application/epub+zip" => EpubProcessor::get_page_count(path, config), + "application/pdf" => PdfProcessor::get_page_count(path, config), + _ => Err(FileError::UnsupportedFileType(path.to_string())), + } +} + pub fn get_content_types_for_pages( path: &str, pages: Vec, diff --git a/core/src/filesystem/media/rar.rs b/core/src/filesystem/media/rar.rs index 484959306..86b752dda 100644 --- a/core/src/filesystem/media/rar.rs +++ b/core/src/filesystem/media/rar.rs @@ -124,13 +124,21 @@ impl FileProcessor for RarProcessor { while let Ok(Some(header)) = archive.read_header() { let entry = header.entry(); + + if entry.filename.is_hidden_file() { + archive = header.skip()?; + continue; + } + if entry.filename.as_os_str() == "ComicInfo.xml" { let (data, rest) = header.read()?; metadata_buf = Some(data); archive = rest; } else { - // TODO: check for valid page type before incrementing - pages += 1; + // If the entry is not an image then it cannot be a valid page + if entry.filename.is_img() { + pages += 1; + } archive = header.skip()?; } } @@ -157,34 +165,23 @@ impl FileProcessor for RarProcessor { ) -> Result<(ContentType, Vec), FileError> { let archive = RarProcessor::open_for_listing(file)?; - let mut valid_entries = archive + let sorted_entries = archive .into_iter() .filter_map(|entry| entry.ok()) - .filter(|entry| { - if entry.is_file() { - let filename = - entry.filename.as_path().to_string_lossy().to_lowercase(); - filename.ends_with(".jpg") - || 
filename.ends_with(".jpeg") - || filename.ends_with(".png") - } else { - false - } - }) + .filter(|entry| entry.filename.is_img() && !entry.filename.is_hidden_file()) + .sorted_by(|a, b| alphanumeric_sort::compare_path(&a.filename, &b.filename)) .collect::>(); - valid_entries - .sort_by(|a, b| alphanumeric_sort::compare_path(&a.filename, &b.filename)); - let target_entry = valid_entries + let target_entry = sorted_entries .into_iter() .nth((page - 1) as usize) .ok_or(FileError::RarReadError)?; + let FileParts { extension, .. } = target_entry.filename.as_path().file_parts(); let mut bytes = None; let mut archive = RarProcessor::open_for_processing(file)?; while let Ok(Some(header)) = archive.read_header() { - let is_target = - header.entry().filename.as_os_str() == target_entry.filename.as_os_str(); + let is_target = header.entry().filename == target_entry.filename; if is_target { let (data, _) = header.read()?; bytes = Some(data); @@ -194,18 +191,32 @@ impl FileProcessor for RarProcessor { } } - let content_type = if let Some(bytes) = &bytes { - if bytes.len() < 5 { - return Err(FileError::NoImageError); - } - let mut magic_header = [0; 5]; - magic_header.copy_from_slice(&bytes[0..5]); - ContentType::from_bytes(&magic_header) - } else { - ContentType::UNKNOWN + let Some(bytes) = bytes else { + return Err(FileError::NoImageError); }; - Ok((content_type, bytes.ok_or(FileError::NoImageError)?)) + if bytes.len() < 5 { + debug!(path = ?file, ?bytes, "File is too small to determine content type"); + return Err(FileError::NoImageError); + } + let mut magic_header = [0; 5]; + magic_header.copy_from_slice(&bytes[0..5]); + let content_type = + ContentType::from_bytes_with_fallback(&magic_header, &extension); + + Ok((content_type, bytes)) + } + + fn get_page_count(path: &str, _: &StumpConfig) -> Result { + let archive = RarProcessor::open_for_listing(path)?; + + let page_count = archive + .into_iter() + .filter_map(|entry| entry.ok()) + .filter(|entry| entry.filename.is_img() 
&& !entry.filename.is_hidden_file()) + .count(); + + Ok(page_count as i32) } fn get_page_content_types( @@ -214,50 +225,36 @@ impl FileProcessor for RarProcessor { ) -> Result, FileError> { let archive = RarProcessor::open_for_listing(path)?; - let entries = archive + let sorted_entries = archive .into_iter() .filter_map(|entry| entry.ok()) - .filter(|entry| { - if entry.is_file() { - let filename = - entry.filename.as_path().to_string_lossy().to_lowercase(); - filename.ends_with(".jpg") - || filename.ends_with(".jpeg") - || filename.ends_with(".png") - } else { - false - } - }) + .filter(|entry| entry.filename.is_img()) .sorted_by(|a, b| alphanumeric_sort::compare_path(&a.filename, &b.filename)) - .enumerate() - .map(|(idx, header)| (PathBuf::from(header.filename.as_os_str()), idx)) - .collect::>(); + .collect::>(); let mut content_types = HashMap::new(); - let mut archive = RarProcessor::open_for_processing(path)?; - while let Ok(Some(header)) = archive.read_header() { - archive = if let Some(tuple) = - entries.get_key_value(&PathBuf::from(header.entry().filename.as_os_str())) - { - let page = *tuple.1 as i32; - if pages.contains(&page) { - let (data, rest) = header.read()?; - let path = Path::new(tuple.0); - let extension = path - .extension() - .and_then(|s| s.to_str()) - .unwrap_or_default(); - - content_types.insert( - page, - ContentType::from_bytes_with_fallback(&data, extension), - ); - rest - } else { - header.skip()? - } - } else { - header.skip()? 
+ + let mut pages_found = 0; + for entry in sorted_entries { + let path = entry.filename; + + if path.is_hidden_file() { + trace!(path = ?path, "Skipping hidden file"); + continue; + } + + let content_type = path.naive_content_type(); + let is_page_in_target = pages.contains(&(pages_found + 1)); + + if is_page_in_target && content_type.is_image() { + trace!(?path, ?content_type, "found a targeted rar entry"); + content_types.insert(pages_found + 1, content_type); + pages_found += 1; + } + + // If we've found all the pages we need, we can stop + if pages_found == pages.len() as i32 { + break; } } @@ -286,7 +283,7 @@ impl FileConverter for RarProcessor { let cache_dir = config.get_cache_dir(); let unpacked_path = cache_dir.join(file_stem); - trace!(?unpacked_path, "Extracting RAR to cache"); + trace!(?unpacked_path, "Extracting RAR to disk"); let mut archive = RarProcessor::open_for_processing(path)?; while let Ok(Some(header)) = archive.read_header() { @@ -303,17 +300,83 @@ impl FileConverter for RarProcessor { // TODO: won't work in docker if delete_source { if let Err(err) = trash::delete(path) { - warn!(error = ?err, path,"Failed to delete converted RAR file"); + warn!(error = ?err, path, "Failed to delete converted RAR file"); } } // TODO: maybe check that this path isn't in a pre-defined list of important paths? 
if let Err(err) = std::fs::remove_dir_all(&unpacked_path) { error!( - error = ?err, ?cache_dir, ?unpacked_path, "Failed to delete unpacked RAR contents in cache", + error = ?err, ?cache_dir, ?unpacked_path, "Failed to delete unpacked RAR contents after conversion", ); } Ok(zip_path) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::media::tests::{get_test_rar_file_data, get_test_rar_path}; + + use std::fs; + + #[test] + fn test_process() { + // Create temporary directory and place a copy of our mock book.rar in it + let tempdir = tempfile::tempdir().expect("Failed to create temporary directory"); + let temp_rar_file_path = tempdir + .path() + .join("book.rar") + .to_string_lossy() + .to_string(); + fs::write(&temp_rar_file_path, get_test_rar_file_data()) + .expect("Failed to write temporary book.rar"); + let config = StumpConfig::debug(); + + // We can test deletion since it's a temporary file + let processed_file = RarProcessor::process( + &temp_rar_file_path, + FileProcessorOptions { + convert_rar_to_zip: true, + delete_conversion_source: true, + }, + &config, + ); + + // Assert that the operation succeeded + assert!(processed_file.is_ok()); + // And that the original file was deleted + assert!(!Path::new(&temp_rar_file_path).exists()) + } + + #[test] + fn test_rar_to_zip() { + // Create temporary directory and place a copy of our mock book.rar in it + let tempdir = tempfile::tempdir().expect("Failed to create temporary directory"); + let temp_rar_file_path = tempdir + .path() + .join("book.rar") + .to_string_lossy() + .to_string(); + fs::write(&temp_rar_file_path, get_test_rar_file_data()) + .expect("Failed to write temporary book.rar"); + let config = StumpConfig::debug(); + + // We have a temporary file, so we may as well test deletion also + let zip_result = RarProcessor::to_zip(&temp_rar_file_path, true, None, &config); + // Assert that operation succeeded + assert!(zip_result.is_ok()); + // And that the original file was deleted + 
assert!(!Path::new(&temp_rar_file_path).exists()) + } + + #[test] + fn test_get_page_content_types() { + let path = get_test_rar_path(); + + let content_types = RarProcessor::get_page_content_types(&path, vec![1]); + assert!(content_types.is_ok()); + } +} diff --git a/core/src/filesystem/media/zip.rs b/core/src/filesystem/media/zip.rs index bda9c24e2..343cb7376 100644 --- a/core/src/filesystem/media/zip.rs +++ b/core/src/filesystem/media/zip.rs @@ -1,11 +1,5 @@ -use std::{ - collections::HashMap, - fs::File, - io::Read, - path::{Path, PathBuf}, -}; +use std::{collections::HashMap, fs::File, io::Read, path::PathBuf}; use tracing::{debug, error, trace}; -use zip::read::ZipFile; use crate::{ config::StumpConfig, @@ -14,6 +8,7 @@ use crate::{ error::FileError, hash, media::common::{metadata_from_buf, sort_file_names}, + FileParts, PathUtils, }, }; @@ -76,11 +71,25 @@ impl FileProcessor for ZipProcessor { for i in 0..archive.len() { let mut file = archive.by_index(i)?; - let (content_type, buf) = get_zip_entry_content_type(&mut file)?; - if file.name() == "ComicInfo.xml" { + + let path_buf = file.enclosed_name().unwrap_or_else(|| { + tracing::warn!("Failed to get enclosed name for zip entry"); + PathBuf::from(file.name()) + }); + let path = path_buf.as_path(); + + if path.is_hidden_file() { + trace!(path = ?path, "Skipping hidden file"); + continue; + } + + let content_type = path.naive_content_type(); + let FileParts { file_name, .. 
} = path.file_parts(); + + if file_name == "ComicInfo.xml" { trace!("Found ComicInfo.xml"); // we have the first few bytes of the file in buf, so we need to read the rest and make it a string - let mut contents = buf.to_vec(); + let mut contents = Vec::new(); file.read_to_end(&mut contents)?; let contents = String::from_utf8_lossy(&contents).to_string(); trace!(contents_len = contents.len(), "Read ComicInfo.xml"); @@ -119,14 +128,26 @@ impl FileProcessor for ZipProcessor { let mut images_seen = 0; for name in file_names { let mut file = archive.by_name(name)?; - let (content_type, buf) = get_zip_entry_content_type(&mut file)?; + + let path_buf = file.enclosed_name().unwrap_or_else(|| { + tracing::warn!("Failed to get enclosed name for zip entry"); + PathBuf::from(name) + }); + let path = path_buf.as_path(); + + if path.is_hidden_file() { + tracing::trace!(path = ?path_buf, "Skipping hidden file"); + continue; + } + + let content_type = path.naive_content_type(); if images_seen + 1 == page && content_type.is_image() { - trace!(?name, page, ?content_type, "found target zip entry"); - // read_to_end maintains the current cursor, so we want to start - // with what we already read - let mut contents = buf.to_vec(); + trace!(?name, page, ?content_type, "Found targeted zip entry"); + + let mut contents = Vec::new(); file.read_to_end(&mut contents)?; + trace!(contents_len = contents.len(), "Read zip entry"); return Ok((content_type, contents)); } else if content_type.is_image() { @@ -134,11 +155,41 @@ impl FileProcessor for ZipProcessor { } } - error!(page, path, "Failed to find valid image"); + error!(page, path, "Failed to find valid image in zip file"); Err(FileError::NoImageError) } + fn get_page_count(path: &str, _: &StumpConfig) -> Result { + let zip_file = File::open(path)?; + + let mut archive = zip::ZipArchive::new(&zip_file)?; + let file_names_archive = archive.clone(); + + if archive.is_empty() { + error!(path, "Empty zip file"); + return 
Err(FileError::ArchiveEmptyError); + } + + let mut pages = 0; + let file_names = file_names_archive.file_names().collect::>(); + for name in file_names { + let file = archive.by_name(name)?; + let path_buf = file.enclosed_name().unwrap_or_else(|| { + tracing::warn!("Failed to get enclosed name for zip entry"); + PathBuf::from(name) + }); + let content_type = path_buf.as_path().naive_content_type(); + let is_hidden = path_buf.as_path().is_hidden_file(); + + if content_type.is_image() && !is_hidden { + pages += 1; + } + } + + Ok(pages) + } + fn get_page_content_types( path: &str, pages: Vec, @@ -156,21 +207,31 @@ impl FileProcessor for ZipProcessor { let mut content_types = HashMap::new(); - let mut images_seen = 0; + let mut pages_found = 0; for name in file_names { - let mut file = archive.by_name(name)?; - let (content_type, _) = get_zip_entry_content_type(&mut file)?; - let is_page_in_target = pages.contains(&(images_seen + 1)); + let file = archive.by_name(name)?; + let path_buf = file.enclosed_name().unwrap_or_else(|| { + tracing::warn!("Failed to get enclosed name for zip entry"); + PathBuf::from(name) + }); + let path = path_buf.as_path(); + + if path.is_hidden_file() { + trace!(path = ?path_buf, "Skipping hidden file"); + continue; + } + + let content_type = path.naive_content_type(); + let is_page_in_target = pages.contains(&(pages_found + 1)); if is_page_in_target && content_type.is_image() { trace!(?name, ?content_type, "found a targeted zip entry"); - content_types.insert(images_seen + 1, content_type); - images_seen += 1; - } else if content_type.is_image() { - images_seen += 1; + content_types.insert(pages_found + 1, content_type); + pages_found += 1; } - if images_seen == pages.len() as i32 { + // If we've found all the pages we need, we can stop + if pages_found == pages.len() as i32 { break; } } @@ -179,25 +240,109 @@ impl FileProcessor for ZipProcessor { } } -fn get_zip_entry_content_type( - zipfile: &mut ZipFile, -) -> Result<(ContentType, Vec), 
FileError> { - let file_size = zipfile.size(); - let file_name = zipfile.name().to_string(); - let buf_size = if file_size < 5 { file_size } else { 5 }; +#[cfg(test)] +mod tests { + use super::*; + use crate::filesystem::media::tests::{ + get_nested_macos_compressed_cbz_path, get_test_cbz_path, get_test_zip_path, + }; + + #[test] + fn test_process() { + let path = get_test_zip_path(); + let config = StumpConfig::debug(); + + let processed_file = ZipProcessor::process( + &path, + FileProcessorOptions { + convert_rar_to_zip: false, + delete_conversion_source: false, + }, + &config, + ); + assert!(processed_file.is_ok()); + } - if buf_size < 5 { - trace!(?buf_size, "Found small zip entry"); + #[test] + fn test_process_cbz() { + let path = get_test_cbz_path(); + let config = StumpConfig::debug(); + + let processed_file = ZipProcessor::process( + &path, + FileProcessorOptions { + convert_rar_to_zip: false, + delete_conversion_source: false, + }, + &config, + ); + assert!(processed_file.is_ok()); } - let extension = Path::new(&file_name) - .extension() - .and_then(|e| e.to_str()) - .unwrap_or_default(); + #[test] + fn test_process_nested_cbz() { + let path = get_nested_macos_compressed_cbz_path(); + let config = StumpConfig::debug(); + + let processed_file = ZipProcessor::process( + &path, + FileProcessorOptions { + convert_rar_to_zip: false, + delete_conversion_source: false, + }, + &config, + ); + assert!(processed_file.is_ok()); + assert_eq!(processed_file.unwrap().pages, 3); + } - let mut buf = vec![0; buf_size as usize]; - zipfile.read_exact(&mut buf)?; - let content_type = ContentType::from_bytes_with_fallback(&buf, extension); + #[test] + fn test_get_page_cbz() { + // Note: This doesn't work with the other test book, because it has no pages. 
+ let path = get_test_cbz_path(); + let config = StumpConfig::debug(); - Ok((content_type, buf)) + let page = ZipProcessor::get_page(&path, 1, &config); + assert!(page.is_ok()); + } + + #[test] + fn test_get_page_nested_cbz() { + let path = get_nested_macos_compressed_cbz_path(); + + let (content_type, buf) = ZipProcessor::get_page(&path, 1, &StumpConfig::debug()) + .expect("Failed to get page"); + assert_eq!(content_type.mime_type(), "image/jpeg"); + // Note: this is known and expected to be 96623 bytes. + assert_eq!(buf.len(), 96623); + } + + #[test] + fn test_get_page_content_types() { + let path = get_test_zip_path(); + + let content_types = ZipProcessor::get_page_content_types(&path, vec![1]); + assert!(content_types.is_ok()); + } + + #[test] + fn test_get_page_content_types_cbz() { + let path = get_test_cbz_path(); + + let content_types = + ZipProcessor::get_page_content_types(&path, vec![1, 2, 3, 4, 5]); + assert!(content_types.is_ok()); + } + + #[test] + fn test_get_page_content_types_nested_cbz() { + let path = get_nested_macos_compressed_cbz_path(); + + let content_types = ZipProcessor::get_page_content_types(&path, vec![1, 2, 3]) + .expect("Failed to get page content types"); + assert_eq!(content_types.len(), 3); + assert!(content_types + .values() + .all(|ct| ct.mime_type() == "image/jpeg")); + } } diff --git a/core/src/filesystem/mod.rs b/core/src/filesystem/mod.rs index 769177c0d..62ecd2799 100644 --- a/core/src/filesystem/mod.rs +++ b/core/src/filesystem/mod.rs @@ -4,7 +4,7 @@ pub mod archive; mod common; mod content_type; mod directory_listing; -mod error; +pub(crate) mod error; mod hash; pub mod image; pub mod media; diff --git a/core/src/filesystem/scanner/library_scan_job.rs b/core/src/filesystem/scanner/library_scan_job.rs index c6f6b5171..a57c50bf4 100644 --- a/core/src/filesystem/scanner/library_scan_job.rs +++ b/core/src/filesystem/scanner/library_scan_job.rs @@ -36,7 +36,7 @@ use super::{ walk_library, walk_series, WalkedLibrary, 
WalkedSeries, WalkerCtx, }; -/// The taks variants that are used to scan a library +/// The task variants that are used to scan a library #[derive(Serialize, Deserialize)] pub enum LibraryScanTask { Init(InitTaskInput), diff --git a/core/src/job/error.rs b/core/src/job/error.rs index 8a3c09dd0..8fe75b7d2 100644 --- a/core/src/job/error.rs +++ b/core/src/job/error.rs @@ -1,6 +1,6 @@ use tokio::sync::oneshot; -use crate::CoreError; +use crate::{filesystem::error::FileError, CoreError}; #[derive(Debug, thiserror::Error)] pub enum JobError { @@ -16,6 +16,8 @@ pub enum JobError { TaskFailed(String), #[error("A query error occurred: {0}")] QueryError(#[from] prisma_client_rust::QueryError), + #[error("A file error occurred: {0}")] + FileError(#[from] FileError), #[error("An unknown error occurred: {0}")] Unknown(String), } diff --git a/core/src/lib.rs b/core/src/lib.rs index 91a83e192..9b6e670eb 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -32,8 +32,14 @@ pub use context::Ctx; pub use error::{CoreError, CoreResult}; pub use event::CoreEvent; +pub use email::{ + AttachmentPayload, EmailContentType, EmailerClient, EmailerClientConfig, +}; + /// A type alias strictly for explicitness in the return type of `init_journal_mode`. type JournalModeChanged = bool; +/// A type alias strictly for explicitness in the return type of `init_encryption`. +type EncryptionKeySet = bool; /// The [StumpCore] struct is the main entry point for any server-side Stump /// applications. It is responsible for managing incoming tasks ([InternalCoreTask]), @@ -138,6 +144,40 @@ impl StumpCore { Ok(()) } + // TODO: This is insecure for obvious reasons, and should be removed in the future. This was added + // to reduce friction of setting up the server for folks who might not understand encryption keys. + /// Initializes the encryption key for the database. This will only set the encryption key + /// if one does not already exist. 
+ pub async fn init_encryption(&self) -> Result { + let client = self.ctx.db.clone(); + + let encryption_key_set = client + .server_config() + .find_first(vec![server_config::encryption_key::not(None)]) + .exec() + .await? + .is_some(); + + if encryption_key_set { + Ok(false) + } else { + let encryption_key = utils::create_encryption_key()?; + let affected_rows = client + .server_config() + .update_many( + vec![], + vec![server_config::encryption_key::set(Some(encryption_key))], + ) + .exec() + .await?; + tracing::trace!(affected_rows, "Updated encryption key"); + if affected_rows > 1 { + tracing::warn!("More than one encryption key was updated? This is definitely not expected"); + } + Ok(affected_rows > 0) + } + } + /// Initializes the journal mode for the database. This will only set the journal mode to WAL /// provided a few conditions are met: /// @@ -196,6 +236,7 @@ impl StumpCore { mod tests { use std::{fs::File, io::Write, path::PathBuf}; + use email::EmailerClientConfig; use specta::{ ts::{export, BigIntExportBehavior, ExportConfiguration, TsExportError}, NamedType, @@ -249,7 +290,7 @@ mod tests { file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; // file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; - // TODO: Fix this... Must move all job defs to the core... + // TODO: Fix this... Must move all job defs to the core... 
Otherwise, the `unknown` type swallows the others in the union file.write_all( "export type CoreJobOutput = LibraryScanOutput | SeriesScanOutput | ThumbnailGenerationOutput\n\n".to_string() .as_bytes(), @@ -286,6 +327,19 @@ mod tests { file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; @@ -301,6 +355,13 @@ mod tests { file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all( + format!("{}\n\n", ts_export::()?).as_bytes(), + )?; + file.write_all(format!("{}\n\n", ts_export::>()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::>()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::>()?).as_bytes())?; @@ -325,12 +386,6 @@ mod tests { file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; - file.write_all( - format!("{}\n\n", ts_export::()?).as_bytes(), - )?; - file.write_all( - format!("{}\n\n", ts_export::()?).as_bytes(), - )?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; file.write_all(format!("{}\n\n", ts_export::()?).as_bytes())?; diff --git a/core/src/opds/mod.rs 
b/core/src/opds/mod.rs index cb5576bde..e2822e000 100644 --- a/core/src/opds/mod.rs +++ b/core/src/opds/mod.rs @@ -1,41 +1 @@ -//! Defines an interface for writing OPDS-complaint XML, based on the specification defined at -//! https://specs.opds.io/opds-1.2 - -pub mod author; -pub mod entry; -pub mod feed; -pub mod link; -pub mod opensearch; -pub mod util; - -#[cfg(test)] -mod tests { - use regex::Regex; - - /// A utility function for stripping whitespace from XML strings to - /// make writing expected test results more ergonomic. - pub fn normalize_xml(xml: &str) -> String { - // Match whitespace between XML tags and replace it with "><" - let re = Regex::new(r">\s+<").unwrap(); - let compacted = re.replace_all(xml, "><").to_string(); - - // Do interior normalization and return the result - interior_normalize_xml(compacted.trim()) - } - - /// Normalizes XML by removing newlines and tabs from within tags. - fn interior_normalize_xml(xml: &str) -> String { - let re_tags = Regex::new(r"<[^>]+>").unwrap(); // Matches the entire tag - - // First we replace all \n and \t characters with spaces - let normalized_xml = re_tags - .replace_all(xml, |caps: &regex::Captures| { - caps[0].replace(['\n', '\t'], " ") - }) - .to_string(); - let partially_cleaned = normalized_xml.trim(); - // Then we make sure there aren't any more double spaces - let re = Regex::new(r"\s{2,}").unwrap(); // Matches sequences of two or more whitespace characters - re.replace_all(partially_cleaned, " ").to_string() - } -} +pub mod v1_2; diff --git a/core/src/opds/author.rs b/core/src/opds/v1_2/author.rs similarity index 96% rename from core/src/opds/author.rs rename to core/src/opds/v1_2/author.rs index fa3208d13..46d3a1dba 100644 --- a/core/src/opds/author.rs +++ b/core/src/opds/v1_2/author.rs @@ -32,7 +32,7 @@ impl StumpAuthor { /// /// ## Example /// ```rust - /// use stump_core::opds::author::StumpAuthor; + /// use stump_core::opds::v1_2::author::StumpAuthor; /// use xml::EventWriter; /// /// let 
author = StumpAuthor::new("Aaron Leopold".to_string(), None); @@ -61,7 +61,7 @@ impl StumpAuthor { #[cfg(test)] mod tests { use super::*; - use crate::opds::tests::normalize_xml; + use crate::opds::v1_2::tests::normalize_xml; #[test] fn test_author_with_only_name() { diff --git a/core/src/opds/entry.rs b/core/src/opds/v1_2/entry.rs similarity index 99% rename from core/src/opds/entry.rs rename to core/src/opds/v1_2/entry.rs index 9a4ae80f3..79b34ae1e 100644 --- a/core/src/opds/entry.rs +++ b/core/src/opds/v1_2/entry.rs @@ -15,7 +15,7 @@ use crate::error::CoreResult; use crate::filesystem::media::get_content_types_for_pages; use crate::filesystem::{ContentType, FileParts, PathUtils}; use crate::{ - opds::link::OpdsStreamLink, + opds::v1_2::link::OpdsStreamLink, prisma::{library, media, series}, }; @@ -290,7 +290,7 @@ mod tests { use std::str::FromStr; use super::*; - use crate::opds::tests::normalize_xml; + use crate::opds::v1_2::tests::normalize_xml; #[test] fn test_opds_entry() { diff --git a/core/src/opds/feed.rs b/core/src/opds/v1_2/feed.rs similarity index 99% rename from core/src/opds/feed.rs rename to core/src/opds/v1_2/feed.rs index 36596c1cd..911ed169a 100644 --- a/core/src/opds/feed.rs +++ b/core/src/opds/v1_2/feed.rs @@ -3,7 +3,7 @@ use crate::{ error::CoreError, - opds::link::OpdsLink, + opds::v1_2::link::OpdsLink, prisma::{library, series}, }; use prisma_client_rust::chrono::{self, DateTime, Utc}; @@ -286,7 +286,7 @@ mod tests { use std::str::FromStr; use super::*; - use crate::opds::tests::normalize_xml; + use crate::opds::v1_2::tests::normalize_xml; #[test] fn test_opds_feed() { diff --git a/core/src/opds/link.rs b/core/src/opds/v1_2/link.rs similarity index 99% rename from core/src/opds/link.rs rename to core/src/opds/v1_2/link.rs index 7c181d8cd..48bbcd203 100644 --- a/core/src/opds/link.rs +++ b/core/src/opds/v1_2/link.rs @@ -192,7 +192,7 @@ impl OpdsStreamLink { #[cfg(test)] mod tests { use super::*; - use crate::opds::tests::normalize_xml; + use 
crate::opds::v1_2::tests::normalize_xml; #[test] fn test_opds_link() { diff --git a/core/src/opds/v1_2/mod.rs b/core/src/opds/v1_2/mod.rs new file mode 100644 index 000000000..cb5576bde --- /dev/null +++ b/core/src/opds/v1_2/mod.rs @@ -0,0 +1,41 @@ +//! Defines an interface for writing OPDS-compliant XML, based on the specification defined at +//! https://specs.opds.io/opds-1.2 + +pub mod author; +pub mod entry; +pub mod feed; +pub mod link; +pub mod opensearch; +pub mod util; + +#[cfg(test)] +mod tests { + use regex::Regex; + + /// A utility function for stripping whitespace from XML strings to + /// make writing expected test results more ergonomic. + pub fn normalize_xml(xml: &str) -> String { + // Match whitespace between XML tags and replace it with "><" + let re = Regex::new(r">\s+<").unwrap(); + let compacted = re.replace_all(xml, "><").to_string(); + + // Do interior normalization and return the result + interior_normalize_xml(compacted.trim()) + } + + /// Normalizes XML by removing newlines and tabs from within tags. 
+ fn interior_normalize_xml(xml: &str) -> String { + let re_tags = Regex::new(r"<[^>]+>").unwrap(); // Matches the entire tag + + // First we replace all \n and \t characters with spaces + let normalized_xml = re_tags + .replace_all(xml, |caps: &regex::Captures| { + caps[0].replace(['\n', '\t'], " ") + }) + .to_string(); + let partially_cleaned = normalized_xml.trim(); + // Then we make sure there aren't any more double spaces + let re = Regex::new(r"\s{2,}").unwrap(); // Matches sequences of two or more whitespace characters + re.replace_all(partially_cleaned, " ").to_string() + } +} diff --git a/core/src/opds/opensearch.rs b/core/src/opds/v1_2/opensearch.rs similarity index 100% rename from core/src/opds/opensearch.rs rename to core/src/opds/v1_2/opensearch.rs diff --git a/core/src/opds/util.rs b/core/src/opds/v1_2/util.rs similarity index 100% rename from core/src/opds/util.rs rename to core/src/opds/v1_2/util.rs diff --git a/core/src/utils.rs b/core/src/utils.rs index b4884f150..6123ac606 100644 --- a/core/src/utils.rs +++ b/core/src/utils.rs @@ -1,3 +1,7 @@ +use simple_crypt::{decrypt, encrypt}; + +use crate::{CoreError, CoreResult, Ctx}; + pub fn chain_optional_iter( required: impl IntoIterator, optional: impl IntoIterator>, @@ -9,3 +13,27 @@ .flatten() .collect() } + +pub fn create_encryption_key() -> CoreResult { + let random_bytes = rand::random::<[u8; 32]>(); + + Ok(data_encoding::BASE64.encode(&random_bytes)) +} + +pub async fn encrypt_string(str: &str, ctx: &Ctx) -> CoreResult { + let encryption_key = ctx.get_encryption_key().await?; + let encrypted_bytes = encrypt(str.as_bytes(), encryption_key.as_bytes()) + .map_err(|e| CoreError::EncryptionFailed(e.to_string()))?; + Ok(data_encoding::BASE64.encode(&encrypted_bytes)) +} + +pub async fn decrypt_string(encrypted_str: &str, ctx: &Ctx) -> CoreResult { + let encryption_key = ctx.get_encryption_key().await?; + let encrypted_bytes = data_encoding::BASE64 + 
.decode(encrypted_str.as_bytes()) + .map_err(|e| CoreError::DecryptionFailed(e.to_string()))?; + let decrypted_bytes = decrypt(&encrypted_bytes, encryption_key.as_bytes()) + .map_err(|e| CoreError::DecryptionFailed(e.to_string()))?; + String::from_utf8(decrypted_bytes) + .map_err(|e| CoreError::DecryptionFailed(e.to_string())) +} diff --git a/crates/email/Cargo.toml b/crates/email/Cargo.toml new file mode 100644 index 000000000..102cd14ba --- /dev/null +++ b/crates/email/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "email" +edition = "2021" +version.workspace = true +rust-version.workspace = true + + +[dependencies] +handlebars = "5.1.0" +lettre = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +specta = { workspace = true } +thiserror = { workspace = true } +tracing = { workspace = true } +utoipa = { version = "3.5.0" } diff --git a/crates/email/src/emailer.rs b/crates/email/src/emailer.rs new file mode 100644 index 000000000..1f8e30450 --- /dev/null +++ b/crates/email/src/emailer.rs @@ -0,0 +1,250 @@ +use std::path::PathBuf; + +use lettre::{ + address::AddressError, + message::{ + header::{self, ContentType}, + Attachment, MultiPart, SinglePart, + }, + transport::smtp::authentication::Credentials, + Message, SmtpTransport, Transport, +}; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use specta::Type; +use utoipa::ToSchema; + +use crate::{render_template, EmailError, EmailResult, EmailTemplate}; + +/// The configuration for an [EmailerClient] +#[derive(Serialize, Deserialize, ToSchema, Type)] +pub struct EmailerClientConfig { + /// The email address to send from + pub sender_email: String, + /// The display name to use for the sender + pub sender_display_name: String, + /// The username to use for the SMTP server, typically the same as the sender email + pub username: String, + /// The plaintext password to use for the SMTP server, which will be encrypted before being stored + pub password: String, + /// The 
SMTP host to use + pub host: String, + /// The SMTP port to use + pub port: u16, + /// Whether to use TLS for the SMTP connection + pub tls_enabled: bool, + /// The maximum size of an attachment in bytes + pub max_attachment_size_bytes: Option, + /// The maximum number of attachments that can be sent in a single email + pub max_num_attachments: Option, +} + +/// Information about an attachment to be sent in an email, including the actual content +#[derive(Debug)] +pub struct AttachmentPayload { + /// The name of the attachment + pub name: String, + /// The bytes of the attachment + pub content: Vec, + /// The content type of the attachment, e.g. "text/plain" + pub content_type: ContentType, +} + +/// A client for sending emails +pub struct EmailerClient { + /// The configuration for the email client + config: EmailerClientConfig, + /// The directory where email templates are stored + template_dir: PathBuf, +} + +impl EmailerClient { + /// Create a new [EmailerClient] instance with the given configuration and template directory. + /// + /// # Example + /// ```rust + /// use email::{EmailerClient, EmailerClientConfig}; + /// use std::path::PathBuf; + /// + /// let config = EmailerClientConfig { + /// sender_email: "aaron@stumpapp.dev".to_string(), + /// sender_display_name: "Aaron's Stump Instance".to_string(), + /// username: "aaron@stumpapp.dev".to_string(), + /// password: "decrypted_password".to_string(), + /// host: "smtp.stumpapp.dev".to_string(), + /// port: 587, + /// tls_enabled: true, + /// max_attachment_size_bytes: Some(10_000_000), + /// max_num_attachments: Some(5), + /// }; + /// let template_dir = PathBuf::from("/templates"); + /// let emailer = EmailerClient::new(config, template_dir); + /// ``` + pub fn new(config: EmailerClientConfig, template_dir: PathBuf) -> Self { + Self { + config, + template_dir, + } + } + + /// Send an email with the given subject and attachment to the given recipient. 
+ /// Internally, this will just call [EmailerClient::send_attachments] with a single attachment. + /// + /// # Example + /// ```rust + /// use email::{AttachmentPayload, EmailerClient, EmailerClientConfig}; + /// use std::path::PathBuf; + /// use lettre::message::header::ContentType; + /// + /// async fn test() { + /// let config = EmailerClientConfig { + /// sender_email: "aaron@stumpapp.dev".to_string(), + /// sender_display_name: "Aaron's Stump Instance".to_string(), + /// username: "aaron@stumpapp.dev".to_string(), + /// password: "decrypted_password".to_string(), + /// host: "smtp.stumpapp.dev".to_string(), + /// port: 587, + /// tls_enabled: true, + /// max_attachment_size_bytes: Some(10_000_000), + /// max_num_attachments: Some(5), + /// }; + /// let template_dir = PathBuf::from("/templates"); + /// let emailer = EmailerClient::new(config, template_dir); + /// + /// let result = emailer.send_attachment( + /// "Attachment Test", + /// "aaron@stumpapp.dev", + /// AttachmentPayload { + /// name: "test.txt".to_string(), + /// content: b"Hello, world!".to_vec(), + /// content_type: "text/plain".parse().unwrap(), + /// }, + /// ).await; + /// assert!(result.is_err()); // This will fail because the SMTP server is not real + /// } + /// ``` + pub async fn send_attachment( + &self, + subject: &str, + recipient: &str, + payload: AttachmentPayload, + ) -> EmailResult<()> { + self.send_attachments(subject, recipient, vec![payload]) + .await + } + + /// Send an email with the given subject and attachments to the given recipient. + /// The attachments are sent as a multipart email, with the first attachment being the email body. 
+ /// + /// # Example + /// ```rust + /// use email::{AttachmentPayload, EmailerClient, EmailerClientConfig}; + /// use std::path::PathBuf; + /// use lettre::message::header::ContentType; + /// + /// async fn test() { + /// let config = EmailerClientConfig { + /// sender_email: "aaron@stumpapp.dev".to_string(), + /// sender_display_name: "Aaron's Stump Instance".to_string(), + /// username: "aaron@stumpapp.dev".to_string(), + /// password: "decrypted_password".to_string(), + /// host: "smtp.stumpapp.dev".to_string(), + /// port: 587, + /// tls_enabled: true, + /// max_attachment_size_bytes: Some(10_000_000), + /// max_num_attachments: Some(5), + /// }; + /// let template_dir = PathBuf::from("/templates"); + /// let emailer = EmailerClient::new(config, template_dir); + /// + /// let result = emailer.send_attachments( + /// "Attachment Test", + /// "aaron@stumpapp.dev", + /// vec![ + /// AttachmentPayload { + /// name: "test.txt".to_string(), + /// content: b"Hello, world!".to_vec(), + /// content_type: "text/plain".parse().unwrap(), + /// }, + /// AttachmentPayload { + /// name: "test2.txt".to_string(), + /// content: b"Hello, world again!".to_vec(), + /// content_type: "text/plain".parse().unwrap(), + /// }, + /// ], + /// ).await; + /// assert!(result.is_err()); // This will fail because the SMTP server is not real + /// } + /// ``` + pub async fn send_attachments( + &self, + subject: &str, + recipient: &str, + payloads: Vec, + ) -> EmailResult<()> { + let from = self + .config + .sender_email + .parse() + .map_err(|e: AddressError| EmailError::InvalidEmail(e.to_string()))?; + + let to = recipient + .parse() + .map_err(|e: AddressError| EmailError::InvalidEmail(e.to_string()))?; + + let html = render_template( + EmailTemplate::Attachment, + &json!({ + "title": "Stump Attachment", + }), + self.template_dir.clone(), + )?; + + let mut multipart_builder = MultiPart::mixed().singlepart( + SinglePart::builder() + .header(header::ContentType::TEXT_HTML) + .body(html), + 
); + + for payload in payloads { + let attachment = + Attachment::new(payload.name).body(payload.content, payload.content_type); + multipart_builder = multipart_builder.singlepart(attachment); + } + + let email = Message::builder() + .from(from) + .to(to) + .subject(subject) + .multipart(multipart_builder)?; + + let creds = + Credentials::new(self.config.username.clone(), self.config.password.clone()); + + // Note this issue: https://github.com/lettre/lettre/issues/359 + let transport = if self.config.tls_enabled { + SmtpTransport::starttls_relay(&self.config.host) + .unwrap() + .credentials(creds) + .build() + } else { + SmtpTransport::relay(&self.config.host)? + .port(self.config.port) + .credentials(creds) + .build() + }; + + match transport.send(&email) { + Ok(res) => { + tracing::trace!(?res, "Email with attachments was sent"); + Ok(()) + }, + Err(e) => { + tracing::error!(error = ?e, "Failed to send email with attachments"); + Err(e.into()) + }, + } + } +} + +// TODO: write meaningful tests diff --git a/crates/email/src/error.rs b/crates/email/src/error.rs new file mode 100644 index 000000000..6c92865d4 --- /dev/null +++ b/crates/email/src/error.rs @@ -0,0 +1,21 @@ +use lettre::transport::smtp; + +pub type EmailResult = Result; + +/// An error type that represents what can go wrong when sending an email +/// using the `email` crate. 
+#[derive(Debug, thiserror::Error)] +pub enum EmailError { + #[error("Invalid email: {0}")] + InvalidEmail(String), + #[error("Failed to build email: {0}")] + EmailBuildFailed(#[from] lettre::error::Error), + #[error("Failed to send email: {0}")] + SendFailed(#[from] smtp::Error), + #[error("Failed to register template: {0}")] + TemplateRegistrationFailed(#[from] handlebars::TemplateError), + #[error("Template not found")] + TempalateNotFound, + #[error("Failed to render template: {0}")] + TemplateRenderFailed(#[from] handlebars::RenderError), +} diff --git a/crates/email/src/lib.rs b/crates/email/src/lib.rs new file mode 100644 index 000000000..bdfa38eef --- /dev/null +++ b/crates/email/src/lib.rs @@ -0,0 +1,15 @@ +//! Email module for sending emails using SMTP. This module uses the `lettre` crate to send emails, +//! and the `handlebars` crate to render email templates. + +/// A module containing the emailer client and its configuration, as well as the sending of emails +mod emailer; +/// A module containing the error type for this crate +mod error; +/// A module containing the template rendering functionality, via the `handlebars` crate +mod template; + +pub use emailer::{AttachmentPayload, EmailerClient, EmailerClientConfig}; +pub use error::{EmailError, EmailResult}; +pub use template::{render_template, EmailTemplate}; + +pub use lettre::message::header::ContentType as EmailContentType; diff --git a/crates/email/src/template.rs b/crates/email/src/template.rs new file mode 100644 index 000000000..a9d0c77e2 --- /dev/null +++ b/crates/email/src/template.rs @@ -0,0 +1,74 @@ +use std::path::PathBuf; + +use crate::EmailResult; +use handlebars::Handlebars; + +// TODO: expose this enumeration to the public API somehow, so that users can define their own template overrides + +pub enum EmailTemplate { + /// A template for an email which includes attachment(s), e.g. 
a book on the server + Attachment, +} + +impl AsRef for EmailTemplate { + fn as_ref(&self) -> &str { + match self { + Self::Attachment => "attachment", + } + } +} + +/// Render a template to a string using the given data and templates directory. +/// +/// # Example +/// ```rust +/// use email::{render_template, EmailTemplate}; +/// use serde_json::json; +/// use std::path::PathBuf; +/// +/// let data = json!({ +/// "title": "Stump Attachment", +/// }); +/// +/// let rendered = render_template(EmailTemplate::Attachment, &data, PathBuf::from("templates")).unwrap(); +/// assert!(rendered.contains("Stump Attachment")); +/// ``` +pub fn render_template( + template: EmailTemplate, + data: &serde_json::Value, + templates_dir: PathBuf, +) -> EmailResult { + let mut handlebars = Handlebars::new(); + handlebars.register_partial("base_partial", "{{> base}}")?; + handlebars.register_template_file("base", templates_dir.join("base.hbs"))?; + handlebars + .register_template_file("attachment", templates_dir.join("attachment.hbs"))?; + + Ok(handlebars.render(template.as_ref(), data)?) +} + +// TODO: Write meaningful tests + +#[cfg(test)] +mod tests { + use super::*; + + fn default_templates_dir() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("templates") + } + + #[test] + fn render_template_attachment() { + let data = serde_json::json!({ + "title": "Stump Attachment", + }); + + let rendered = + render_template(EmailTemplate::Attachment, &data, default_templates_dir()) + .unwrap(); + + dbg!(&rendered); + + assert!(rendered.contains("Stump Attachment")); + } +} diff --git a/crates/email/templates/attachment.hbs b/crates/email/templates/attachment.hbs new file mode 100644 index 000000000..044ba11a1 --- /dev/null +++ b/crates/email/templates/attachment.hbs @@ -0,0 +1,7 @@ +{{#*inline "page"}} +{{!-- TODO: design email --}} +

+ You have a new attachment from Stump! +

+{{/inline}} +{{> base}} \ No newline at end of file diff --git a/crates/email/templates/base.hbs b/crates/email/templates/base.hbs new file mode 100644 index 000000000..396cb1ad4 --- /dev/null +++ b/crates/email/templates/base.hbs @@ -0,0 +1,7 @@ +{{!-- TODO: design base email --}} + + {{title}} + + {{> page}} + + \ No newline at end of file diff --git a/crates/integrations/Cargo.toml b/crates/integrations/Cargo.toml index 492437e7a..d49d0c5b3 100644 --- a/crates/integrations/Cargo.toml +++ b/crates/integrations/Cargo.toml @@ -1,10 +1,11 @@ [package] name = "integrations" -version = "0.0.3" +version = "0.0.4" edition = "2021" [dependencies] async-trait = { workspace = true } +lettre = { workspace = true } reqwest = { workspace = true } serde_json = { workspace = true } thiserror = { workspace = true } diff --git a/package.json b/package.json index 12355f74c..73d1d24b6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@stump/monorepo", - "version": "0.0.3", + "version": "0.0.4", "repository": "https://github.com/stumpapp/stump.git", "author": "Aaron Leopold ", "license": "MIT", diff --git a/packages/api/package.json b/packages/api/package.json index fa8aedfeb..b5641165e 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -1,6 +1,6 @@ { "name": "@stump/api", - "version": "0.0.3", + "version": "0.0.4", "description": "", "main": "src/index.ts", "exports": { diff --git a/packages/api/src/axios.ts b/packages/api/src/axios.ts index dff8696ad..e9e87105f 100644 --- a/packages/api/src/axios.ts +++ b/packages/api/src/axios.ts @@ -25,6 +25,11 @@ export function initializeApi(baseUrl: string, version: string) { API = axios.create({ baseURL: correctedUrl, + // FIXME: react-native seems to ignore this option, causing brackets to be encoded which + // the backend doesn't support + // paramsSerializer: { + // encode: (params) => qs.stringify(params, { arrayFormat: 'repeat' }), + // }, withCredentials: true, }) } diff --git 
a/packages/api/src/emailer.ts b/packages/api/src/emailer.ts new file mode 100644 index 000000000..b89eea9b7 --- /dev/null +++ b/packages/api/src/emailer.ts @@ -0,0 +1,122 @@ +import { + CreateOrUpdateEmailDevice, + CreateOrUpdateEmailer, + EmailerSendRecord, + PatchEmailDevice, + RegisteredEmailDevice, + SendAttachmentEmailResponse, + SendAttachmentEmailsPayload, + SMTPEmailer, +} from '@stump/types' + +import { API } from './axios' +import { APIResult } from './types' +import { toUrlParams } from './utils' + +function getEmailers(params?: Record): Promise> { + if (params) { + return API.get(`/emailers?${toUrlParams(params)}`) + } else { + return API.get('/emailers') + } +} + +function getEmailerById(id: number): Promise> { + return API.get(`/emailers/${id}`) +} + +function createEmailer(payload: CreateOrUpdateEmailer): Promise> { + return API.post('/emailers', payload) +} + +function updateEmailer( + id: number, + payload: CreateOrUpdateEmailer, +): Promise> { + return API.put(`/emailers/${id}`, payload) +} + +function deleteEmailer(id: number): Promise> { + return API.delete(`/emailers/${id}`) +} + +function getEmailDevices(): Promise> { + return API.get('/email-devices') +} + +function getEmailDeviceById(id: number): Promise> { + return API.get(`/email-devices/${id}`) +} + +function getEmailerSendHistory( + emailerId: number, + params?: Record, +): Promise> { + if (params) { + return API.get(`/emailers/${emailerId}/send-history?${toUrlParams(params)}`) + } else { + return API.get(`/emailers/${emailerId}/send-history`) + } +} + +function createEmailDevice( + payload: CreateOrUpdateEmailDevice, +): Promise> { + return API.post('/email-devices', payload) +} + +function updateEmailDevice( + id: number, + payload: CreateOrUpdateEmailDevice, +): Promise> { + return API.put(`/email-devices/${id}`, payload) +} + +function patchEmailDevice( + id: number, + payload: PatchEmailDevice, +): Promise> { + return API.patch(`/email-devices/${id}`, payload) +} + +function 
deleteEmailDevice(id: number): Promise> { + return API.delete(`/email-devices/${id}`) +} + +function sendAttachmentEmail( + payload: SendAttachmentEmailsPayload, +): Promise> { + return API.post('/emailers/send-attachment', payload) +} + +export const emailerApi = { + createEmailDevice, + createEmailer, + deleteEmailDevice, + deleteEmailer, + getEmailDeviceById, + getEmailDevices, + getEmailerById, + getEmailerSendHistory, + getEmailers, + patchEmailDevice, + sendAttachmentEmail, + updateEmailDevice, + updateEmailer, +} + +export const emailerQueryKeys: Record = { + createEmailDevice: 'emailDevice.create', + createEmailer: 'emailer.create', + deleteEmailDevice: 'emailDevice.delete', + deleteEmailer: 'emailer.delete', + getEmailDeviceById: 'emailDevice.getById', + getEmailDevices: 'emailDevices.get', + getEmailerById: 'emailer.getById', + getEmailerSendHistory: 'emailer.sendHistory', + getEmailers: 'emailer.get', + patchEmailDevice: 'emailDevice.patch', + sendAttachmentEmail: 'emailer.sendAttachment', + updateEmailDevice: 'emailDevice.update', + updateEmailer: 'emailer.update', +} diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts index d97a9a4bc..f67ae63cf 100644 --- a/packages/api/src/index.ts +++ b/packages/api/src/index.ts @@ -1,6 +1,7 @@ export { authApi, authQueryKeys } from './auth' export { API, apiIsInitialized, checkUrl, initializeApi, isUrl } from './axios' export { bookClubApi, bookClubQueryKeys } from './bookClub' +export { emailerApi, emailerQueryKeys } from './emailer' export { epubApi, epubQueryKeys, getEpubResource, updateEpubProgress } from './epub' export { filesystemApi, filesystemQueryKeys } from './filesystem' export * from './job' diff --git a/packages/api/src/library.ts b/packages/api/src/library.ts index 1c625d238..9cc84e71b 100644 --- a/packages/api/src/library.ts +++ b/packages/api/src/library.ts @@ -117,6 +117,15 @@ export function updateExcludedUsers(id: string, user_ids: string[]) { return 
API.post(`/libraries/${id}/excluded-users`, { user_ids }) } +/** + * Start the analysis of a library by library id. + * + * @param id The id for the library to analyze + */ +export function startMediaAnalysis(id: string) { + API.post(`/libraries/${id}/analyze`) +} + export const libraryApi = { cleanLibrary, createLibrary, @@ -133,6 +142,7 @@ export const libraryApi = { patchLibraryThumbnail, regenerateThumbnails, scanLibary, + startMediaAnalysis, updateExcludedUsers, uploadLibraryThumbnail, visitLibrary, @@ -154,6 +164,7 @@ export const libraryQueryKeys: Record = { patchLibraryThumbnail: 'library.patchLibraryThumbnail', regenerateThumbnails: 'library.regenerateThumbnails', scanLibary: 'library.scanLibary', + startMediaAnalysis: 'library.startAnalysis', updateExcludedUsers: 'library.updateExcludedUsers', uploadLibraryThumbnail: 'library.uploadLibraryThumbnail', visitLibrary: 'library.visitLibrary', diff --git a/packages/api/src/log.ts b/packages/api/src/log.ts index 1d5c0768a..9b40ecf77 100644 --- a/packages/api/src/log.ts +++ b/packages/api/src/log.ts @@ -13,6 +13,15 @@ export function getLogs(params?: Record): Promise): Promise> { + if (params) { + const searchParams = toUrlParams(params) + return API.delete(`/logs?${searchParams.toString()}`) + } else { + return API.delete('/logs') + } +} + export function getLogFileMeta(): Promise> { return API.get('/logs/file/info') } @@ -23,12 +32,14 @@ export function clearLogFile() { export const logApi = { clearLogFile, + clearPersistedLogs, getLogFileMeta, getLogs, } export const logQueryKeys: Record = { clearLogFile: 'log.clearLogFile', + clearPersistedLogs: 'log.clearPersistedLogs', getLogFileMeta: 'log.getLogFileMeta', getLogs: 'log.getLogs', } diff --git a/packages/api/src/media.ts b/packages/api/src/media.ts index 46f31192f..2f16bd86a 100644 --- a/packages/api/src/media.ts +++ b/packages/api/src/media.ts @@ -98,6 +98,15 @@ export function putMediaCompletion( return API.put(`/media/${id}/progress/complete`, payload) } 
+/** + * Start the analysis of a book by media id. + * + * @param id The id for the book to analyze + */ +export function startMediaAnalysis(id: string) { + API.post(`/media/${id}/analyze`) +} + export const mediaApi = { getInProgressMedia, getMedia, @@ -110,6 +119,7 @@ export const mediaApi = { getRecentlyAddedMedia, patchMediaThumbnail, putMediaCompletion, + startMediaAnalysis, updateMediaProgress, uploadMediaThumbnail, } @@ -126,6 +136,7 @@ export const mediaQueryKeys: Record = { getRecentlyAddedMedia: 'media.getRecentlyAdded', patchMediaThumbnail: 'media.patchThumbnail', putMediaCompletion: 'media.putCompletion', + startMediaAnalysis: 'media.startAnalysis', updateMediaProgress: 'media.updateProgress', uploadMediaThumbnail: 'media.uploadThumbnail', } diff --git a/packages/api/src/series.ts b/packages/api/src/series.ts index 78c501079..191e31aee 100644 --- a/packages/api/src/series.ts +++ b/packages/api/src/series.ts @@ -81,6 +81,15 @@ export function uploadSeriesThumbnail(id: string, file: File) { }) } +/** + * Start the analysis of a series by series id. 
+ * + * @param id The id for the series to analyze + */ +export function startMediaAnalysis(id: string) { + API.post(`/series/${id}/analyze`) +} + export const seriesApi = { getNextInSeries, getNextMediaInSeries, @@ -91,6 +100,7 @@ export const seriesApi = { getSeriesThumbnail, getSeriesWithCursor, patchSeriesThumbnail, + startMediaAnalysis, uploadSeriesThumbnail, } @@ -104,5 +114,6 @@ export const seriesQueryKeys: Record = { getSeriesThumbnail: 'series.getSeriesThumbnail', getSeriesWithCursor: 'series.getSeriesWithCursor', patchSeriesThumbnail: 'series.patchSeriesThumbnail', + startMediaAnalysis: 'series.Analysis', uploadSeriesThumbnail: 'series.uploadSeriesThumbnail', } diff --git a/packages/api/src/utils.ts b/packages/api/src/utils.ts index c18d0cdab..868964a15 100644 --- a/packages/api/src/utils.ts +++ b/packages/api/src/utils.ts @@ -66,19 +66,21 @@ export const toObjectParams = ( return {} as T } + const newParams = new URLSearchParams(params.toString()) + for (const key of ignoreKeys || []) { - params.delete(key) + newParams.delete(key) } if (removeEmpty) { - for (const [key, value] of params.entries()) { + for (const [key, value] of newParams.entries()) { if (!value) { - params.delete(key) + newParams.delete(key) } } } - return qs.parse(params.toString(), { ignoreQueryPrefix: true }) as T + return qs.parse(newParams.toString(), { ignoreQueryPrefix: true }) as T } export const mergeCursorParams = ({ diff --git a/packages/browser/package.json b/packages/browser/package.json index b09bf6eae..30aa85fd1 100644 --- a/packages/browser/package.json +++ b/packages/browser/package.json @@ -1,10 +1,13 @@ { "name": "@stump/browser", - "version": "0.0.3", + "version": "0.0.4", "description": "", "license": "MIT", "private": true, "main": "src/index.ts", + "scripts": { + "lint": "eslint --ext .ts,.tsx,.cts,.mts,.js,.jsx,.cjs,.mjs --fix --report-unused-disable-directives --no-error-on-unmatched-pattern --exit-on-fatal-error --ignore-path ../../.gitignore ." 
+ }, "exports": { ".": "./src/index.ts", "./assets/*": "./src/assets/*" @@ -44,8 +47,9 @@ "react-hotkeys-hook": "^4.5.0", "react-i18next": "^14.1.0", "react-markdown": "^9.0.1", - "react-router": "^6.22.3", - "react-router-dom": "^6.22.3", + "react-router": "^6.23.0", + "react-router-dom": "^6.23.0", + "react-scrollbar-size": "^5.0.0", "react-swipeable": "^7.0.1", "react-virtualized-auto-sizer": "^1.0.24", "react-virtuoso": "^4.7.8", @@ -72,6 +76,6 @@ "vite": "^5.2.8" }, "peerDependencies": { - "react-router-dom": "^6.22.3" + "react-router-dom": "^6.23.0" } } diff --git a/packages/browser/public/assets/svg/mountain.svg b/packages/browser/public/assets/svg/mountain.svg new file mode 100644 index 000000000..c0d92aa6c --- /dev/null +++ b/packages/browser/public/assets/svg/mountain.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/browser/src/App.tsx b/packages/browser/src/App.tsx index 55f129aa7..703c29bb4 100644 --- a/packages/browser/src/App.tsx +++ b/packages/browser/src/App.tsx @@ -113,7 +113,7 @@ function RouterContainer(props: StumpClientProps) { setDiscordPresence={setDiscordPresence} setUseDiscordPresence={setUseDiscordPresence} > - {IS_DEVELOPMENT && } + {IS_DEVELOPMENT && } Stump diff --git a/packages/browser/src/AppLayout.tsx b/packages/browser/src/AppLayout.tsx index f797ea256..3a247b4c4 100644 --- a/packages/browser/src/AppLayout.tsx +++ b/packages/browser/src/AppLayout.tsx @@ -1,6 +1,6 @@ import { isAxiosError } from '@stump/api' import { useAuthQuery, useCoreEventHandler } from '@stump/client' -import { cx } from '@stump/components' +import { cn, cx } from '@stump/components' import { UserPermission, UserPreferences } from '@stump/types' import { Suspense, useCallback, useMemo } from 'react' import { Navigate, Outlet, useLocation, useNavigate } from 'react-router-dom' @@ -128,8 +128,16 @@ export function AppLayout() { {!hideSidebar && -
-
+
+
{!!storeUser.user_preferences?.show_query_indicator && } }> diff --git a/packages/browser/src/AppRouter.tsx b/packages/browser/src/AppRouter.tsx index 8aec95865..09968a72e 100644 --- a/packages/browser/src/AppRouter.tsx +++ b/packages/browser/src/AppRouter.tsx @@ -4,21 +4,21 @@ import React, { lazy } from 'react' import { Route, Routes } from 'react-router-dom' import { AppLayout } from './AppLayout.tsx' -import BookRouter from './scenes/book/BookRouter.tsx' -import BookClubRouter from './scenes/book-club/BookClubRouter.tsx' -import LibraryRouter from './scenes/library/LibraryRouter.tsx' -import OnBoardingRouter from './scenes/onboarding/OnBoardingRouter.tsx' -import SeriesRouter from './scenes/series/SeriesRouter.tsx' -import SettingsRouter from './scenes/settings/SettingsRouter.tsx' -import { SmartListRouter } from './scenes/smart-list/index.ts' +import { BookRouter } from './scenes/book' +import { BookClubRouter } from './scenes/bookClub' +import { LibraryRouter } from './scenes/library' +import { OnBoardingRouter } from './scenes/onboarding' +import { SeriesRouter } from './scenes/series' +import { SettingsRouter } from './scenes/settings' +import { SmartListRouter } from './scenes/smartList' import { useAppStore, useUserStore } from './stores' -const HomeScene = lazy(() => import('./scenes/home/HomeScene.tsx')) +const HomeScene = lazy(() => import('./scenes/home')) const FourOhFour = lazy(() => import('./scenes/error/FourOhFour.tsx')) const ServerConnectionErrorScene = lazy( () => import('./scenes/error/ServerConnectionErrorScene.tsx'), ) -const LoginOrClaimScene = lazy(() => import('./scenes/auth/LoginOrClaimScene.tsx')) +const LoginOrClaimScene = lazy(() => import('./scenes/auth')) export function AppRouter() { const locale = useUserStore((store) => store.userPreferences?.locale) diff --git a/packages/browser/src/components/GenericEmptyState.tsx b/packages/browser/src/components/GenericEmptyState.tsx index c551956aa..41dcbcd51 100644 --- 
a/packages/browser/src/components/GenericEmptyState.tsx +++ b/packages/browser/src/components/GenericEmptyState.tsx @@ -7,22 +7,25 @@ type Props = { subtitle?: string containerClassName?: string contentClassName?: string + leftAlign?: boolean } export default function GenericEmptyState({ title, subtitle, containerClassName, contentClassName, + leftAlign, }: Props) { return (
-
+
{title} {subtitle && ( diff --git a/packages/browser/src/components/TableOrGridLayout.tsx b/packages/browser/src/components/TableOrGridLayout.tsx new file mode 100644 index 000000000..e8b0ba4aa --- /dev/null +++ b/packages/browser/src/components/TableOrGridLayout.tsx @@ -0,0 +1,40 @@ +import { IconButton, ToolTip } from '@stump/components' +import { LayoutGrid, Table } from 'lucide-react' +import React from 'react' + +type Props = { + layout: 'GRID' | 'TABLE' + setLayout: (layout: 'GRID' | 'TABLE') => void +} + +export default function TableOrGridLayout({ layout, setLayout }: Props) { + return ( +
+ + setLayout('GRID')} + disabled={layout === 'GRID'} + > + + + + + + setLayout('TABLE')} + disabled={layout === 'TABLE'} + > + + + + + ) +} diff --git a/packages/browser/src/components/media/MediaCard.tsx b/packages/browser/src/components/book/BookCard.tsx similarity index 93% rename from packages/browser/src/components/media/MediaCard.tsx rename to packages/browser/src/components/book/BookCard.tsx index 8263099f7..4bd297839 100644 --- a/packages/browser/src/components/media/MediaCard.tsx +++ b/packages/browser/src/components/book/BookCard.tsx @@ -5,11 +5,11 @@ import { FileStatus, Media } from '@stump/types' import pluralize from 'pluralize' import { useMemo } from 'react' -import paths from '../../paths' -import { formatBytes } from '../../utils/format' -import { prefetchMediaPage } from '../../utils/prefetch' +import paths from '@/paths' +import { formatBytes } from '@/utils/format' +import { prefetchMediaPage } from '@/utils/prefetch' -export type MediaCardProps = { +export type BookCardProps = { media: Media readingLink?: boolean fullWidth?: boolean @@ -19,13 +19,13 @@ export type MediaCardProps = { type EntityCardProps = React.ComponentPropsWithoutRef -export default function MediaCard({ +export default function BookCard({ media, readingLink, fullWidth, variant = 'default', onSelect, -}: MediaCardProps) { +}: BookCardProps) { const isCoverOnly = variant === 'cover' const handleHover = () => { diff --git a/packages/browser/src/scenes/series/MediaGrid.tsx b/packages/browser/src/components/book/BookGrid.tsx similarity index 61% rename from packages/browser/src/scenes/series/MediaGrid.tsx rename to packages/browser/src/components/book/BookGrid.tsx index f9b632a52..c0b345e0a 100644 --- a/packages/browser/src/scenes/series/MediaGrid.tsx +++ b/packages/browser/src/components/book/BookGrid.tsx @@ -1,22 +1,23 @@ -import { Card, CardGrid } from '@stump/components' +import { CardGrid } from '@stump/components' import type { Media } from '@stump/types' import 
GenericEmptyState from '@/components/GenericEmptyState' -import MediaCard from '@/components/media/MediaCard' + +import BookCard from './BookCard' type Props = { isLoading: boolean - media?: Media[] + books?: Media[] hasFilters?: boolean onSelect?: (media: Media) => void } - -export default function MediaGrid({ media, isLoading, hasFilters, onSelect }: Props) { +// TODO: translate +export default function BookGrid({ books, isLoading, hasFilters, onSelect }: Props) { if (isLoading) { return null - } else if (!media || !media.length) { + } else if (!books || !books.length) { return ( - +
- +
) } return ( - {media.map((m) => ( - + {books.map((m) => ( + ))} ) diff --git a/packages/browser/src/components/media/BookSearch.tsx b/packages/browser/src/components/book/BookSearch.tsx similarity index 95% rename from packages/browser/src/components/media/BookSearch.tsx rename to packages/browser/src/components/book/BookSearch.tsx index a8922dee7..b06f05718 100644 --- a/packages/browser/src/components/media/BookSearch.tsx +++ b/packages/browser/src/components/book/BookSearch.tsx @@ -5,9 +5,9 @@ import React, { useCallback, useEffect, useMemo } from 'react' import useIsInView from '@/hooks/useIsInView' -import MediaGrid from '../../scenes/series/MediaGrid' import { FilterToolBar, useFilterContext } from '../filters' import Pagination from '../Pagination' +import BookGrid from './BookGrid' type Props = { page: number @@ -17,6 +17,8 @@ type Props = { showFilters?: boolean } +// TODO(bookclub): Refactor this component + /** * A component that renders a paginated grid of books with a search bar and (optionally) * a filter slide over. Must be used within a `FilterProvider`. @@ -75,7 +77,7 @@ export default function BookSearch({ page, page_size, setPage, onBookSelect, sho return ( <> -
+
)} - diff --git a/packages/browser/src/components/media/BookSearchOverlay.tsx b/packages/browser/src/components/book/BookSearchOverlay.tsx similarity index 97% rename from packages/browser/src/components/media/BookSearchOverlay.tsx rename to packages/browser/src/components/book/BookSearchOverlay.tsx index 7e41c216c..53408b297 100644 --- a/packages/browser/src/components/media/BookSearchOverlay.tsx +++ b/packages/browser/src/components/book/BookSearchOverlay.tsx @@ -17,6 +17,7 @@ type Props = { sheetProps?: SheetProps } +// TODO(bookclub): Refactor this component export default function BookSearchOverlay({ onBookSelect, sheetProps }: Props) { const [isOpen, setIsOpen] = useState(false) const [page, setPage] = useState(1) diff --git a/packages/browser/src/components/media/MediaList.tsx b/packages/browser/src/components/book/MediaList.tsx similarity index 100% rename from packages/browser/src/components/media/MediaList.tsx rename to packages/browser/src/components/book/MediaList.tsx diff --git a/packages/browser/src/components/book/index.ts b/packages/browser/src/components/book/index.ts new file mode 100644 index 000000000..d21090f82 --- /dev/null +++ b/packages/browser/src/components/book/index.ts @@ -0,0 +1,2 @@ +export { default as BookURLFilterContainer } from '../filters/URLFilterContainer' +export { BookTable } from './table' diff --git a/packages/browser/src/scenes/smart-list/items/table/BookLinksCell.tsx b/packages/browser/src/components/book/table/BookLinksCell.tsx similarity index 100% rename from packages/browser/src/scenes/smart-list/items/table/BookLinksCell.tsx rename to packages/browser/src/components/book/table/BookLinksCell.tsx diff --git a/packages/browser/src/components/book/table/BookTable.tsx b/packages/browser/src/components/book/table/BookTable.tsx new file mode 100644 index 000000000..5875b7e49 --- /dev/null +++ b/packages/browser/src/components/book/table/BookTable.tsx @@ -0,0 +1,49 @@ +import { Media } from '@stump/types' +import { 
OnChangeFn, SortingState } from '@tanstack/react-table' +import React, { useCallback, useMemo } from 'react' + +import { orderingToTableSort, tableSortToOrdering, useFilterContext } from '@/components/filters' +import { EntityTable, EntityTableProps } from '@/components/table' +import { useBooksLayout } from '@/stores/layout' + +import { buildBookColumns } from '.' +import { defaultColumns } from './columns' + +type Props = Omit, 'columns'> + +export default function BookTable(props: Props) { + const configuration = useBooksLayout((state) => ({ + columns: state.columns, + })) + const { ordering, setOrdering } = useFilterContext() + + const columns = useMemo( + () => + configuration.columns?.length ? buildBookColumns(configuration.columns) : defaultColumns, + [configuration.columns], + ) + + const handleSetSorting: OnChangeFn = useCallback( + (updater) => { + if (typeof updater === 'function') { + setOrdering(tableSortToOrdering(updater(orderingToTableSort(ordering)))) + } else { + setOrdering(tableSortToOrdering(updater)) + } + }, + [ordering, setOrdering], + ) + + const sorting = useMemo(() => orderingToTableSort(ordering), [ordering]) + + return ( + + ) +} diff --git a/packages/browser/src/scenes/smart-list/items/table/CoverImageCell.tsx b/packages/browser/src/components/book/table/CoverImageCell.tsx similarity index 100% rename from packages/browser/src/scenes/smart-list/items/table/CoverImageCell.tsx rename to packages/browser/src/components/book/table/CoverImageCell.tsx diff --git a/packages/browser/src/components/book/table/columns.tsx b/packages/browser/src/components/book/table/columns.tsx new file mode 100644 index 000000000..75b7d5a2a --- /dev/null +++ b/packages/browser/src/components/book/table/columns.tsx @@ -0,0 +1,405 @@ +import { Link, Text } from '@stump/components' +import { Media, ReactTableColumnSort } from '@stump/types' +import { ColumnDef, createColumnHelper } from '@tanstack/react-table' +import dayjs from 'dayjs' + +import paths from 
'@/paths' + +import BookLinksCell from './BookLinksCell' +import CoverImageCell from './CoverImageCell' + +const columnHelper = createColumnHelper() + +const coverColumn = columnHelper.display({ + cell: ({ + row: { + original: { id, name, metadata }, + }, + }) => , + enableGlobalFilter: true, + header: () => ( + + Cover + + ), + id: 'cover', + size: 60, +}) + +const nameColumn = columnHelper.accessor(({ name, metadata }) => metadata?.title || name, { + cell: ({ + getValue, + row: { + original: { id }, + }, + }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + enableSorting: true, + header: () => ( + + Name + + ), + id: 'name', + minSize: 285, +}) + +const pagesColumn = columnHelper.accessor('pages', { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + enableSorting: true, + header: () => ( + + Pages + + ), + id: 'pages', + size: 60, +}) + +const publishedColumn = columnHelper.accessor( + ({ metadata }) => { + const { year, month, day } = metadata || {} + + // TODO: validation + if (!!year && !!month && !!day) { + return dayjs(`${year}-${month}-${day}`).format('YYYY-MM-DD') + } else if (!!year && !!month) { + return dayjs(`${year}-${month}`).format('YYYY-MM') + } else if (year) { + return dayjs(`${year}`).format('YYYY') + } + + return '' + }, + { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Published + + ), + id: 'published', + }, +) + +const addedColumn = columnHelper.accessor( + ({ created_at }) => dayjs(created_at).format('M/D/YYYY, HH:mm:ss'), + { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + enableSorting: true, + header: () => ( + + Added + + ), + id: 'added', + }, +) + +const publisherColumn = columnHelper.accessor(({ metadata }) => metadata?.publisher, { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 
0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Publisher + + ), + id: 'publisher', +}) + +const ageRatingColumn = columnHelper.accessor(({ metadata }) => metadata?.age_rating, { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Age Rating + + ), + id: 'age_rating', +}) + +const genresColumn = columnHelper.accessor(({ metadata }) => metadata?.genre?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Genres + + ), + id: 'genres', +}) + +const volumeColumn = columnHelper.accessor(({ metadata }) => metadata?.volume, { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Volume + + ), + id: 'volume', +}) + +const inkersColumn = columnHelper.accessor(({ metadata }) => metadata?.inkers?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Inkers + + ), + id: 'inkers', +}) + +const writersColumn = columnHelper.accessor(({ metadata }) => metadata?.writers?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Writers + + ), + id: 'writers', +}) + +const pencillersColumn = columnHelper.accessor(({ metadata }) => metadata?.pencillers?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Pencillers + + ), + id: 'pencillers', +}) + +const coloristsColumn = 
columnHelper.accessor(({ metadata }) => metadata?.colorists?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Colorists + + ), + id: 'colorists', +}) + +const letterersColumn = columnHelper.accessor(({ metadata }) => metadata?.letterers?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Letterers + + ), + id: 'letterers', +}) + +const artistsColumn = columnHelper.accessor(({ metadata }) => metadata?.cover_artists?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Artists + + ), + id: 'artists', +}) + +const charactersColumn = columnHelper.accessor(({ metadata }) => metadata?.characters?.join(', '), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Characters + + ), + id: 'characters', +}) + +const linksColumn = columnHelper.accessor(({ metadata }) => metadata?.links?.join(', '), { + cell: ({ + row: { + original: { metadata }, + }, + }) => , + + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Links + + ), + id: 'links', +}) + +export type MediaTableColumnDef = ColumnDef + +/** + * A map of all columns that can be selected for the table. The key is the column ID, and the value is the column, itself. 
+ */ +export const columnMap = { + added: addedColumn, + age_rating: ageRatingColumn, + artists: artistsColumn, + characters: charactersColumn, + colorists: coloristsColumn, + cover: coverColumn, + genres: genresColumn, + inkers: inkersColumn, + letterers: letterersColumn, + links: linksColumn, + name: nameColumn, + pages: pagesColumn, + pencillers: pencillersColumn, + published: publishedColumn, + publisher: publisherColumn, + volume: volumeColumn, + writers: writersColumn, +} as Record> + +// TODO: localization keys instead of hardcoded strings +export const columnOptionMap: Record = { + added: 'Added', + age_rating: 'Age Rating', + artists: 'Artists', + characters: 'Characters', + colorists: 'Colorists', + cover: 'Cover', + genres: 'Genres', + inkers: 'Inkers', + letterers: 'Letterers', + links: 'Links', + name: 'Name', + pages: 'Pages', + pencillers: 'Pencillers', + published: 'Published', + publisher: 'Publisher', + volume: 'Volume', + writers: 'Writers', +} + +export const defaultColumns = [ + coverColumn, + nameColumn, + pagesColumn, + publishedColumn, + addedColumn, +] as ColumnDef[] + +export const defaultColumnSort: ReactTableColumnSort[] = defaultColumns.map((column, idx) => ({ + id: column.id || '', + position: idx, +})) + +/** + * A helper function to build the columns for the table based on the stored column selection. If + * no columns are selected, or if the selection is empty, the default columns will be used. 
+ */ +export const buildColumns = (columns?: ReactTableColumnSort[]) => { + if (!columns || columns.length === 0) { + return defaultColumns + } + + const sortedColumns = columns.sort((a, b) => a.position - b.position) + const selectedColumnIds = sortedColumns.map(({ id }) => id) + + return selectedColumnIds + .map((id) => columnMap[id as keyof typeof columnMap]) + .filter(Boolean) as ColumnDef[] +} diff --git a/packages/browser/src/components/book/table/index.ts b/packages/browser/src/components/book/table/index.ts new file mode 100644 index 000000000..9c1a9f28f --- /dev/null +++ b/packages/browser/src/components/book/table/index.ts @@ -0,0 +1,7 @@ +export { default as BookTable } from './BookTable' +export { + columnOptionMap as bookTableColumnMap, + buildColumns as buildBookColumns, + defaultColumns as defaultBookColumns, + defaultColumnSort as defaultBookColumnSort, +} from './columns' diff --git a/packages/browser/src/components/container/SceneContainer.tsx b/packages/browser/src/components/container/SceneContainer.tsx index e60ffe9d5..7d031f988 100644 --- a/packages/browser/src/components/container/SceneContainer.tsx +++ b/packages/browser/src/components/container/SceneContainer.tsx @@ -28,7 +28,7 @@ const SceneContainer = forwardRef( // NOTE: adding padding bottom because of the overflow-hidden on the html element and the fixed // topbar. This is... annoying. 
className={cn( - 'relative flex h-full w-full flex-col p-4 pb-16 md:pb-4', + 'relative flex w-full flex-col p-4 pb-16 md:pb-4', { 'mx-auto flex-1': preferTopBar && !unsetConstraints, }, diff --git a/packages/browser/src/components/explorer/FileExplorerHeader.tsx b/packages/browser/src/components/explorer/FileExplorerHeader.tsx index cc3e4ae93..e9622b3d0 100644 --- a/packages/browser/src/components/explorer/FileExplorerHeader.tsx +++ b/packages/browser/src/components/explorer/FileExplorerHeader.tsx @@ -14,10 +14,10 @@ export default function FileExplorerHeader() { const basename = useMemo(() => currentPath?.split('/').pop() ?? '', [currentPath]) return ( -
+
diff --git a/packages/browser/src/components/explorer/FileExplorerProvider.tsx b/packages/browser/src/components/explorer/FileExplorerProvider.tsx index 2fa67d9f6..b327b3a19 100644 --- a/packages/browser/src/components/explorer/FileExplorerProvider.tsx +++ b/packages/browser/src/components/explorer/FileExplorerProvider.tsx @@ -17,6 +17,8 @@ type Props = { rootPath: string } +// TODO: fix scrolling issues after adjusting scene container... + export default function FileExplorerProvider({ rootPath }: Props) { const navigate = useNavigate() const isMobile = useMediaMatch('(max-width: 768px)') diff --git a/packages/browser/src/components/explorer/table/BookNameCell.tsx b/packages/browser/src/components/explorer/table/BookNameCell.tsx deleted file mode 100644 index 594b1ec7c..000000000 --- a/packages/browser/src/components/explorer/table/BookNameCell.tsx +++ /dev/null @@ -1,29 +0,0 @@ -import { Link } from '@stump/components' -import { DirectoryListingFile } from '@stump/types' -import React, { useEffect, useState } from 'react' - -import paths from '@/paths' - -import { getBook } from '../FileThumbnail' - -type Props = { - file: DirectoryListingFile -} -export default function BookNameCell({ file }: Props) { - const [id, setId] = useState() - - useEffect(() => { - getBook(file.path).then((book) => { - setId(book?.id) - }) - }, [file.path]) - - return ( - - {file.name} - - ) -} diff --git a/packages/browser/src/components/explorer/table/FileTable.tsx b/packages/browser/src/components/explorer/table/FileTable.tsx index 4e00b580d..b280cbb5b 100644 --- a/packages/browser/src/components/explorer/table/FileTable.tsx +++ b/packages/browser/src/components/explorer/table/FileTable.tsx @@ -4,12 +4,17 @@ import { createColumnHelper, flexRender, getCoreRowModel, + getSortedRowModel, + SortDirection, + SortingState, useReactTable, } from '@tanstack/react-table' -import React, { useMemo } from 'react' +import React, { useMemo, useState } from 'react' import AutoSizer from 
'react-virtualized-auto-sizer' import { useWindowSize } from 'rooks' +import { SortIcon } from '@/components/table' + import { useFileExplorerContext } from '../context' import FileThumbnail from '../FileThumbnail' @@ -22,7 +27,7 @@ const baseColumns = [ }, }) => , header: () => ( - + Cover ), @@ -35,22 +40,19 @@ export default function FileTable() { const { files, onSelect } = useFileExplorerContext() const { innerWidth } = useWindowSize() + const [sorting, setSorting] = useState([]) + const columns = useMemo( () => [ ...baseColumns.slice(0, 1), columnHelper.accessor('name', { cell: ({ row: { original: file }, getValue }) => ( - onSelect(file)} - > + onSelect(file)}> {getValue()} ), header: () => ( - + Name ), @@ -67,6 +69,11 @@ export default function FileTable() { size: 40, }, getCoreRowModel: getCoreRowModel(), + getSortedRowModel: getSortedRowModel(), + onSortingChange: setSorting, + state: { + sorting, + }, }) const { rows } = table.getRowModel() @@ -111,6 +118,12 @@ export default function FileTable() { }} > {flexRender(header.column.columnDef.header, header.getContext())} + + {isSortable && ( + + )} ) diff --git a/packages/browser/src/components/filters/FilterHeader.tsx b/packages/browser/src/components/filters/FilterHeader.tsx new file mode 100644 index 000000000..98f9dd548 --- /dev/null +++ b/packages/browser/src/components/filters/FilterHeader.tsx @@ -0,0 +1,68 @@ +import React from 'react' + +import { useFilterContext } from './context' +import Search from './Search' + +type Props = { + /** + * Whether the search is being fetched from the server + */ + isSearching?: boolean + /** + * Whether the search input should be disabled + */ + isSearchDisabled?: boolean + /** + * Placeholder for the search input + */ + searchPlaceholder?: string + /** + * The controls for adjusting the order of the items + */ + orderControls?: React.ReactNode + /** + * The controls for adjusting the filters + */ + filterControls?: React.ReactNode + /** + * The controls for 
adjusting the layout, i.e. GRID or TABLE + */ + layoutControls?: React.ReactNode +} + +export default function FilterHeader({ + isSearching, + isSearchDisabled, + searchPlaceholder, + layoutControls, + orderControls, + filterControls, +}: Props) { + const { filters, setFilter, removeFilter } = useFilterContext() + + return ( +
+ { + if (value) { + setFilter('search', value) + } else { + removeFilter('search') + } + }} + isLoading={isSearching} + isDisabled={isSearchDisabled} + /> + +
+
+ {orderControls} + {filterControls} +
+ {layoutControls} +
+
+ ) +} diff --git a/packages/browser/src/components/filters/FilterToolBar.tsx b/packages/browser/src/components/filters/FilterToolBar.tsx index a331f3239..a378fe1fd 100644 --- a/packages/browser/src/components/filters/FilterToolBar.tsx +++ b/packages/browser/src/components/filters/FilterToolBar.tsx @@ -38,6 +38,7 @@ type Props = { isDisabled?: boolean } +// TODO: Remove /** * A component that renders a set of filter-related components within a header. */ diff --git a/packages/browser/src/components/filters/Search.tsx b/packages/browser/src/components/filters/Search.tsx index ede6fb9cd..c1c7b597c 100644 --- a/packages/browser/src/components/filters/Search.tsx +++ b/packages/browser/src/components/filters/Search.tsx @@ -1,4 +1,4 @@ -import { Input, ProgressSpinner, usePreviousIsDifferent } from '@stump/components' +import { cn, Input, ProgressSpinner, usePreviousIsDifferent } from '@stump/components' import { SearchIcon } from 'lucide-react' import React, { useEffect, useState } from 'react' import { useDebouncedValue } from 'rooks' @@ -42,6 +42,7 @@ export default function Search({ isLoading, isDisabled, }: Props) { + const [isFocused, setIsFocused] = useState(false) // we need to debounce the onChange function so we only update once the user has stopped typing // this is a common pattern for search inputs const [value, setValue] = useState(initialValue) @@ -63,14 +64,21 @@ export default function Search({ setIsFocused(true)} + onBlur={() => setIsFocused(false)} onChange={(e) => setValue(e.target.value)} + placeholder={placeholder || 'Search'} value={value} + fullWidth + size="sm" + variant="activeGhost" leftDecoration={} rightDecoration={showLoader ? 
: null} - variant="ghost" - className="flex-grow" + className={cn( + 'flex-grow transition-[width] duration-200 ease-in-out', + { 'w-full flex-grow sm:w-2/5': isFocused }, + { 'w-2/3 cursor-pointer pr-0 sm:w-3/5 md:w-1/5': !isFocused }, + )} disabled={isDisabled} /> ) diff --git a/packages/browser/src/components/filters/URLFilterContainer.tsx b/packages/browser/src/components/filters/URLFilterContainer.tsx new file mode 100644 index 000000000..bec056ec2 --- /dev/null +++ b/packages/browser/src/components/filters/URLFilterContainer.tsx @@ -0,0 +1,104 @@ +import { cn } from '@stump/components' +import React, { forwardRef, useEffect, useMemo } from 'react' +import useScrollbarSize from 'react-scrollbar-size' +import { useMediaMatch } from 'rooks' +import { create } from 'zustand' + +import { SIDEBAR_WIDTH } from '@/components/navigation/sidebar' +import { TablePaginationProps } from '@/components/table' +import { usePreferences } from '@/hooks' + +import URLPageSize from './URLPageSize' +import URLPagination from './URLPagination' + +type Props = { + tableControls?: React.ReactNode +} & TablePaginationProps & + Pick, 'className' | 'children'> + +// FIXME: Looks awkward with TopBar layout because the constrained width... I think this just means the top-level +// container should be moved lower in the tree for any explore-like routes... +// However, this is also dependent on the width limit imposed by the user preference +const URLFilterContainer = forwardRef( + ({ children, className, tableControls, ...paginationProps }, ref) => { + const { + preferences: { enable_hide_scrollbar, primary_navigation_mode }, + } = usePreferences() + const { width } = useScrollbarSize() + const { storedWidth, storeWidth } = useWidthStore((state) => ({ + storeWidth: state.setWidth, + storedWidth: state.width, + })) + + /** + * An effect to update the stored width with any *non-zero* width value. + * This is necessary because the scrollbar width flickers between 0 and the + * actual width. 
A bit annoying + */ + useEffect(() => { + if (width && storedWidth !== width) { + storeWidth(width) + } + }, [storedWidth, storeWidth, width]) + + /** + * A computed width which factors the actual scroll state of the main content. + * If the main content has a scroll height greater than the client height, we + * can safely assume that the scrollbar is visible and we should account for it. + */ + const adjustedWidth = useMemo(() => { + const scrollRoot = document.getElementById('main') + const scrollRootScrollHeight = scrollRoot?.scrollHeight ?? 0 + const scrollRootClientHeight = scrollRoot?.clientHeight ?? 0 + const hasScroll = scrollRootScrollHeight > scrollRootClientHeight + + return hasScroll ? width || storedWidth : 0 + }, [width, storedWidth]) + + const isMobile = useMediaMatch('(max-width: 768px)') + /** + * The value used for computing the right position of the pagination controls. + * If the scrollbar is hidden, we don't need to account for it. + */ + const scrollbarWidth = enable_hide_scrollbar ? 0 : adjustedWidth + + return ( +
+ {children} + +
+
+ {tableControls} + +
+ +
+
+ ) + }, +) +URLFilterContainer.displayName = 'URLFilterContainer' + +export default URLFilterContainer + +type WidthStore = { + width: number + setWidth: (width: number) => void +} +const useWidthStore = create((set) => ({ + setWidth: (width) => set({ width }), + width: 0, +})) diff --git a/packages/browser/src/components/filters/URLFilterDrawer.tsx b/packages/browser/src/components/filters/URLFilterDrawer.tsx new file mode 100644 index 000000000..9e3219805 --- /dev/null +++ b/packages/browser/src/components/filters/URLFilterDrawer.tsx @@ -0,0 +1,95 @@ +import { Button, IconButton, Sheet, ToolTip } from '@stump/components' +import { Bolt } from 'lucide-react' +import React, { useCallback, useMemo, useState } from 'react' +import { useMediaMatch } from 'rooks' + +import { clearFilters, getActiveFilterCount, useFilterContext } from '.' +import { FilterableEntity, MediaFilterForm, SeriesFilterForm } from './form' + +type Props = { + entity: FilterableEntity +} + +export default function URLFilterDrawer({ entity }: Props) { + const { filters, setFilters } = useFilterContext() + + const [isOpen, setIsOpen] = useState(false) + + const isMobile = useMediaMatch('(max-width: 768px)') + // We don't apply search within the slideover, so we want to exclude it from the count. If any + // other 'filters' are added outside the context of this component we need to account for them, as well. + const activeFilters = useMemo(() => getActiveFilterCount(filters || {}), [filters]) + + /** + * A callback to clear all filters. Certain filters are excluded from this operation, + * such as the search filter. See clearFilters for more information. 
+ */ + const handleClearFilters = useCallback( + () => setFilters(clearFilters(filters || {})), + [filters, setFilters], + ) + + const renderForm = () => { + if (entity === 'media') { + return + } else if (entity === 'series') { + return + } else { + console.debug('Not implemented yet') + return null + } + } + + return ( + setIsOpen(false)} + onOpen={() => setIsOpen(true)} + title="Configure URL filters" + description="Adjust the filters applied to the current view" + trigger={ + + + setIsOpen(true)} + > + + + + {activeFilters > 0 && ( + + + {activeFilters} + + + )} + + + } + size={isMobile ? 'xl' : 'default'} + footer={ +
+ + + +
+ } + > + {renderForm()} +
+ ) +} diff --git a/packages/browser/src/components/filters/URLOrdering.tsx b/packages/browser/src/components/filters/URLOrdering.tsx new file mode 100644 index 000000000..72c79464e --- /dev/null +++ b/packages/browser/src/components/filters/URLOrdering.tsx @@ -0,0 +1,65 @@ +import { IconButton, Popover, ToolTip } from '@stump/components' +import { ArrowUpDown } from 'lucide-react' +import React, { useCallback, useState } from 'react' +import { useMediaMatch } from 'rooks' + +import { useFilterContext } from './context' +import { FilterableEntity, OrderByDirection, OrderBySelect } from './form' + +type Props = { + entity: FilterableEntity +} + +export default function URLOrdering({ entity }: Props) { + const [isOpen, setIsOpen] = useState(false) + const isMobile = useMediaMatch('(max-width: 768px)') + + const { ordering, setFilter } = useFilterContext() + + /** + * A callback to handle the change of the ordering field. + */ + const handleChangeOrderBy = useCallback( + (value: string) => setFilter('order_by', value), + [setFilter], + ) + + /** + * A callback to handle the change of the ordering direction. + * + * @param value The new ordering direction. 
+ */ + const handleChangeDirection = useCallback( + (value: 'asc' | 'desc') => setFilter('direction', value), + [setFilter], + ) + + return ( + + + + + + + + + + + + + + + ) +} diff --git a/packages/browser/src/components/filters/URLPageSize.tsx b/packages/browser/src/components/filters/URLPageSize.tsx new file mode 100644 index 000000000..5a7728870 --- /dev/null +++ b/packages/browser/src/components/filters/URLPageSize.tsx @@ -0,0 +1,48 @@ +import { Input, Text } from '@stump/components' +import React, { useCallback, useEffect, useState } from 'react' + +import { useFilterContext } from './context' + +export default function URLPageSize() { + const { + pagination: { page_size }, + setFilter, + } = useFilterContext() + const [inputPageSize, setInputPageSize] = useState(page_size) + + const handleInputChange = (e: React.ChangeEvent) => { + const parsed = parseInt(e.target.value) + setInputPageSize(isNaN(parsed) ? undefined : parsed) + } + + const handleInputSubmit = useCallback( + (e: React.FormEvent) => { + e.preventDefault() + if (inputPageSize !== undefined && inputPageSize > 0) { + setFilter('page_size', inputPageSize) + } + }, + [inputPageSize, setFilter], + ) + + useEffect(() => { + setInputPageSize(page_size) + }, [page_size]) + + return ( +
+ + + per page + + + ) +} diff --git a/packages/browser/src/components/filters/URLPagination.tsx b/packages/browser/src/components/filters/URLPagination.tsx new file mode 100644 index 000000000..faba4dc94 --- /dev/null +++ b/packages/browser/src/components/filters/URLPagination.tsx @@ -0,0 +1,110 @@ +import { IconButton, Input, Text, ToolTip } from '@stump/components' +import { ChevronLeft, ChevronRight } from 'lucide-react' +import React, { useCallback, useEffect, useState } from 'react' + +type Props = { + pages: number + currentPage: number + onChangePage: (page: number) => void + onPrefetchPage?: (page: number) => void +} + +export default function URLPagination({ pages, currentPage, onChangePage, onPrefetchPage }: Props) { + const [inputPage, setInputPage] = useState(currentPage) + + const handleNextPage = useCallback(() => { + if (currentPage < pages) { + onChangePage(currentPage + 1) + } + }, [currentPage, onChangePage, pages]) + + const handlePrefetchNextPage = useCallback(() => { + if (currentPage < pages) { + onPrefetchPage?.(currentPage + 1) + } + }, [currentPage, onPrefetchPage, pages]) + + const handlePreviousPage = useCallback(() => { + if (currentPage > 1) { + onChangePage(currentPage - 1) + } + }, [currentPage, onChangePage]) + + const handlePrefetchPreviousPage = useCallback(() => { + if (currentPage > 1) { + onPrefetchPage?.(currentPage - 1) + } + }, [currentPage, onPrefetchPage]) + + const handleInputChange = (e: React.ChangeEvent) => { + const parsed = parseInt(e.target.value) + setInputPage(isNaN(parsed) ? 
undefined : parsed) + } + + const handleInputBlur = useCallback(() => { + if (inputPage === undefined || inputPage < 1 || inputPage > pages) { + setInputPage(currentPage) + } + }, [inputPage, currentPage, pages]) + + const handleInputSubmit = useCallback( + (e: React.FormEvent) => { + e.preventDefault() + if (inputPage !== undefined && inputPage > 0 && inputPage <= pages) { + onChangePage(inputPage) + } + }, + [inputPage, onChangePage, pages], + ) + + useEffect(() => { + setInputPage(currentPage) + }, [currentPage]) + + return ( +
+
+ + + of {pages} + + +
+ + + + + + + + = pages} + onClick={handleNextPage} + onMouseEnter={handlePrefetchNextPage} + > + + + +
+
+ ) +} diff --git a/packages/browser/src/components/filters/context.ts b/packages/browser/src/components/filters/context.ts index d0ae3b63d..92d43a15d 100644 --- a/packages/browser/src/components/filters/context.ts +++ b/packages/browser/src/components/filters/context.ts @@ -7,19 +7,30 @@ export type Ordering = { order_by?: string } +export type Pagination = { + page: number + page_size: number +} + export type IFilterContext = { filters?: Record ordering: Ordering + pagination: Pagination + setPage: (page: number) => void setFilters: (filters: Record) => void setFilter: (key: string, value: unknown) => void + setOrdering: (ordering: Ordering) => void removeFilter: (key: string) => void } export const FilterContext = createContext({ filters: {}, ordering: {}, + pagination: { page: 1, page_size: 20 }, removeFilter: noop, setFilter: noop, setFilters: noop, + setOrdering: noop, + setPage: noop, }) export const useFilterContext = () => useContext(FilterContext) diff --git a/packages/browser/src/components/filters/form/GenericFilterMultiselect.tsx b/packages/browser/src/components/filters/form/GenericFilterMultiselect.tsx index 2a083ab73..eab723f70 100644 --- a/packages/browser/src/components/filters/form/GenericFilterMultiselect.tsx +++ b/packages/browser/src/components/filters/form/GenericFilterMultiselect.tsx @@ -1,5 +1,5 @@ import { ComboBox } from '@stump/components' -import React, { useEffect, useState } from 'react' +import React, { useCallback } from 'react' import { useFormContext } from 'react-hook-form' type Props = { @@ -24,12 +24,14 @@ type Props = { * update the form context. 
*/ export default function GenericFilterMultiselect({ name, label, options }: Props) { - const form = useFormContext() - const [value, setValue] = useState(() => form.getValues(name)) + const { watch, setValue } = useFormContext() - useEffect(() => { - form.setValue(name, value) - }, [name, form, value]) + const formValue = watch(name) + + const handleChange = useCallback( + (value: string[] | undefined) => setValue(name, value), + [name, setValue], + ) return ( { - setValue(selected) - }} + value={formValue} + onChange={handleChange} size="full" /> ) diff --git a/packages/browser/src/components/filters/form/MediaFilterForm.tsx b/packages/browser/src/components/filters/form/MediaFilterForm.tsx index 4e0c9858b..5409984ac 100644 --- a/packages/browser/src/components/filters/form/MediaFilterForm.tsx +++ b/packages/browser/src/components/filters/form/MediaFilterForm.tsx @@ -2,11 +2,12 @@ import { zodResolver } from '@hookform/resolvers/zod' import { metadataApi, metadataQueryKeys } from '@stump/api' import { useQuery } from '@stump/client' import { CheckBox, Form } from '@stump/components' -import React, { useMemo, useState } from 'react' +import React, { useEffect, useMemo, useState } from 'react' import { FieldValues, useForm } from 'react-hook-form' import z from 'zod' -import { useSeriesContext } from '../../../scenes/series/context' +import { useSeriesContextSafe } from '@/scenes/series' + import { useFilterContext } from '..' 
import AgeRatingFilter from './AgeRatingFilter' import ExtensionSelect from './ExtensionSelect' @@ -37,29 +38,26 @@ const schema = z.object({ read_status: z.array(z.enum(['completed', 'reading', 'unread'])).optional(), }) export type MediaFilterFormSchema = z.infer - -// TODO: detatch from series context to be re-used in library context +type ReadStatus = NonNullable['read_status']>[number] export default function MediaFilterForm() { const { filters, setFilters } = useFilterContext() - const { - series: { id }, - } = useSeriesContext() + const seriesContext = useSeriesContextSafe() const [onlyFromSeries, setOnlyFromSeries] = useState(false) const params = useMemo(() => { - if (onlyFromSeries && !!id) { + if (onlyFromSeries && !!seriesContext?.series.id) { return { media: { series: { - id, + id: seriesContext.series.id, }, }, } } return {} - }, [onlyFromSeries, id]) + }, [onlyFromSeries, seriesContext]) const { data } = useQuery([metadataQueryKeys.getMediaMetadataOverview, params], () => metadataApi.getMediaMetadataOverview(params).then((res) => res.data), @@ -72,9 +70,22 @@ export default function MediaFilterForm() { ...((filters?.metadata as Record) || {}), age_rating: (filters?.metadata as Record)?.age_rating ?? null, }, + read_status: filters?.read_status as ReadStatus[], }, resolver: zodResolver(schema), }) + const { reset } = form + + useEffect(() => { + reset({ + extension: filters?.extension as string, + metadata: { + ...((filters?.metadata as Record) || {}), + age_rating: (filters?.metadata as Record)?.age_rating ?? null, + }, + read_status: filters?.read_status as ReadStatus[], + }) + }, [reset, filters]) /** * A function that handles the form submission. 
This function merges the form @@ -98,7 +109,7 @@ export default function MediaFilterForm() { form={form} onSubmit={handleSubmit} > - {!!id && ( + {!!seriesContext && ( void } export default function OrderByDirection({ value, onChange }: Props) { - const { ordering } = useFilterContext() return ( ) } diff --git a/packages/browser/src/components/filters/form/OrderBySelect.tsx b/packages/browser/src/components/filters/form/OrderBySelect.tsx index 703c9d3e2..32ff5f839 100644 --- a/packages/browser/src/components/filters/form/OrderBySelect.tsx +++ b/packages/browser/src/components/filters/form/OrderBySelect.tsx @@ -1,4 +1,4 @@ -import { NativeSelect } from '@stump/components' +import { Label, NativeSelect } from '@stump/components' import React, { useMemo } from 'react' import { FilterableEntity } from '.' @@ -10,6 +10,7 @@ const options: Record = { series: [...commonOptions, 'description', 'library_id'], } +// TODO: accept a default value which, if value equals, do an onChange with an empty string type Props = { entity: FilterableEntity value?: string @@ -22,12 +23,17 @@ export default function OrderBySelect({ entity, value, onChange }: Props) { ) return ( - onChange?.(e.target.value)} - size="sm" - /> +
+ + onChange?.(e.target.value)} + size="sm" + /> +
) } diff --git a/packages/browser/src/components/filters/index.ts b/packages/browser/src/components/filters/index.ts index daca0f9ea..38b8ffafb 100644 --- a/packages/browser/src/components/filters/index.ts +++ b/packages/browser/src/components/filters/index.ts @@ -1,4 +1,16 @@ export { FilterContext, type IFilterContext, useFilterContext } from './context' +export { default as FilterHeader } from './FilterHeader' export { default as FilterProvider } from './FilterProvider' export { default as FilterToolBar } from './FilterToolBar' export { default as Search } from './Search' +export { default as URLFilterContainer } from './URLFilterContainer' +export { default as URLFilterDrawer } from './URLFilterDrawer' +export { default as URLOrdering } from './URLOrdering' +export { useFilterScene } from './useFilterScene' +export { + clearFilters, + EXCLUDED_FILTER_KEYS, + getActiveFilterCount, + orderingToTableSort, + tableSortToOrdering, +} from './utils' diff --git a/packages/browser/src/components/filters/useFilterScene.ts b/packages/browser/src/components/filters/useFilterScene.ts new file mode 100644 index 000000000..db277a9b2 --- /dev/null +++ b/packages/browser/src/components/filters/useFilterScene.ts @@ -0,0 +1,151 @@ +import { toObjectParams, toUrlParams } from '@stump/api' +import { useCallback, useMemo } from 'react' +import { useSearchParams } from 'react-router-dom' +import { useMediaMatch } from 'rooks' + +import { IFilterContext, Ordering } from './context' +import { EXCLUDED_FILTER_KEYS } from './utils' + +type Return = IFilterContext + +export function useFilterScene(): Return { + const [searchParams, setSearchParams] = useSearchParams() + + const is3XLScreenOrBigger = useMediaMatch('(min-width: 1600px)') + const defaultPageSize = is3XLScreenOrBigger ? 40 : 20 + + /** + * An object representation of the url params without the excluded keys, such as + * order_by, direction, search, page, and page_size. 
+ */ + const filters = useMemo( + () => + toObjectParams>(searchParams, { + ignoreKeys: EXCLUDED_FILTER_KEYS, + removeEmpty: true, + }), + [searchParams], + ) + + /** + * An object representation of the ordering params + */ + const ordering = useMemo( + () => + objectWithoutEmptyValues({ + direction: searchParams.get('direction') as 'asc' | 'desc' | undefined, + order_by: searchParams.get('order_by') as string | undefined, + }), + [searchParams], + ) + + /** + * An object representation of the pagination params + */ + const pagination = useMemo( + () => ({ + page: searchParams.get('page') ? parseInt(searchParams.get('page') as string) : 1, + page_size: searchParams.get('page_size') + ? parseInt(searchParams.get('page_size') as string) + : defaultPageSize, + }), + [searchParams, defaultPageSize], + ) + + const setOrdering = useCallback( + (newOrdering: Ordering) => { + setSearchParams( + toUrlParams( + { + ...pagination, + ...filters, + ...newOrdering, + }, + undefined, + { removeEmpty: true }, + ), + ) + }, + [setSearchParams, pagination, filters], + ) + + const setPage = useCallback( + (page: number) => { + setSearchParams((prev) => { + prev.set('page', page.toString()) + return prev + }) + }, + [setSearchParams], + ) + + /** + * Replace the current filters with the provided filters + */ + const handleSetFilters = useCallback( + (newFilters: Record) => { + // setFilters(toUrlParams(newFilters, undefined, { removeEmpty: true })) + setSearchParams( + toUrlParams( + { + ...ordering, + ...pagination, + ...newFilters, + }, + undefined, + { removeEmpty: true }, + ), + ) + }, + [ordering, pagination, setSearchParams], + ) + + /** + * Sets a single filter in the url with the provided value + */ + const handleSetFilter = useCallback( + (key: string, value: unknown) => { + setSearchParams((prev) => { + const params = toObjectParams>(prev) + params[key] = value + return toUrlParams(params) + }) + }, + [setSearchParams], + ) + + /** + * Removes a filter from the url + */ + 
const removeFilter = useCallback( + (key: string) => { + setSearchParams((prev) => { + prev.delete(key) + return prev + }) + }, + [setSearchParams], + ) + + return { + filters, + ordering, + pagination, + removeFilter, + setFilter: handleSetFilter, + setFilters: handleSetFilters, + setOrdering, + setPage, + } +} + +const objectWithoutEmptyValues = (obj: Record) => + Object.entries(obj).reduce( + (acc, [key, value]) => { + if (value) { + acc[key] = value + } + return acc + }, + {} as Record, + ) diff --git a/packages/browser/src/components/filters/utils.ts b/packages/browser/src/components/filters/utils.ts new file mode 100644 index 000000000..a8dc8ea42 --- /dev/null +++ b/packages/browser/src/components/filters/utils.ts @@ -0,0 +1,53 @@ +import { ReactTableGlobalSort } from '@stump/types' + +import { Ordering } from './context' + +export const EXCLUDED_FILTER_KEYS = ['order_by', 'direction', 'page', 'page_size'] +export const EXCLUDED_FILTER_KEYS_FOR_COUNTS = EXCLUDED_FILTER_KEYS.concat(['search']) + +export const getActiveFilterCount = (filters: Record) => { + return Object.keys(filters).filter((key) => !EXCLUDED_FILTER_KEYS_FOR_COUNTS.includes(key)).length +} + +export const clearFilters = (filters: Record): Record => + Object.keys(filters).reduce( + (acc, key) => { + if (EXCLUDED_FILTER_KEYS.includes(key)) { + acc[key] = filters[key] + } + return acc + }, + {} as Record, + ) + +/** + * Converts the react-table sort object to an ordering object. + * + * Note that only the **first** sort is considered. + */ +export const tableSortToOrdering = (sort: ReactTableGlobalSort[]): Ordering => { + if (sort[0]) { + return { + direction: sort[0].desc ? 'desc' : 'asc', + order_by: sort[0].id, + } + } else { + return {} + } +} + +/** + * Converts the ordering object to a react-table sort object. 
+ */ +export const orderingToTableSort = (ordering: Ordering): ReactTableGlobalSort[] => { + if (ordering.order_by) { + return [ + { + desc: ordering.direction === 'desc', + id: ordering.order_by, + }, + ] + } else { + return [] + } +} diff --git a/packages/browser/src/components/library/table/index.ts b/packages/browser/src/components/library/table/index.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/browser/src/components/navigation/sidebar/SideBar.tsx b/packages/browser/src/components/navigation/sidebar/SideBar.tsx index dc2eb5797..9419c8174 100644 --- a/packages/browser/src/components/navigation/sidebar/SideBar.tsx +++ b/packages/browser/src/components/navigation/sidebar/SideBar.tsx @@ -164,3 +164,5 @@ export default function SideBar({ asChild, hidden }: Props) { ) } + +export const SIDEBAR_WIDTH = 208 diff --git a/packages/browser/src/components/navigation/sidebar/SideBarButtonLink.tsx b/packages/browser/src/components/navigation/sidebar/SideBarButtonLink.tsx index 115c9b75a..94d7ceb89 100644 --- a/packages/browser/src/components/navigation/sidebar/SideBarButtonLink.tsx +++ b/packages/browser/src/components/navigation/sidebar/SideBarButtonLink.tsx @@ -25,7 +25,7 @@ export default function SideBarButtonLink({ className={cn( 'group inline-flex h-[2.35rem] w-full shrink-0 items-center justify-start rounded-md px-2 text-sm transition-all duration-150 hover:bg-sidebar-300', { - 'justify-center border border-dashed border-sidebar-300 bg-opacity-50 text-contrast-200 text-opacity-80 hover:bg-sidebar-200 hover:text-opacity-100': + 'justify-center border border-dashed border-sidebar-300 bg-opacity-50 text-contrast text-opacity-90 hover:bg-sidebar-200 hover:text-opacity-100': variant === 'action', }, { 'text-contrast': variant !== 'action' }, diff --git a/packages/browser/src/components/navigation/sidebar/index.ts b/packages/browser/src/components/navigation/sidebar/index.ts index f80c801bf..2ab0e2bed 100644 --- 
a/packages/browser/src/components/navigation/sidebar/index.ts +++ b/packages/browser/src/components/navigation/sidebar/index.ts @@ -1,2 +1,2 @@ -export { default as SideBar } from './SideBar' +export { default as SideBar, SIDEBAR_WIDTH } from './SideBar' export { default as SideBarFooter } from './SideBarFooter' diff --git a/packages/browser/src/components/navigation/sidebar/sections/book-club/BookClubEmoji.tsx b/packages/browser/src/components/navigation/sidebar/sections/bookClub/BookClubEmoji.tsx similarity index 100% rename from packages/browser/src/components/navigation/sidebar/sections/book-club/BookClubEmoji.tsx rename to packages/browser/src/components/navigation/sidebar/sections/bookClub/BookClubEmoji.tsx diff --git a/packages/browser/src/components/navigation/sidebar/sections/book-club/BookClubSideBarSection.tsx b/packages/browser/src/components/navigation/sidebar/sections/bookClub/BookClubSideBarSection.tsx similarity index 95% rename from packages/browser/src/components/navigation/sidebar/sections/book-club/BookClubSideBarSection.tsx rename to packages/browser/src/components/navigation/sidebar/sections/bookClub/BookClubSideBarSection.tsx index 0e9254e03..81f8d1690 100644 --- a/packages/browser/src/components/navigation/sidebar/sections/book-club/BookClubSideBarSection.tsx +++ b/packages/browser/src/components/navigation/sidebar/sections/bookClub/BookClubSideBarSection.tsx @@ -5,9 +5,10 @@ import { Club } from 'lucide-react' import React from 'react' import { useLocation } from 'react-router' -import { useAppContext } from '../../../../../context' -import paths from '../../../../../paths' -import { EntityOptionProps } from '../../../types' +import { EntityOptionProps } from '@/components/navigation/types' +import { useAppContext } from '@/context' +import paths from '@/paths' + import SideBarButtonLink from '../../SideBarButtonLink' import BookClubEmoji from './BookClubEmoji' diff --git 
a/packages/browser/src/components/navigation/sidebar/sections/index.ts b/packages/browser/src/components/navigation/sidebar/sections/index.ts index 881a9ae56..f02a7953d 100644 --- a/packages/browser/src/components/navigation/sidebar/sections/index.ts +++ b/packages/browser/src/components/navigation/sidebar/sections/index.ts @@ -1,3 +1,3 @@ -export { default as BookClubSideBarSection } from './book-club/BookClubSideBarSection' -export { default as LibrarySideBarSection } from './library-section/LibrarySideBarSection' -export { default as SmartListSideBarSection } from './smart-list/SmartListSideBarSection' +export { default as BookClubSideBarSection } from './bookClub/BookClubSideBarSection' +export { default as LibrarySideBarSection } from './library/LibrarySideBarSection' +export { default as SmartListSideBarSection } from './smartList/SmartListSideBarSection' diff --git a/packages/browser/src/components/navigation/sidebar/sections/library-section/LibraryEmoji.tsx b/packages/browser/src/components/navigation/sidebar/sections/library/LibraryEmoji.tsx similarity index 100% rename from packages/browser/src/components/navigation/sidebar/sections/library-section/LibraryEmoji.tsx rename to packages/browser/src/components/navigation/sidebar/sections/library/LibraryEmoji.tsx diff --git a/packages/browser/src/components/navigation/sidebar/sections/library-section/LibraryOptionsMenu.tsx b/packages/browser/src/components/navigation/sidebar/sections/library/LibraryOptionsMenu.tsx similarity index 100% rename from packages/browser/src/components/navigation/sidebar/sections/library-section/LibraryOptionsMenu.tsx rename to packages/browser/src/components/navigation/sidebar/sections/library/LibraryOptionsMenu.tsx diff --git a/packages/browser/src/components/navigation/sidebar/sections/library-section/LibrarySideBarSection.tsx b/packages/browser/src/components/navigation/sidebar/sections/library/LibrarySideBarSection.tsx similarity index 100% rename from 
packages/browser/src/components/navigation/sidebar/sections/library-section/LibrarySideBarSection.tsx rename to packages/browser/src/components/navigation/sidebar/sections/library/LibrarySideBarSection.tsx diff --git a/packages/browser/src/components/navigation/sidebar/sections/smart-list/SmartListSideBarSection.tsx b/packages/browser/src/components/navigation/sidebar/sections/smartList/SmartListSideBarSection.tsx similarity index 100% rename from packages/browser/src/components/navigation/sidebar/sections/smart-list/SmartListSideBarSection.tsx rename to packages/browser/src/components/navigation/sidebar/sections/smartList/SmartListSideBarSection.tsx diff --git a/packages/browser/src/components/readers/epub/EpubJsReader.tsx b/packages/browser/src/components/readers/epub/EpubJsReader.tsx index c72ec7441..8e7438e7e 100644 --- a/packages/browser/src/components/readers/epub/EpubJsReader.tsx +++ b/packages/browser/src/components/readers/epub/EpubJsReader.tsx @@ -123,6 +123,19 @@ export default function EpubJsReader({ id, initialCfi }: EpubJsReaderProps) { const { epub, isLoading } = useEpubLazy(id) + /** + * A function for focusing the iframe in the epub reader. This will be used to ensure + * the iframe is focused whenever the reader is loaded and/or the location changes. + */ + const focusIframe = () => { + const iframe = ref.current?.querySelector('iframe') + if (iframe) { + iframe.focus() + } else { + console.debug('Failed to find iframe in epub reader') + } + } + /** * Syncs the current location with local state whenever epubjs internal location * changes. It will also try and determine the current chapter information. 
@@ -139,6 +152,7 @@ export default function EpubJsReader({ id, initialCfi }: EpubJsReaderProps) { } setCurrentLocation(changeState) + focusIframe() } /** diff --git a/packages/browser/src/scenes/series/UpNextInSeriesButton.tsx b/packages/browser/src/components/series/NextInSeriesLink.tsx similarity index 93% rename from packages/browser/src/scenes/series/UpNextInSeriesButton.tsx rename to packages/browser/src/components/series/NextInSeriesLink.tsx index eb0bd868e..c0df0a595 100644 --- a/packages/browser/src/scenes/series/UpNextInSeriesButton.tsx +++ b/packages/browser/src/components/series/NextInSeriesLink.tsx @@ -9,7 +9,7 @@ type Props = { title?: string } -export default function UpNextInSeriesButton({ seriesId, title, ...props }: Props) { +export default function NextInSeriesLink({ seriesId, title, ...props }: Props) { const { media, isLoading } = useUpNextInSeries(seriesId) if (!media) { diff --git a/packages/browser/src/components/series/SeriesExplorationLayout.tsx b/packages/browser/src/components/series/SeriesExplorationLayout.tsx new file mode 100644 index 000000000..59821cd21 --- /dev/null +++ b/packages/browser/src/components/series/SeriesExplorationLayout.tsx @@ -0,0 +1,42 @@ +import { IconButton, ToolTip } from '@stump/components' +import { LayoutGrid, Table } from 'lucide-react' +import React from 'react' + +import { useSeriesLayout } from '@/stores/layout' + +export default function SeriesExplorationLayout() { + const { layout, setLayout } = useSeriesLayout((state) => ({ + layout: state.layout, + setLayout: state.setLayout, + })) + + return ( +
+ + setLayout('GRID')} + disabled={layout === 'GRID'} + > + + + + + + setLayout('TABLE')} + disabled={layout === 'TABLE'} + > +
+ + + + ) +} diff --git a/packages/browser/src/components/series/SeriesGrid.tsx b/packages/browser/src/components/series/SeriesGrid.tsx index 83ffc8357..3c67e6b79 100644 --- a/packages/browser/src/components/series/SeriesGrid.tsx +++ b/packages/browser/src/components/series/SeriesGrid.tsx @@ -15,18 +15,20 @@ export default function SeriesGrid({ series, isLoading, hasFilters }: Props) { return null } else if (!series || !series.length) { return ( - +
+ +
) } diff --git a/packages/browser/src/components/series/index.ts b/packages/browser/src/components/series/index.ts new file mode 100644 index 000000000..639361604 --- /dev/null +++ b/packages/browser/src/components/series/index.ts @@ -0,0 +1 @@ +export { SeriesTable } from './table' diff --git a/packages/browser/src/components/series/table/CoverImageCell.tsx b/packages/browser/src/components/series/table/CoverImageCell.tsx new file mode 100644 index 000000000..bb636ee3f --- /dev/null +++ b/packages/browser/src/components/series/table/CoverImageCell.tsx @@ -0,0 +1,60 @@ +import { seriesApi } from '@stump/api' +import { Book } from 'lucide-react' +import React, { useState } from 'react' + +type Props = { + /** + * The ID of the series + */ + id: string + /** + * The title for the image + */ + title?: string +} + +export default function CoverImageCell({ id, title }: Props) { + const [showFallback, setShowFallback] = useState(false) + + const loadImage = () => { + const image = new Image() + return new Promise((resolve, reject) => { + image.src = seriesApi.getSeriesThumbnail(id) + image.onload = () => resolve(image) + image.onerror = (e) => { + console.error('Image failed to load:', e) + reject(new Error('Could not load image')) + } + }) + } + + const attemptReload = async () => { + try { + await loadImage() + setShowFallback(false) + } catch (e) { + setShowFallback(true) + } + } + + if (showFallback) { + return ( +
+ +
+ ) + } + + return ( + setShowFallback(true)} + /> + ) +} diff --git a/packages/browser/src/components/series/table/SeriesTable.tsx b/packages/browser/src/components/series/table/SeriesTable.tsx new file mode 100644 index 000000000..6ffc5cf51 --- /dev/null +++ b/packages/browser/src/components/series/table/SeriesTable.tsx @@ -0,0 +1,47 @@ +import { Series } from '@stump/types' +import { OnChangeFn, SortingState } from '@tanstack/react-table' +import React, { useCallback, useMemo } from 'react' + +import { orderingToTableSort, tableSortToOrdering, useFilterContext } from '@/components/filters' +import { EntityTable, EntityTableProps } from '@/components/table' +import { useSeriesLayout } from '@/stores/layout' + +import { defaultColumns } from './columns' + +type Props = Omit, 'columns' | 'options'> + +export default function SeriesTable(props: Props) { + const configuration = useSeriesLayout((state) => ({ + columns: state.columns, + })) + const { ordering, setOrdering } = useFilterContext() + + const columns = useMemo( + () => (configuration.columns?.length ? 
configuration.columns : defaultColumns), + [configuration.columns], + ) + + const handleSetSorting: OnChangeFn = useCallback( + (updater) => { + if (typeof updater === 'function') { + setOrdering(tableSortToOrdering(updater(orderingToTableSort(ordering)))) + } else { + setOrdering(tableSortToOrdering(updater)) + } + }, + [ordering, setOrdering], + ) + + const sorting = useMemo(() => orderingToTableSort(ordering), [ordering]) + + return ( + + ) +} diff --git a/packages/browser/src/components/series/table/columns.tsx b/packages/browser/src/components/series/table/columns.tsx new file mode 100644 index 000000000..447134cc8 --- /dev/null +++ b/packages/browser/src/components/series/table/columns.tsx @@ -0,0 +1,98 @@ +import { Link, Text } from '@stump/components' +import { ReactTableColumnSort, Series } from '@stump/types' +import { ColumnDef, createColumnHelper } from '@tanstack/react-table' + +import paths from '@/paths' + +import CoverImageCell from './CoverImageCell' + +const columnHelper = createColumnHelper() + +const coverColumn = columnHelper.display({ + cell: ({ + row: { + original: { id, name, metadata }, + }, + }) => , + enableGlobalFilter: true, + header: () => ( + + Cover + + ), + id: 'cover', + size: 60, +}) + +const nameColumn = columnHelper.accessor(({ name, metadata }) => metadata?.title || name, { + cell: ({ + getValue, + row: { + original: { id }, + }, + }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + enableSorting: true, + header: () => ( + + Name + + ), + id: 'name', + minSize: 285, +}) + +const booksCountColumn = columnHelper.accessor((series) => series.media_count?.toString(), { + cell: ({ getValue }) => ( + + {getValue()} + + ), + enableGlobalFilter: true, + // TODO(prisma 0.7.0): Support order by relation + enableSorting: false, + header: () => ( + + Books + + ), + id: 'media_count', + minSize: 60, +}) + +// TODO: more columns + +/** + * A map of all columns that can be selected for the table. 
The key is the column ID, and the value is the column, itself. + */ +export const columnMap = { + books: booksCountColumn, + cover: coverColumn, + name: nameColumn, +} as Record> + +export const defaultColumns = [coverColumn, nameColumn, booksCountColumn] as ColumnDef[] + +/** + * A helper function to build the columns for the table based on the stored column selection. If + * no columns are selected, or if the selection is empty, the default columns will be used. + */ +export const buildColumns = (columns?: ReactTableColumnSort[]) => { + if (!columns || columns.length === 0) { + return defaultColumns + } + + const sortedColumns = columns.sort((a, b) => a.position - b.position) + const selectedColumnIds = sortedColumns.map(({ id }) => id) + + return selectedColumnIds + .map((id) => columnMap[id as keyof typeof columnMap]) + .filter(Boolean) as ColumnDef[] +} diff --git a/packages/browser/src/components/series/table/index.ts b/packages/browser/src/components/series/table/index.ts new file mode 100644 index 000000000..0a9529a5e --- /dev/null +++ b/packages/browser/src/components/series/table/index.ts @@ -0,0 +1 @@ +export { default as SeriesTable } from './SeriesTable' diff --git a/packages/browser/src/components/table/EntityTable.tsx b/packages/browser/src/components/table/EntityTable.tsx new file mode 100644 index 000000000..fbd16c4d8 --- /dev/null +++ b/packages/browser/src/components/table/EntityTable.tsx @@ -0,0 +1,153 @@ +import { cn } from '@stump/components' +import { + ColumnDef, + flexRender, + getCoreRowModel, + getFilteredRowModel, + getSortedRowModel, + OnChangeFn, + SortDirection, + SortingState, + useReactTable, +} from '@tanstack/react-table' +import React, { PropsWithChildren } from 'react' + +import SortIcon from './SortIcon' + +type EntityTableSorting = + | { + sorting: SortingState + setSorting: OnChangeFn + } + | { + sorting?: never + setSorting?: never + } + +type EntityTableSearch = + | { + globalFilter: string + } + | { + globalFilter?: never 
+ } + +type StateOptions = EntityTableSorting & EntityTableSearch + +export type EntityTableOptions = { + enableMultiSort?: boolean +} & StateOptions + +export type EntityTableProps = { + /** + * The items to render in the table. + */ + items: Entity[] + /** + * The columns to render in the table. This is a prop in order to support dynamic columns, + * e.g. configurable columns. + */ + columns: ColumnDef[] + /** + * Additional options for the underlying table. + */ + options?: EntityTableOptions + /** + * An optional renderer to wrap the table in. This is useful for providing a custom container, + * as needed. + */ + render?: (props: PropsWithChildren) => React.ReactNode +} + +/** + * A table component used for rendering: + * + * - Media / Books + * - Series + * - Authors + * + * This is separate from the `Table` component primarily in styling, and serves to provide a + * unified look and feel for the application WRT the scenes which explore these entities. + */ +export default function EntityTable({ + items, + columns, + options, + render, +}: EntityTableProps) { + const { setSorting, enableMultiSort, ...state } = options ?? {} + + const table = useReactTable({ + columns, + data: items, + enableMultiSort, + getCoreRowModel: getCoreRowModel(), + getFilteredRowModel: getFilteredRowModel(), + getSortedRowModel: getSortedRowModel(), + onSortingChange: setSorting, + state, + }) + const { rows } = table.getRowModel() + + const Container = render ?? React.Fragment + + return ( + +
+ + + {table.getFlatHeaders().map((header) => { + const isSortable = header.column.getCanSort() + return ( + + ) + })} + + + + + {rows.map((row) => ( + + {row.getVisibleCells().map((cell) => ( + + ))} + + ))} + +
+
+ {flexRender(header.column.columnDef.header, header.getContext())} + {isSortable && ( + + )} +
+
+ {flexRender(cell.column.columnDef.cell, cell.getContext())} +
+ + ) +} diff --git a/packages/browser/src/components/table/EntityTableColumnConfiguration.tsx b/packages/browser/src/components/table/EntityTableColumnConfiguration.tsx new file mode 100644 index 000000000..d2d3cecde --- /dev/null +++ b/packages/browser/src/components/table/EntityTableColumnConfiguration.tsx @@ -0,0 +1,222 @@ +import { + closestCenter, + DndContext, + DragEndEvent, + KeyboardSensor, + PointerSensor, + useSensor, + useSensors, +} from '@dnd-kit/core' +import { + arrayMove, + rectSortingStrategy, + SortableContext, + sortableKeyboardCoordinates, + useSortable, +} from '@dnd-kit/sortable' +import { CSS } from '@dnd-kit/utilities' +import { Button, IconButton, Sheet, Text, ToolTip } from '@stump/components' +import { useLocaleContext } from '@stump/i18n' +import { ReactTableColumnSort } from '@stump/types' +import { Columns, Eye, EyeOff } from 'lucide-react' +import React, { useCallback, useMemo, useState } from 'react' +import { useMediaMatch } from 'rooks' + +import { bookTableColumnMap } from '../book/table' + +type Props = { + entity: 'media' | 'series' | 'library' + configuration: ReactTableColumnSort[] + onSave: (columns: ReactTableColumnSort[]) => void +} + +export default function EntityTableColumnConfiguration({ entity, configuration, onSave }: Props) { + const { t } = useLocaleContext() + + const [isOpen, setIsOpen] = useState(false) + + const isMobile = useMediaMatch('(max-width: 768px)') + const sensors = useSensors( + useSensor(PointerSensor, { + activationConstraint: { + // Require pointer to move by 5 pixels before activating draggable + // Allows nested onClicks/buttons/interactions to be accessed + distance: 5, + }, + }), + useSensor(KeyboardSensor, { + coordinateGetter: sortableKeyboardCoordinates, + }), + ) + + const columnMap = useMemo(() => { + if (entity === 'media') { + return bookTableColumnMap + } + + return {} + }, [entity]) + + const [fullConfiguration, setFullConfiguration] = useState(() => + 
resolveConfiguration(configuration, columnMap), + ) + + /** + * A callback to toggle the selected state of a column. If saved, the column will be displayed + * according to the boolean value of selected. + * + * @param id The ID of the column to toggle + */ + const handleChangeSelected = (id: string) => + setFullConfiguration((prev) => + prev.map((column) => { + if (column.id === id) { + return { + ...column, + selected: !column.selected, + } + } + return column + }), + ) + + /** + * A callback to persist the current local state to the parent component. + */ + const handleSave = useCallback(() => { + const onlySelected = fullConfiguration + .filter((column) => column.selected) + .map(({ id }, idx) => ({ + id, + position: idx, + })) + onSave(onlySelected) + setIsOpen(false) + }, [fullConfiguration, onSave]) + + /** + * A callback to handle the end of a drag event. If the column is dragged over another column, + * the columns will be re-ordered according to the new position. + */ + const handleDragEnd = (event: DragEndEvent) => { + const { active, over } = event + + if (!!over?.id && active.id !== over.id) { + setFullConfiguration((prev) => { + const oldIndex = prev.findIndex((column) => column.id === active.id) + const newIndex = prev.findIndex((column) => column.id === over.id) + return arrayMove(prev, oldIndex, newIndex) + }) + } + } + + /** + * The IDs of all columns in the current configuration, used for sorting and re-ordering. + */ + const identifiers = useMemo( + () => fullConfiguration.map((column) => column.id), + [fullConfiguration], + ) + + return ( + setIsOpen(false)} + onOpen={() => setIsOpen(true)} + trigger={ + + setIsOpen(true)}> + + + + } + size={isMobile ? 'xl' : 'lg'} + footer={ +
+ + +
+ } + > +
+ + +
+ {fullConfiguration.map((column) => ( + handleChangeSelected(column.id)} + /> + ))} +
+
+
+
+
+ ) +} + +const resolveConfiguration = ( + configuration: ReactTableColumnSort[], + columnMap: Record, +) => + Object.entries(columnMap) + .map(([key, label], idx) => { + const configPosition = configuration.findIndex((column) => column.id === key) + return { + id: key, + label, + position: configPosition === -1 ? configuration.length + idx : configPosition, + selected: configPosition !== -1, + } + }) + .sort((a, b) => a.position - b.position) + +type DraggableColumnProps = { + column: ReturnType[number] + toggleSelected: () => void + disabled?: boolean +} +function DraggableColumn({ column, toggleSelected }: DraggableColumnProps) { + const { attributes, listeners, setNodeRef, transform, transition } = useSortable({ + id: column.id, + transition: { + duration: 250, + easing: 'cubic-bezier(0.25, 1, 0.5, 1)', + }, + }) + + const style = { + transform: CSS.Transform.toString(transform), + transition, + } + + const VisibilityIcon = column.selected ? Eye : EyeOff + + return ( +
+ {column.label} + + + +
+ ) +} diff --git a/packages/browser/src/components/table/Pagination.tsx b/packages/browser/src/components/table/Pagination.tsx index 5870d92af..847cd5f21 100644 --- a/packages/browser/src/components/table/Pagination.tsx +++ b/packages/browser/src/components/table/Pagination.tsx @@ -8,7 +8,7 @@ import { usePagination } from '@/hooks/usePagination' import PagePopoverForm from '../PagePopoverForm' import { PaginationProps } from '../Pagination' -type TablePaginationProps = Omit +export type TablePaginationProps = Omit export default function TablePagination({ pages, @@ -37,7 +37,7 @@ export default function TablePagination({ return (
onChangePage(currentPage - 1)}> - + {pageRange.map((page, i) => { @@ -61,7 +61,7 @@ export default function TablePagination({ onPageChange={onChangePage} trigger={ } /> @@ -69,7 +69,7 @@ export default function TablePagination({ })} = pages} onClick={() => onChangePage(currentPage + 1)}> - +
) diff --git a/packages/browser/src/components/table/SortIcon.tsx b/packages/browser/src/components/table/SortIcon.tsx index 9ff09c174..a1e9bf677 100644 --- a/packages/browser/src/components/table/SortIcon.tsx +++ b/packages/browser/src/components/table/SortIcon.tsx @@ -1,11 +1,15 @@ import { cn } from '@stump/components' import { ArrowDown, ArrowUpDown } from 'lucide-react' -export default function SortIcon({ direction }: { direction: 'asc' | 'desc' | null }) { +type Props = { + direction: 'asc' | 'desc' | null + showIfNull?: boolean +} +export default function SortIcon({ direction, showIfNull }: Props) { const classes = 'h-3.5 w-3.5 text-muted shrink-0' if (!direction) { - return + return showIfNull ? : null } return ( diff --git a/packages/browser/src/components/table/Table.tsx b/packages/browser/src/components/table/Table.tsx index de0e8f083..d1c8fde25 100644 --- a/packages/browser/src/components/table/Table.tsx +++ b/packages/browser/src/components/table/Table.tsx @@ -1,9 +1,11 @@ import { Heading, NativeSelect, Text } from '@stump/components' import { + Column, ColumnDef, ColumnFiltersState, flexRender, getCoreRowModel, + getExpandedRowModel, getFilteredRowModel, getSortedRowModel, SortDirection, @@ -13,7 +15,7 @@ import { } from '@tanstack/react-table' import clsx from 'clsx' import { ArrowDown, ArrowUp } from 'lucide-react' -import { useMemo, useRef, useState } from 'react' +import { CSSProperties, useMemo, useRef, useState } from 'react' import TablePagination from './Pagination' import TableFilterInput from './TableFilterInput' @@ -30,6 +32,7 @@ export interface TableProps { isZeroBasedPagination?: boolean } +// TODO: properly support pinned columns, which means I likely need to break out components (TableCell, TableHeaderCell, etc) // TODO: move into components package! // TODO: loading state // TODO: total count for pagination... @@ -120,7 +123,14 @@ export default function Table({ {headerGroup.headers.map((header) => { return ( - +
({ {row.getVisibleCells().map((cell) => { return ( - + {flexRender(cell.column.columnDef.cell, cell.getContext())} ) @@ -229,7 +246,39 @@ function SortIcon({ direction }: { direction: 'asc' | 'desc' | null }) { return ( - {direction === 'asc' ? : } + {direction === 'asc' ? ( + + ) : ( + + )} ) } + +export const getTableModels = ({ + filtered, + expanded, + sorted, +}: { + filtered?: boolean + expanded?: boolean + sorted?: boolean +}) => ({ + getCoreRowModel: getCoreRowModel(), + ...(filtered ? { getFilteredRowModel: getFilteredRowModel() } : {}), + ...(expanded ? { getExpandedRowModel: getExpandedRowModel(), getRowCanExpand: () => true } : {}), + ...(sorted ? { getSortedRowModel: getSortedRowModel() } : {}), +}) + +export function getCommonPinningStyles(column: Column) { + const isPinned = column.getIsPinned() + + const styles: CSSProperties = { + left: isPinned === 'left' ? `${column.getStart('left')}px` : undefined, + position: isPinned ? 'sticky' : undefined, + right: isPinned === 'right' ? `${column.getAfter('right')}px` : undefined, + zIndex: isPinned ? 
1 : undefined, + } + + return styles +} diff --git a/packages/browser/src/components/table/index.ts b/packages/browser/src/components/table/index.ts index 4ad7ac83a..8564b484c 100644 --- a/packages/browser/src/components/table/index.ts +++ b/packages/browser/src/components/table/index.ts @@ -1,2 +1,15 @@ +export { + default as EntityTable, + type EntityTableOptions, + type EntityTableProps, +} from './EntityTable' +export { default as EntityTableColumnConfiguration } from './EntityTableColumnConfiguration' +export { default as TablePagination } from './Pagination' export { default as SortIcon } from './SortIcon' -export { default as Table } from './Table' +export { getTableModels, default as Table } from './Table' + +import { ColumnDef as ReactColumnDef } from '@tanstack/react-table' + +// Note: bug in the types for ColumnDef in @tanstack/react-table +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type ColumnDef = ReactColumnDef diff --git a/packages/browser/src/hooks/index.ts b/packages/browser/src/hooks/index.ts index 39f49ed7f..3e4b323fa 100644 --- a/packages/browser/src/hooks/index.ts +++ b/packages/browser/src/hooks/index.ts @@ -1,3 +1,4 @@ +export { useCurrentOrPrevious } from './useCurrentOrPrevious' export { useDetectZoom } from './useDetectZoom' export { useLayoutMode } from './useLayoutMode' export { usePageParam } from './usePageParam' diff --git a/packages/browser/src/hooks/useCurrentOrPrevious.ts b/packages/browser/src/hooks/useCurrentOrPrevious.ts new file mode 100644 index 000000000..bab2fb1ae --- /dev/null +++ b/packages/browser/src/hooks/useCurrentOrPrevious.ts @@ -0,0 +1,28 @@ +import { useEffect, useRef } from 'react' + +type Params = { + method?: 'falsy' | 'defined' +} + +export function useCurrentOrPrevious( + value: T | undefined, + { method } = defaultParams, +): T | undefined { + const previousValue = useRef(undefined) + + useEffect(() => { + if (value !== undefined) { + previousValue.current = value + } + }, [value]) 
+ + if (method === 'falsy') { + return value || previousValue.current + } else { + return value ?? previousValue.current + } +} + +const defaultParams: Params = { + method: 'falsy', +} diff --git a/packages/browser/src/hooks/useStickyIntersection.ts b/packages/browser/src/hooks/useStickyIntersection.ts new file mode 100644 index 000000000..dcf2fae12 --- /dev/null +++ b/packages/browser/src/hooks/useStickyIntersection.ts @@ -0,0 +1,22 @@ +import { useEffect, useRef, useState } from 'react' + +type Return = [React.MutableRefObject, boolean] + +export function useStickyIntersection(): Return { + const ref = useRef(null) + + const [isPinned, setIsPinned] = useState(false) + + useEffect(() => { + const observer = new IntersectionObserver( + ([e]) => setIsPinned(e ? e.intersectionRatio < 1 : false), + { threshold: [1] }, + ) + + if (ref.current) { + observer.observe(ref.current) + } + }, []) + + return [ref, isPinned] +} diff --git a/packages/browser/src/i18n/locales/fr.json b/packages/browser/src/i18n/locales/fr.json deleted file mode 100644 index dd438c4cd..000000000 --- a/packages/browser/src/i18n/locales/fr.json +++ /dev/null @@ -1,724 +0,0 @@ -{ - "authScene": { - "claimHeading": "Initialisez votre serveur", - "claimText": "Ce serveur Stump n'est pas initialisé. Utilisez le formulaire ci-dessous pour créer votre compte. 
Une fois créé, vous aurez un accès complet à toutes les fonctionnalités du serveur.", - "form": { - "validation": { - "missingUsername": "Le nom d'utilisateur est requis", - "missingPassword": "Le mot de passe est requis" - }, - "labels": { - "username": "Nom d'utilisateur", - "password": "Mot de passe" - }, - "buttons": { - "createAccount": "Créer un compte", - "login": "S'identifier" - } - }, - "toasts": { - "loggingIn": "Connexion en cours...", - "loggedIn": "Nous sommes heureux de vous revoir !", - "loggedInFirstTime": "Bienvenue !", - "registering": "Enregistrement en cours...", - "registered": "Enregistré !", - "loginFailed": "Échec de la connexion. Veuillez réessayer", - "registrationFailed": "Échec de l'inscription. Veuillez réessayer" - } - }, - "adminLibrarySidebar": { - "libraryConfiguration": { - "heading": "Configuration de la bibliothèque", - "subtitleCreating": "Un aperçu de votre nouvelle bibliothèque sera affiché ci-dessous", - "subtitleEditing": "Un aperçu des modifications de votre bibliothèque sera affiché ci-dessous" - } - }, - "bookOverviewScene": { - "fileInformation": { - "heading": "Informations sur le fichier", - "labels": { - "fileSize": "Taille du fichier", - "fileType": "Type de fichier", - "fileLocation": "Emplacement du fichier", - "fileChecksum": "Somme de contrôle" - } - }, - "nextInSeries": "Suivant dans la série" - }, - "createBookClubScene": { - "heading": "Créer un nouveau Club de Lecture", - "subtitle": "Vous pouvez créer un club de lecture privé en sélectionnant des membres, ou le rendre public pour tous sur le serveur", - "form": { - "name": { - "label": "Nom", - "placeholder": "Mon club de lecture", - "description": "Le nom de votre club de lecture" - }, - "description": { - "label": "Description", - "placeholder": "Un club de fans sur \"Notre Drapeau signifie la Mort\". 
Nous lisons des fictions sur la piraterie au gré de nos envies", - "description": "Une courte description facultative de votre club de lecture" - }, - "is_private": { - "label": "Club privé", - "description": "Si activée, seuls les utilisateurs que vous invitez pourront rejoindre votre club de lecture" - }, - "member_role_spec": { - "heading": "Mappage des rôles personnalisé", - "subtitle": [ - "You can create custom names for the roles in your book club. For example, you could rename the 'Member' role to 'Crewmate', or 'Creator' to 'Captain'. If you don't want to use custom names, you can leave these fields blank and the default names will be used instead. For more information about roles, refer to the", - "documentation" - ], - "member": { - "label": "Membre", - "placeholder": "Membre", - "description": "Le nom du rôle par défaut pour votre club de lecture" - }, - "moderator": { - "label": "Modérateur", - "placeholder": "Modérateur", - "description": "Le nom du rôle de modérateur pour votre club de lecture" - }, - "admin": { - "label": "Administrateur", - "placeholder": "Administrateur", - "description": "Le nom du rôle d'administrateur de votre club de lecture" - }, - "creator": { - "label": "Créateur", - "placeholder": "Créateur", - "description": "Le nom du rôle de créateur de votre club de lecture. C'est vous !" - } - }, - "creator_preferences": { - "heading": "Your membership preferences", - "subtitle": "Some preferences for your membership in the book club. These can be changed at any time from the book club settings page", - "creator_display_name": { - "label": "Nom affiché", - "placeholder": "oromei", - "description": "Un nom affiché facultatif pour votre adhésion au club de lecture. 
Si défini, celui-ci prend la priorité sur votre nom d'utilisateur" - }, - "creator_hide_progress": { - "label": "Masquer la progression", - "description": "Si activée, votre progression de lecture sera cachée aux autres membres du club" - } - }, - "submit": "Créer un Club de Lecture" - } - }, - "createLibraryScene": { - "heading": "Créer une nouvelle bibliothèque", - "subtitle": "Les bibliothèques sont utilisées pour regrouper vos livres. Si vous souhaitez en savoir plus sur les bibliothèques et leur fonctionnement, consultez la", - "subtitleLink": "documentation correspondante", - "form": { - "labels": { - "libraryName": "Nom", - "libraryPath": "Chemin d'accès", - "libraryPathDescription": "Le chemin d'accès à la bibliothèque tel qu'il existe sur votre serveur", - "libraryDescription": "Description", - "libraryTags": "Étiquettes" - }, - "buttons": { - "confirm": "Créer la bibliothèque" - } - } - }, - "librarySettingsScene": { - "heading": "Gérer la Bibliothèque", - "subtitle": "Mettez à jour les détails ou la configuration de votre bibliothèque, modifiez les règles d'accès ou scannez des fichiers. 
Si vous voulez une mise à jour sur les bibliothèques et comment elles fonctionnent, consultez la", - "subtitleLink": "documentation correspondante", - "form": { - "labels": { - "libraryName": "Nom", - "libraryPath": "Chemin d'accès", - "libraryPathDescription": "Le chemin d'accès à la bibliothèque tel qu'il existe sur votre serveur", - "libraryDescription": "Description", - "libraryTags": "Étiquettes" - }, - "buttons": { - "confirm": "Enregistrer les modifications" - } - } - }, - "errorScene": { - "heading": "lol, oups", - "message": "Une erreur s'est produite :", - "buttons": { - "report": "Signaler un bug", - "copy": "Copier les détails de l'erreur", - "goHome": "Aller à la page d'accueil" - } - }, - "homeScene": { - "continueReading": { - "title": "Continuer la lecture", - "emptyState": { - "heading": "Aucun livre à afficher", - "message": "Tous les livres que vous lisez actuellement apparaîtront ici" - } - }, - "recentlyAddedSeries": { - "title": "Séries récemment ajoutées", - "emptyState": { - "heading": "Aucune série à afficher", - "message": "Toutes les séries que vous ajoutez à vos bibliothèques apparaîtront ici" - } - }, - "recentlyAddedBooks": { - "title": "Livres récemment ajoutés", - "emptyState": { - "heading": "Aucun livre à afficher", - "message": "Tous les livres que vous ajoutez à vos bibliothèques apparaîtront ici" - } - } - }, - "seriesOverviewScene": { - "buttons": { - "continueReading": "Continuer la lecture", - "downloadSeries": "Télécharger la série" - } - }, - "userSmartListsScene": { - "heading": "Listes intelligentes", - "subtitle": "Vos recherches et filtres favoris sauvegardés pour un accès facile", - "searchPlaceholder": "Filtrer les listes intelligentes", - "buttons": { - "createSmartList": "Créer une liste intelligente" - }, - "list": { - "emptyState": { - "heading": "Aucune liste intelligente à afficher", - "noListsMessage": "Créez une liste intelligente pour commencer", - "noMatchesMessage": "Essayez de modifier votre recherche" - 
}, - "card": { - "meta": { - "figures": { - "books": "livre", - "series": "séries", - "library": "bibliothèque" - }, - "matches": "Correspondances" - } - } - } - }, - "userSmartListScene": { - "navigation": { - "items": "Éléments", - "settings": "Paramètres" - }, - "layout": { - "missingIdError": "Cette scène nécessite un ID dans l'URL", - "smartListNotFound": "La liste intelligente est introuvable", - "viewCreateError": "Échec lors de la création de l'affichage", - "viewSaveError": "Échec lors de la sauvegarde de l'affichage" - }, - "header": { - "backLink": "Listes" - }, - "itemsScene": { - "smartListNotFound": "La liste intelligente est introuvable", - "actionHeader": { - "viewSelector": { - "customView": "Affichage personnalisé", - "defaultView": "Affichage par défaut", - "noViewsSaved": "Aucun affichage enregistré", - "selectView": "Sélectionnez un affichage enregistré" - }, - "filterDrawer": { - "heading": "Filtres de liste intelligente", - "description": "Changer les filtres pour cette session affichée", - "rawData": { - "heading": "Données du filtre brut", - "description": "Voici comment Stump traitera les filtres" - }, - "buttons": { - "save": "Sauvegarder", - "cancel": "Annuler" - } - }, - "search": { - "placeholder": "Filtre rapide" - }, - "viewManager": { - "updateSelected": "Mettre à jour la sélection", - "create": "Créer un nouvel affichage", - "modal": { - "heading": { - "create": "Créer un affichage", - "update": "Actualiser l'affichage" - }, - "description": { - "create": "Créer un nouvel affichage pour cette liste intelligente", - "update": "Actualiser l'affichage" - }, - "createForm": { - "name": { - "label": "Nom", - "placeholder": "Mon affichage", - "description": "Un nom sympa pour identifier de manière unique cet affichage" - } - }, - "updateForm": { - "name": { - "label": "Nom", - "placeholder": "Mon affichage", - "description": "Le nom actualisé pour cet affichage, si désiré" - } - } - } - } - } - } - }, - "settingsScene": { - "navigation": 
{ - "general": "Général", - "logs": "Journaux", - "server": "Serveur", - "jobs": "Tâches et Configuration", - "users": "Gestion des Utilisateurs", - "desktop": "Bureau" - }, - "sidebar": { - "application": { - "account": "Compte", - "appearance": "Apparence", - "reader": "Lecteur", - "desktop": "Bureau", - "label": "Application" - }, - "server": { - "general": "Général", - "logs": "Journaux", - "users": "Utilisateurs", - "jobs": "Tâches", - "access": "Accéder", - "notifications": "Notifications", - "label": "Serveur" - } - }, - "app/account": { - "helmet": "Paramètres du compte", - "title": "Paramètres du compte", - "description": "Paramètres liés à votre compte", - "sections": { - "account": { - "validation": { - "invalidUrl": "Veuillez saisir une URL valide", - "missingUsername": "Nom d’utilisateur requis" - }, - "labels": { - "username": "Nom d'utilisateur", - "password": "Mot de passe", - "activeChangesPrompt": "Vous avez des modifications non enregistrées" - }, - "errors": { - "updateFailed": "Une erreur de serveur est survenue lors de la mise à jour de votre profil. Veuillez réessayer." - }, - "avatarPicker": { - "heading": "Définir votre image d'avatar", - "subtitle": "Stump prend en charge les avatars personnalisés, qui peuvent être définis en fournissant une URL vers une image. 
Les téléchargements d'images ne sont pas pris en charge pour réduire la quantité de données stockées sur le serveur.", - "preview": "Un aperçu apparaîtra ici une fois que vous aurez entré une URL.", - "labels": { - "imageUrl": "URL de l'image", - "customAvatar": "Personnaliser l’avatar" - }, - "buttons": { - "confirm": "Confirmer l'image", - "cancel": "Annuler", - "edit": "Éditer", - "changeImage": "Changer l'image", - "removeImage": "Supprimer l'image" - } - }, - "buttons": { - "confirm": "Enregistrer les modifications" - } - }, - "locale": { - "localeSelector": { - "label": "Langue" - }, - "heading": "Langue", - "subtitle": [ - "Stump prend en charge plusieurs langues, définissez vos préférences ci-dessous. Pensez à aider à améliorer la qualité des", - "traductions de Stump", - "si vous le pouvez !" - ] - } - } - }, - "app/appearance": { - "helmet": "Apparence", - "title": "Apparence", - "description": "Personnaliser l'apparence de l'application", - "sections": { - "themeSelect": { - "label": "Thème", - "description": "Par défaut, Stump possède un thème clair et sombre", - "customTheme": [ - "Si vous êtes intéressé par la création de votre propre thème personnalisé, consultez la", - "documentation" - ], - "options": { - "light": "Clair", - "dark": "Sombre", - "bronze": "Bronze léger" - } - } - } - }, - "app/reader": { - "helmet": "Paramètres du lecteur", - "title": "Paramètres du lecteur", - "description": "Options par défaut pour les lecteurs. 
Elles sont liées à votre appareil actuel uniquement", - "sections": { - "imageBasedBooks": { - "label": "Livres illustrés", - "description": "Bandes dessinées, mangas, et autres livres illustrés", - "sections": { - "preloadAheadCount": { - "label": "Nombre de préchargements avant la page actuelle", - "description": "Le nombre de pages à précharger avant la page actuelle" - }, - "preloadBehindCount": { - "label": "Nombre de préchargements après la page actuelle", - "description": "Le nombre de pages à précharger après la page actuelle" - } - } - } - } - }, - "app/desktop": { - "helmet": "Paramètres du bureau", - "title": "Paramètres du bureau", - "description": "Paramètres liés à l'application de bureau Stump", - "sections": { - "discordPresence": { - "label": "Présence Discord", - "description": "Affiche votre activité Stump sur Discord en utilisant la Présence Riche de Discord", - "reconnect": "Se reconnecter à Discord" - } - } - }, - "server/general": { - "helmet": "Paramètres généraux du serveur", - "title": "Paramètres généraux", - "description": "Paramètres généraux liés à l'instance de votre serveur Stump", - "sections": { - "updateAvailable": { - "message": "Votre serveur n'est pas à jour. Veuillez le mettre à jour vers la dernière version !" 
- }, - "serverInfo": { - "title": "Informations du serveur", - "description": "Détails de base sur l'instance de votre serveur Stump", - "build": { - "label": "Version", - "description": "Détails à propos de la version", - "version": { - "semver": "Version", - "commitHash": "Commit exact", - "date": "Date de compilation" - } - } - } - } - }, - "server/logs": { - "helmet": "Journaux", - "title": "Journaux", - "description": "Les journaux générés par votre instance de serveur Stump", - "sections": { - "persistedLogs": { - "title": "Journaux persistants", - "description": "Ces journaux ont été enregistrés manuellement dans la base de données et sont généralement associés à un travail ou à un événement spécifique", - "table": { - "columns": { - "level": "Niveau", - "message": "Message", - "timestamp": "Horodatage" - }, - "emptyHeading": "Aucun journal à afficher", - "emptySubtitle": "Votre serveur est soit très sain, soit en très mauvais état" - } - }, - "liveLogs": { - "title": "Flux des journaux en direct", - "description": "Diffusion directe depuis votre instance de serveur Stump en temps réel" - } - } - }, - "server/jobs": { - "helmet": "Tâches", - "title": "Tâches", - "description": "Tâches en arrière-plan qui s'exécutent sur l'instance de votre serveur Stump", - "sections": { - "scheduling": { - "title": "Planification", - "description": "Certaines tâches peuvent être configurées pour être exécutées à un moment défini. 
Les modifications de cette configuration prendront effet après le redémarrage du serveur" - }, - "history": { - "title": "Historique", - "description": "Un enregistrement des tâches qui ont été exécutées sur l'instance de votre serveur Stump", - "table": { - "columns": { - "name": "Type", - "description": "Description", - "status": "État", - "createdAt": "Commencée à", - "elapsed": "Temps écoulé", - "tasks": "Tâches" - }, - "emptyHeading": "Il n'y a aucune tâche à afficher", - "emptySubtitle": "Vous ne pouvez pas avoir une tâche si vous n'avez pas une tâche définie", - "deleteAllMessage": "L'historique des tâches et les statistiques peuvent être supprimés de la base de données à tout moment. Cette action ne peut pas être annulée", - "deleteAllConfirmButton": "Effacer l'historique", - "deleteAllConfirmButtonTitle": "Effacer l'historique des tâches", - "deleteAllConfirmButtonTitleNoJobs": "Aucune tâche à effacer" - } - } - } - }, - "server/users": { - "helmet": "Gestion des utilisateurs", - "title": "Utilisateurs", - "description": "Gérer les utilisateurs sur ce serveur", - "createUser": { - "helmet": "Créer un utilisateur", - "title": "Créer un utilisateur", - "description": "Créer un nouvel utilisateur sur ce serveur" - }, - "updateUser": { - "helmet": "Modifier l'utilisateur", - "title": "Modifier l'utilisateur", - "description": "Mettre à jour les détails de cet utilisateur" - }, - "createOrUpdateForm": { - "accessControl": { - "heading": "Contrôle d'accès et restrictions", - "subtitle": [ - "Configurez toutes les restrictions que vous souhaitez appliquer à cet utilisateur. 
Pour plus d'informations sur le contrôle d'accès, consultez la", - "documentation" - ], - "ageRestriction": { - "label": "Restriction d'âge", - "description": "Empêche l'utilisateur d'accéder au contenu au-delà du seuil d'âge défini", - "placeholder": "13", - "enforceUnset": { - "label": "Appliquer en cas d'absence de paramétrage", - "description": "Si activée, les utilisateurs ne pourront pas accéder aux contenus pour lesquels aucune classification par âge n'est disponible" - } - }, - "tagRestriction": { - "label": "Restrictions d'étiquettes", - "description": "Empêche l'utilisateur d'accéder au contenu contenant les étiquettes sélectionnées", - "placeholder": "Adulte, Gore" - } - }, - "permissions": { - "heading": "Permissions", - "subtitle": [ - "Sélectionnez les autorisations que vous souhaitez accorder à cet utilisateur. Pour plus d'informations sur la fonction de chaque autorisation, consultez la", - "documentation" - ], - "bookclub": { - "label": "Clubs de lecture", - "read": { - "label": "Accéder aux fonctionnalités des clubs de lecture", - "description": "Permet à l'utilisateur d'accéder aux fonctionnalités des clubs de lecture, incluant la visualisation et l'inscription à des clubs de lecture" - }, - "create": { - "label": "Créer des Clubs de Lecture", - "description": "Permet à l'utilisateur de créer de nouveaux clubs de lectures" - } - }, - "file": { - "label": "Gestion des fichiers", - "explorer": { - "label": "File Explorer", - "description": "Allows the user to access the Library File Explorer.\nContent restriction is not supported when this feature is granted" - }, - "download": { - "label": "Télécharger les fichiers", - "description": "Permet à l'utilisateur de télécharger des fichiers à partir du serveur" - }, - "upload": { - "label": "Envoyer des fichiers", - "description": "Permet à l'utilisateur d'envoyer des fichiers sur le serveur" - } - }, - "library": { - "label": "Gestion des bibliothèques", - "create": { - "label": "Créer des 
bibliothèques", - "description": "Permet à l'utilisateur de créer de nouvelles bibliothèques.\nInclut les permissions d'édition et de scan" - }, - "scan": { - "label": "Scanner les bibliothèques", - "description": "Permet à l'utilisateur de lancer des scans pour les bibliothèques existantes" - }, - "edit": { - "label": "Modifier les bibliothèques", - "description": "Permet à l'utilisateur de modifier les détails de base des bibliothèques existantes" - }, - "manage": { - "label": "Gérer les bibliothèques", - "description": "Permet à l'utilisateur de gérer les paramètres avancés des bibliothèques existantes.\nInclut les permissions d'édition et de scan" - }, - "delete": { - "label": "Supprimer les bibliothèques", - "description": "Permet à l'utilisateur de supprimer des bibliothèques existantes.\nInclut les permissions d'édition, de gestion et de scan" - } - }, - "server": { - "label": "Gestion du serveur", - "manage": { - "label": "Gérer le serveur", - "description": "Permet à l'utilisateur de gérer le serveur.\nComprend *beaucoup* d'autres autorisations" - } - }, - "user": { - "label": "Gestion des Utilisateurs", - "read": { - "label": "Lire des utilisateurs", - "description": "Permet à l'utilisateur de rechercher d'autres utilisateurs sur le serveur. 
Cette fonction est nécessaire pour certaines fonctionnalités, par exemple pour réduire l'accès à une bibliothèque pour des utilisateurs" - }, - "manage": { - "label": "Gérer les utilisateurs", - "description": "Allows the user to manage other users on the server.\nIncludes permissions to create and update" - } - }, - "smartlist": { - "label": "Listes intelligentes", - "read": { - "label": "Accéder à la fonctionnalité Liste Intelligente", - "description": "Allows the user to access smart lists features" - } - } - }, - "validation": { - "ageRestrictionTooLow": "La restriction d'âge ne peut pas être inférieure à 0" - }, - "createSubmitButton": "Créer un utilisateur", - "updateSubmitButton": "Modifier l'utilisateur" - } - } - }, - "jobOverlay": { - "backupHeading": "Traitement en cours" - }, - "libraryStats": { - "seriesCount": "Total des séries", - "bookCount": "Total des livres", - "diskUsage": "Utilisation du disque" - }, - "pagination": { - "buttons": { - "next": "Suivant", - "previous": "Précédent" - }, - "popover": { - "heading": "Aller à la page", - "buttons": { - "confirm": "Aller", - "cancel": "Annuler" - } - } - }, - "signOutModal": { - "title": "Déconnexion", - "message": "Êtes-vous sûr(e) de vouloir vous déconnecter ?", - "buttons": { - "cancel": "Annuler", - "signOut": "Se déconnecter" - } - }, - "sidebar": { - "buttons": { - "home": "Accueil", - "libraries": "Bibliothèques", - "books": "Explorer", - "bookClubs": "Clubs de lecture", - "createLibrary": "Créer une bibliothèque", - "noLibraries": "Aucune bibliothèque", - "createBookClub": "Créer un Club de Lecture", - "noBookClubs": "Aucun club de lecture", - "settings": "Paramètres", - "themeToggle": "Changer de thème", - "goForward": "Aller en avant", - "goBack": "Aller en arrière", - "smartlists": "Listes intelligentes", - "noSmartlists": "Aucune liste intelligente", - "createSmartlist": "Créer une liste intelligente" - }, - "libraryOptions": { - "scanLibrary": "Scanner", - "fileExplorer": "Explorateur de 
fichiers", - "manageLibrary": "Manage", - "deleteLibrary": "Supprimer" - }, - "versionInformation": { - "heading": "Informations de version", - "semVer": "Version sémantique", - "commitHash": "Hachage de validation", - "buildDate": "Date de construction" - } - }, - "search": { - "placeholder": "Recherche" - }, - "serverSOS": { - "heading": "Serveur non disponible", - "desktop": { - "message": "Une erreur de réseau s'est produite indiquant que votre serveur Stump est actuellement indisponible. Veuillez vous assurer qu'il est en cours d'exécution et accessible depuis cet appareil.\nSi l'URL de votre serveur a changé, vous pouvez la mettre à jour à l'aide du formulaire ci-dessous" - }, - "web": { - "message": "Une erreur de réseau s'est produite indiquant que votre serveur Stump est actuellement indisponible. Veuillez vous assurer qu'il est en cours d'exécution et accessible depuis cet appareil" - }, - "reconnected": "Reconnecté au serveur! Redirection...", - "reconnectionFailed": "Quelque chose s'est mal passé!" 
- }, - "serverStatusOverlay": { - "heading": "Le serveur n'est pas connecté", - "message": [ - "Veuillez vérifier votre connexion internet", - "Cliquez ici", - "pour changer l'URL de votre serveur" - ] - }, - "slidingList": { - "empty": "Aucun élément à afficher", - "buttons": { - "next": "Avancer", - "previous": "Reculer" - } - }, - "tagSelect": { - "placholder": "Choisissez ou créez une étiquette", - "placeholderNoTags": "Aucune étiquette disponible" - }, - "thumbnailDropdown": { - "label": "Edit thumbnail", - "options": { - "selectFromBooks": "Sélectionner à partir des livres", - "uploadImage": "Importer une image" - }, - "uploadImage": { - "emptyState": "Un aperçu de votre image apparaîtra ici", - "prompt": "Déposez l'image ici ou cliquez pour sélectionner", - "remove": "Supprimer l’image" - } - }, - "common": { - "cancel": "Annuler", - "confirm": "Confirmer", - "save": "Sauvegarder", - "saveChanges": "Enregistrer les modifications", - "create": "Créer", - "edit": "Éditer", - "unimplemented": "Cette fonctionnalité n'est pas encore implémentée ! Revenez plus tard", - "limitedFunctionality": "Ceci n'est pas encore totalement implémenté et manque de certaines fonctionnalités. Revenez plus tard" - } -} diff --git a/packages/browser/src/paths.ts b/packages/browser/src/paths.ts index 73ff77c36..cb06ad00a 100644 --- a/packages/browser/src/paths.ts +++ b/packages/browser/src/paths.ts @@ -16,6 +16,8 @@ type SettingsPage = | 'server/logs' | 'server/users' | 'server/access' + | 'server/email' + | 'server/email/new' | 'server/notifications' type DocTopic = 'access-control' | 'book-club' type BookClubTab = 'overview' | 'members' | 'chat-board' | 'settings' @@ -78,8 +80,10 @@ const paths = { return `${baseUrl}/reader?${searchParams.toString()}` }, bookSearch: () => '/books', + createEmailer: () => paths.settings('server/email/new'), docs: (topic?: DocTopic, section?: string) => `https://www.stumpapp.dev/guides/${topic || ''}${section ? 
`#${section}` : ''}`, + editEmailer: (id: number) => paths.settings('server/email') + `/${id}/edit`, home: () => '/', libraries: () => '/libraries', libraryBooks: (id: string, page?: number) => { diff --git a/packages/browser/src/scenes/auth/index.ts b/packages/browser/src/scenes/auth/index.ts new file mode 100644 index 000000000..5fddd38a0 --- /dev/null +++ b/packages/browser/src/scenes/auth/index.ts @@ -0,0 +1 @@ +export { default } from './LoginOrClaimScene' diff --git a/packages/browser/src/scenes/book/BookOverviewScene.tsx b/packages/browser/src/scenes/book/BookOverviewScene.tsx index a9db78487..6844572ba 100644 --- a/packages/browser/src/scenes/book/BookOverviewScene.tsx +++ b/packages/browser/src/scenes/book/BookOverviewScene.tsx @@ -6,9 +6,9 @@ import { Helmet } from 'react-helmet' import { useParams } from 'react-router' import { useMediaMatch } from 'rooks' +import MediaCard from '@/components/book/BookCard' import { SceneContainer } from '@/components/container' import LinkBadge from '@/components/LinkBadge' -import MediaCard from '@/components/media/MediaCard' import ReadMore from '@/components/ReadMore' import TagList from '@/components/tags/TagList' @@ -21,6 +21,7 @@ import BookLibrarySeriesLinks from './BookLibrarySeriesLinks' import BookReaderDropdown from './BookReaderDropdown' import BooksAfterCursor from './BooksAfterCursor' import DownloadMediaButton from './DownloadMediaButton' +import EmailBookDropdown from './EmailBookDropdown' // TODO: redesign page? // TODO: with metadata being collected now, there is a lot more information to display: @@ -118,6 +119,7 @@ export default function BookOverviewScene() { )} {canDownload && } +
{!isAtLeastMedium && !!media.metadata?.summary && ( diff --git a/packages/browser/src/scenes/book/BookRouter.tsx b/packages/browser/src/scenes/book/BookRouter.tsx index ab59b11d0..50479ab37 100644 --- a/packages/browser/src/scenes/book/BookRouter.tsx +++ b/packages/browser/src/scenes/book/BookRouter.tsx @@ -3,12 +3,12 @@ import { Navigate, Route, Routes } from 'react-router' import ServerOwnerRouteWrapper from '@/components/ServerOwnerRouteWrapper.tsx' -const BookSearchScene = lazy(() => import('./BookSearchScene.tsx')) +const BookSearchScene = lazy(() => import('../bookSearch/BookSearchScene.tsx')) const BookOverviewScene = lazy(() => import('./BookOverviewScene.tsx')) -const BookReaderScene = lazy(() => import('./BookReaderScene.tsx')) -const EpubReaderScene = lazy(() => import('./EpubReaderScene.tsx')) -const PDFReaderScene = lazy(() => import('./PDFReaderScene.tsx')) -const BookManagementScene = lazy(() => import('./management/BookManagementScene.tsx')) +const BookReaderScene = lazy(() => import('./reader/BookReaderScene.tsx')) +const EpubReaderScene = lazy(() => import('./reader/EpubReaderScene.tsx')) +const PDFReaderScene = lazy(() => import('./reader/PDFReaderScene.tsx')) +const BookManagementScene = lazy(() => import('./settings/BookManagementScene.tsx')) export default function BookRouter() { return ( diff --git a/packages/browser/src/scenes/book/BookSearchScene.tsx b/packages/browser/src/scenes/book/BookSearchScene.tsx deleted file mode 100644 index 5216fa87f..000000000 --- a/packages/browser/src/scenes/book/BookSearchScene.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import React from 'react' -import { Helmet } from 'react-helmet' -import { useMediaMatch } from 'rooks' - -import { SceneContainer } from '@/components/container' -import { FilterProvider } from '@/components/filters' -import BookSearch from '@/components/media/BookSearch' -import { usePageParam } from '@/hooks/usePageParam' - -export default function BookSearchScene() { - const { page, setPage } = 
usePageParam() - - const is3XLScreenOrBigger = useMediaMatch('(min-width: 1600px)') - - return ( - - - - Stump | Books - - - - - - ) -} diff --git a/packages/browser/src/scenes/book/BooksAfterCursor.tsx b/packages/browser/src/scenes/book/BooksAfterCursor.tsx index 70d8b2de8..000ae0c19 100644 --- a/packages/browser/src/scenes/book/BooksAfterCursor.tsx +++ b/packages/browser/src/scenes/book/BooksAfterCursor.tsx @@ -2,8 +2,8 @@ import { useMediaCursorQuery } from '@stump/client' import { Media } from '@stump/types' import { useEffect } from 'react' +import MediaCard from '@/components/book/BookCard' import HorizontalCardList from '@/components/HorizontalCardList' -import MediaCard from '@/components/media/MediaCard' type Props = { cursor: Media diff --git a/packages/browser/src/scenes/book/EmailBookDropdown.tsx b/packages/browser/src/scenes/book/EmailBookDropdown.tsx new file mode 100644 index 000000000..d1bf759eb --- /dev/null +++ b/packages/browser/src/scenes/book/EmailBookDropdown.tsx @@ -0,0 +1,169 @@ +import { useEmailDevicesQuery, useSendAttachmentEmail } from '@stump/client' +import { Badge, Button, ComboBox, Dialog, IconButton, Input } from '@stump/components' +import { useLocaleContext } from '@stump/i18n' +import { Send } from 'lucide-react' +import React, { Suspense, useCallback, useMemo, useState } from 'react' +import toast from 'react-hot-toast' + +import { useAppContext } from '@/context' + +type ContainerProps = { + mediaId: string +} +export default function EmailBookDropdownContainer({ mediaId }: ContainerProps) { + const { checkPermission } = useAppContext() + + const canSendEmail = useMemo(() => checkPermission('email:send'), [checkPermission]) + const canArbitrarySendEmail = useMemo( + () => checkPermission('email:arbitrary_send'), + [checkPermission], + ) + + if (!canSendEmail && !canArbitrarySendEmail) { + return null + } + + return ( + + + + ) +} + +type Props = { + canArbitrarySendEmail: boolean +} & ContainerProps + +function 
EmailBookDropdown({ mediaId, canArbitrarySendEmail }: Props) { + const { t } = useLocaleContext() + const { devices } = useEmailDevicesQuery() + const { sendAsync: sendEmail, isSending } = useSendAttachmentEmail() + + const [isOpen, setIsOpen] = useState(false) + const [deviceIds, setDeviceIds] = useState([]) + const [emails, setEmails] = useState([]) + + const [currentEmail, setCurrentEmail] = useState('') + + const handleSend = useCallback(async () => { + if (deviceIds.length === 0 && emails.length === 0) { + return + } + + const payload = { + media_ids: [mediaId], + send_to: [ + ...deviceIds.map((id) => ({ device_id: id })), + ...(canArbitrarySendEmail ? emails.map((email) => ({ email })) : []), + ], + } + + try { + const { errors } = await sendEmail(payload) + setIsOpen(errors.length > 0) + if (errors.length > 0) { + console.warn(errors) + toast.error('Some errors occurred while sending email(s). Check the logs for more detail') + } + } catch (error) { + console.error(error) + toast.error('Failed to send email') + } + }, [sendEmail, deviceIds, emails, canArbitrarySendEmail, mediaId]) + + const renderArbitraryEmails = () => { + if (!canArbitrarySendEmail) { + return null + } else { + return ( +
+
+ {emails.map((email, index) => ( + setEmails((curr) => curr.filter((e) => e !== email))} + > + {email} + + ))} +
+ +
+ setCurrentEmail(e.target.value)} + /> + +
+
+ ) + } + } + + return ( + + + + + + + + + {t(getKey('heading'))} + {t(getKey('description'))} + setIsOpen(false)} disabled={isSending} /> + + +
+ ({ + label: device.name, + value: device.id.toString(), + }))} + isMultiSelect + filterable + filterEmptyMessage={t(getFormKey('devices.noFilterMatch'))} + value={deviceIds.map((id) => id.toString())} + onChange={(selected) => { + setDeviceIds(selected?.map((id) => parseInt(id)).filter((id) => !isNaN(id)) || []) + }} + size="full" + /> + + {renderArbitraryEmails()} +
+ + + + + +
+
+ ) +} + +const BASE_LOCALE_KEY = 'bookOverviewScene.emailBook' +const getKey = (key: string) => `${BASE_LOCALE_KEY}.${key}` +const getFormKey = (key: string) => `${BASE_LOCALE_KEY}.form.${key}` diff --git a/packages/browser/src/scenes/book/index.ts b/packages/browser/src/scenes/book/index.ts new file mode 100644 index 000000000..1c011b015 --- /dev/null +++ b/packages/browser/src/scenes/book/index.ts @@ -0,0 +1 @@ +export { default as BookRouter } from './BookRouter' diff --git a/packages/browser/src/scenes/book/BookReaderScene.tsx b/packages/browser/src/scenes/book/reader/BookReaderScene.tsx similarity index 99% rename from packages/browser/src/scenes/book/BookReaderScene.tsx rename to packages/browser/src/scenes/book/reader/BookReaderScene.tsx index 64a137b59..f9e2aa9fb 100644 --- a/packages/browser/src/scenes/book/BookReaderScene.tsx +++ b/packages/browser/src/scenes/book/reader/BookReaderScene.tsx @@ -7,7 +7,7 @@ import { ImageBasedReader } from '@/components/readers/image-based' import paths from '@/paths' import { useReaderStore } from '@/stores' -import { ARCHIVE_EXTENSION, EBOOK_EXTENSION, PDF_EXTENSION } from '../../utils/patterns' +import { ARCHIVE_EXTENSION, EBOOK_EXTENSION, PDF_EXTENSION } from '../../../utils/patterns' export default function BookReaderScene() { const [search] = useSearchParams() diff --git a/packages/browser/src/scenes/book/EpubReaderScene.tsx b/packages/browser/src/scenes/book/reader/EpubReaderScene.tsx similarity index 97% rename from packages/browser/src/scenes/book/EpubReaderScene.tsx rename to packages/browser/src/scenes/book/reader/EpubReaderScene.tsx index 1668b6e2f..89c293f1a 100644 --- a/packages/browser/src/scenes/book/EpubReaderScene.tsx +++ b/packages/browser/src/scenes/book/reader/EpubReaderScene.tsx @@ -4,7 +4,7 @@ import { Navigate, useParams, useSearchParams } from 'react-router-dom' import EpubJsReader from '@/components/readers/epub/EpubJsReader' -import paths from '../../paths' +import paths from '../../../paths' 
//! NOTE: Only the epub.js reader is supported for now :sob: export default function EpubReaderScene() { diff --git a/packages/browser/src/scenes/book/PDFReaderScene.tsx b/packages/browser/src/scenes/book/reader/PDFReaderScene.tsx similarity index 100% rename from packages/browser/src/scenes/book/PDFReaderScene.tsx rename to packages/browser/src/scenes/book/reader/PDFReaderScene.tsx diff --git a/packages/browser/src/scenes/book/management/BookManagementScene.tsx b/packages/browser/src/scenes/book/settings/BookManagementScene.tsx similarity index 84% rename from packages/browser/src/scenes/book/management/BookManagementScene.tsx rename to packages/browser/src/scenes/book/settings/BookManagementScene.tsx index b7a535a27..e59b056a4 100644 --- a/packages/browser/src/scenes/book/management/BookManagementScene.tsx +++ b/packages/browser/src/scenes/book/settings/BookManagementScene.tsx @@ -1,5 +1,6 @@ +import { mediaApi } from '@stump/api' import { useMediaByIdQuery } from '@stump/client' -import { Alert, Breadcrumbs, Heading, Text } from '@stump/components' +import { Alert, Breadcrumbs, Button, Heading, Text } from '@stump/components' import { Construction } from 'lucide-react' import React, { useMemo } from 'react' import { Navigate, useParams } from 'react-router' @@ -52,6 +53,12 @@ export default function BookManagementScene() { return } + function handleAnalyze() { + if (id != undefined) { + mediaApi.startMediaAnalysis(id) + } + } + return (
@@ -72,6 +79,12 @@ export default function BookManagementScene() { +
+ +
+
diff --git a/packages/browser/src/scenes/book/management/BookPageGrid.tsx b/packages/browser/src/scenes/book/settings/BookPageGrid.tsx similarity index 100% rename from packages/browser/src/scenes/book/management/BookPageGrid.tsx rename to packages/browser/src/scenes/book/settings/BookPageGrid.tsx diff --git a/packages/browser/src/scenes/book/management/BookThumbnailSelector.tsx b/packages/browser/src/scenes/book/settings/BookThumbnailSelector.tsx similarity index 100% rename from packages/browser/src/scenes/book/management/BookThumbnailSelector.tsx rename to packages/browser/src/scenes/book/settings/BookThumbnailSelector.tsx diff --git a/packages/browser/src/scenes/book-club/BookClubRouter.tsx b/packages/browser/src/scenes/bookClub/BookClubRouter.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/BookClubRouter.tsx rename to packages/browser/src/scenes/bookClub/BookClubRouter.tsx diff --git a/packages/browser/src/scenes/book-club/UserBookClubsScene.tsx b/packages/browser/src/scenes/bookClub/UserBookClubsScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/UserBookClubsScene.tsx rename to packages/browser/src/scenes/bookClub/UserBookClubsScene.tsx diff --git a/packages/browser/src/scenes/book-club/create-club/CreateBookClubForm.tsx b/packages/browser/src/scenes/bookClub/create-club/CreateBookClubForm.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/create-club/CreateBookClubForm.tsx rename to packages/browser/src/scenes/bookClub/create-club/CreateBookClubForm.tsx diff --git a/packages/browser/src/scenes/book-club/create-club/CreateBookClubScene.tsx b/packages/browser/src/scenes/bookClub/create-club/CreateBookClubScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/create-club/CreateBookClubScene.tsx rename to packages/browser/src/scenes/bookClub/create-club/CreateBookClubScene.tsx diff --git 
a/packages/browser/src/scenes/book-club/create-club/CreatorPreferences.tsx b/packages/browser/src/scenes/bookClub/create-club/CreatorPreferences.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/create-club/CreatorPreferences.tsx rename to packages/browser/src/scenes/bookClub/create-club/CreatorPreferences.tsx diff --git a/packages/browser/src/scenes/book-club/create-club/RoleMappingForm.tsx b/packages/browser/src/scenes/bookClub/create-club/RoleMappingForm.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/create-club/RoleMappingForm.tsx rename to packages/browser/src/scenes/bookClub/create-club/RoleMappingForm.tsx diff --git a/packages/browser/src/scenes/book-club/explore/BookClubExploreScene.tsx b/packages/browser/src/scenes/bookClub/explore/BookClubExploreScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/explore/BookClubExploreScene.tsx rename to packages/browser/src/scenes/bookClub/explore/BookClubExploreScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/BookClubHeader.tsx b/packages/browser/src/scenes/bookClub/home/BookClubHeader.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/BookClubHeader.tsx rename to packages/browser/src/scenes/bookClub/home/BookClubHeader.tsx diff --git a/packages/browser/src/scenes/book-club/home/BookClubHomeLayout.tsx b/packages/browser/src/scenes/bookClub/home/BookClubHomeLayout.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/BookClubHomeLayout.tsx rename to packages/browser/src/scenes/bookClub/home/BookClubHomeLayout.tsx diff --git a/packages/browser/src/scenes/book-club/home/BookClubNavigation.tsx b/packages/browser/src/scenes/bookClub/home/BookClubNavigation.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/BookClubNavigation.tsx rename to packages/browser/src/scenes/bookClub/home/BookClubNavigation.tsx diff --git 
a/packages/browser/src/scenes/book-club/home/context.ts b/packages/browser/src/scenes/bookClub/home/context.ts similarity index 100% rename from packages/browser/src/scenes/book-club/home/context.ts rename to packages/browser/src/scenes/bookClub/home/context.ts diff --git a/packages/browser/src/scenes/book-club/home/tabs/chat-board/BookClubChatBoardScene.tsx b/packages/browser/src/scenes/bookClub/home/tabs/chat-board/BookClubChatBoardScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/chat-board/BookClubChatBoardScene.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/chat-board/BookClubChatBoardScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/chat-board/ChatMessage.tsx b/packages/browser/src/scenes/bookClub/home/tabs/chat-board/ChatMessage.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/chat-board/ChatMessage.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/chat-board/ChatMessage.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/members/BookClubMembersScene.tsx b/packages/browser/src/scenes/bookClub/home/tabs/members/BookClubMembersScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/members/BookClubMembersScene.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/members/BookClubMembersScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/overview/BookClubOverviewScene.tsx b/packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubOverviewScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/overview/BookClubOverviewScene.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubOverviewScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/overview/BookClubScheduleTimeline.tsx b/packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubScheduleTimeline.tsx similarity index 100% 
rename from packages/browser/src/scenes/book-club/home/tabs/overview/BookClubScheduleTimeline.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubScheduleTimeline.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/overview/BookClubScheduleTimelineItem.tsx b/packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubScheduleTimelineItem.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/overview/BookClubScheduleTimelineItem.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/overview/BookClubScheduleTimelineItem.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/settings/BookClubSettingsScene.tsx b/packages/browser/src/scenes/bookClub/home/tabs/settings/BookClubSettingsScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/settings/BookClubSettingsScene.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/settings/BookClubSettingsScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/settings/MemberSpecDisplay.tsx b/packages/browser/src/scenes/bookClub/home/tabs/settings/MemberSpecDisplay.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/settings/MemberSpecDisplay.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/settings/MemberSpecDisplay.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/AddBookCard.tsx b/packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/AddBookCard.tsx similarity index 98% rename from packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/AddBookCard.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/AddBookCard.tsx index 4031d3510..773907a12 100644 --- a/packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/AddBookCard.tsx +++ b/packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/AddBookCard.tsx @@ -4,7 +4,7 @@ import { 
Media } from '@stump/types' import React, { useEffect, useState } from 'react' import { useFormContext } from 'react-hook-form' -import BookSearchOverlay from '@/components/media/BookSearchOverlay' +import BookSearchOverlay from '@/components/book/BookSearchOverlay' import { defaultBook, Schema } from './CreateOrAddToScheduleForm' diff --git a/packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/BookClubSchedulerScene.tsx b/packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/BookClubSchedulerScene.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/BookClubSchedulerScene.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/BookClubSchedulerScene.tsx diff --git a/packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/CreateOrAddToScheduleForm.tsx b/packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/CreateOrAddToScheduleForm.tsx similarity index 100% rename from packages/browser/src/scenes/book-club/home/tabs/settings/scheduler/CreateOrAddToScheduleForm.tsx rename to packages/browser/src/scenes/bookClub/home/tabs/settings/scheduler/CreateOrAddToScheduleForm.tsx diff --git a/packages/browser/src/scenes/bookClub/index.ts b/packages/browser/src/scenes/bookClub/index.ts new file mode 100644 index 000000000..2ec8d9a3a --- /dev/null +++ b/packages/browser/src/scenes/bookClub/index.ts @@ -0,0 +1 @@ +export { default as BookClubRouter } from './BookClubRouter' diff --git a/packages/browser/src/scenes/bookSearch/BookSearchScene.tsx b/packages/browser/src/scenes/bookSearch/BookSearchScene.tsx new file mode 100644 index 000000000..249f381c4 --- /dev/null +++ b/packages/browser/src/scenes/bookSearch/BookSearchScene.tsx @@ -0,0 +1,162 @@ +import { prefetchPagedMedia, usePagedMediaQuery } from '@stump/client' +import { usePrevious, usePreviousIsDifferent } from '@stump/components' +import React, { useCallback, useEffect, useMemo } from 'react' 
+import { Helmet } from 'react-helmet' + +import { BookTable } from '@/components/book' +import BookGrid from '@/components/book/BookGrid' +import { defaultBookColumnSort } from '@/components/book/table' +import { + FilterContext, + FilterHeader, + URLFilterContainer, + URLFilterDrawer, + URLOrdering, + useFilterScene, +} from '@/components/filters' +import { EntityTableColumnConfiguration } from '@/components/table' +import TableOrGridLayout from '@/components/TableOrGridLayout' +import useIsInView from '@/hooks/useIsInView' +import { useBooksLayout } from '@/stores/layout' + +export default function BookSearchScene() { + const [containerRef, isInView] = useIsInView() + + const { layoutMode, setLayout, columns, setColumns } = useBooksLayout((state) => ({ + columns: state.columns, + layoutMode: state.layout, + setColumns: state.setColumns, + setLayout: state.setLayout, + })) + const { + filters, + ordering, + pagination: { page, page_size }, + setPage, + ...rest + } = useFilterScene() + + const params = useMemo( + () => ({ + page, + page_size, + params: { + ...filters, + ...ordering, + }, + }), + [page, page_size, ordering, filters], + ) + const { + isLoading: isLoadingMedia, + isRefetching: isRefetchingMedia, + media, + pageData, + } = usePagedMediaQuery(params) + const { current_page, total_pages } = pageData || {} + + const differentSearch = usePreviousIsDifferent(filters?.search as string) + useEffect(() => { + if (differentSearch) { + setPage(1) + } + }, [differentSearch, setPage]) + + const handlePrefetchPage = useCallback( + (page: number) => { + prefetchPagedMedia({ + ...params, + page, + }) + }, + [params], + ) + + const previousPage = usePrevious(current_page) + const shouldScroll = !!previousPage && previousPage !== current_page + useEffect( + () => { + if (!isInView && shouldScroll) { + containerRef.current?.scrollIntoView({ + behavior: 'smooth', + block: 'nearest', + inline: 'start', + }) + } + }, + // eslint-disable-next-line 
react-hooks/exhaustive-deps + [shouldScroll], + ) + + const renderContent = () => { + if (layoutMode === 'GRID') { + return ( + +
+ 0} + /> +
+
+ ) + } else { + return ( + ( + + } + {...props} + /> + )} + /> + ) + } + } + + return ( + +
+ + Stump | Books + + +
+ + } + orderControls={} + filterControls={} + /> + + {renderContent()} +
+
+ ) +} diff --git a/packages/browser/src/scenes/bookSearch/index.ts b/packages/browser/src/scenes/bookSearch/index.ts new file mode 100644 index 000000000..967df881b --- /dev/null +++ b/packages/browser/src/scenes/bookSearch/index.ts @@ -0,0 +1 @@ +export { default } from './BookSearchScene' diff --git a/packages/browser/src/scenes/home/ContinueReading.tsx b/packages/browser/src/scenes/home/ContinueReading.tsx index 6567460a8..bd2b66393 100644 --- a/packages/browser/src/scenes/home/ContinueReading.tsx +++ b/packages/browser/src/scenes/home/ContinueReading.tsx @@ -3,8 +3,8 @@ import { Heading, Text } from '@stump/components' import { useLocaleContext } from '@stump/i18n' import { CircleSlash2 } from 'lucide-react' +import MediaCard from '@/components/book/BookCard' import HorizontalCardList from '@/components/HorizontalCardList' -import MediaCard from '@/components/media/MediaCard' export default function ContinueReadingMedia() { const { t } = useLocaleContext() diff --git a/packages/browser/src/scenes/home/NoLibraries.tsx b/packages/browser/src/scenes/home/NoLibraries.tsx index a4c484b39..b0d184d58 100644 --- a/packages/browser/src/scenes/home/NoLibraries.tsx +++ b/packages/browser/src/scenes/home/NoLibraries.tsx @@ -1,29 +1,41 @@ -import { ButtonOrLink, Card, Heading, Text } from '@stump/components' -import { CircleSlash2 } from 'lucide-react' +import { ButtonOrLink, Heading, Text } from '@stump/components' +import { useLocaleContext } from '@stump/i18n' import { useAppContext } from '../../context' import paths from '../../paths' export default function NoLibraries() { - const { isServerOwner } = useAppContext() + const { t } = useLocaleContext() + const { checkPermission } = useAppContext() + + const canCreateLibrary = checkPermission('library:create') return ( -
- - - - {isServerOwner ? "You don't have" : 'There are no'} libraries configured - - - Once {isServerOwner ? 'you create a library' : 'a library has been created'}, this page - will be more useful +
+ Construction illustration + +
+ {t(getKey('heading'))} + + {t(getKey('messagePrefix'))}.{' '} + {t(getKey(`message.${canCreateLibrary ? 'create' : 'wait'}`))} - {isServerOwner && ( - - Create a library - + + {canCreateLibrary && ( +
+ + {t(getKey('links.create'))} + +
)} - +
) } + +const LOCALE_BASE_KEY = 'noLibraries' +const getKey = (key: string) => `${LOCALE_BASE_KEY}.${key}` diff --git a/packages/browser/src/scenes/home/RecentlyAddedMedia.tsx b/packages/browser/src/scenes/home/RecentlyAddedMedia.tsx index 0d129bb59..ec10968f6 100644 --- a/packages/browser/src/scenes/home/RecentlyAddedMedia.tsx +++ b/packages/browser/src/scenes/home/RecentlyAddedMedia.tsx @@ -3,8 +3,8 @@ import { Heading, Text } from '@stump/components' import { useLocaleContext } from '@stump/i18n' import { CircleSlash2 } from 'lucide-react' +import MediaCard from '@/components/book/BookCard' import HorizontalCardList from '@/components/HorizontalCardList' -import MediaCard from '@/components/media/MediaCard' export default function RecentlyAddedMedia() { const { t } = useLocaleContext() diff --git a/packages/browser/src/scenes/home/index.ts b/packages/browser/src/scenes/home/index.ts new file mode 100644 index 000000000..01821cc5d --- /dev/null +++ b/packages/browser/src/scenes/home/index.ts @@ -0,0 +1 @@ +export { default } from './HomeScene' diff --git a/packages/browser/src/scenes/library/LibraryBooksScene.tsx b/packages/browser/src/scenes/library/LibraryBooksScene.tsx deleted file mode 100644 index bc8f8a511..000000000 --- a/packages/browser/src/scenes/library/LibraryBooksScene.tsx +++ /dev/null @@ -1,145 +0,0 @@ -import { prefetchPagedMedia, usePagedMediaQuery } from '@stump/client' -import { useCallback, useEffect, useMemo } from 'react' -import { Helmet } from 'react-helmet' -import { useMediaMatch } from 'rooks' - -import { FilterProvider, FilterToolBar, useFilterContext } from '@/components/filters' -import MediaList from '@/components/media/MediaList' -import Pagination from '@/components/Pagination' -import { useLayoutMode } from '@/hooks' -import useIsInView from '@/hooks/useIsInView' -import { usePageParam } from '@/hooks/usePageParam' - -import MediaGrid from '../series/MediaGrid' -import { useLibraryContext } from './context' - -export default 
function LibraryBooksSceneContainer() { - return ( - - - - ) -} - -function LibraryBooksScene() { - const is3XLScreenOrBigger = useMediaMatch('(min-width: 1600px)') - - const [containerRef, isInView] = useIsInView() - - const { page, setPage } = usePageParam() - const { library } = useLibraryContext() - - const { layoutMode } = useLayoutMode() - const { filters } = useFilterContext() - - const params = useMemo( - () => ({ - page, - page_size: is3XLScreenOrBigger ? 40 : 20, - params: { - ...filters, - series: { - library: { - id: library.id, - }, - }, - }, - }), - [page, is3XLScreenOrBigger, filters, library.id], - ) - const { - isLoading: isLoadingMedia, - isRefetching: isRefetchingMedia, - media, - pageData, - } = usePagedMediaQuery(params) - - const { current_page, total_pages } = pageData || {} - - const isOnFirstPage = current_page === 1 - const hasStuff = total_pages !== undefined && current_page !== undefined - - const handlePrefetchPage = useCallback( - (page: number) => { - prefetchPagedMedia({ - ...params, - page, - }) - }, - [params], - ) - - // TODO: detect if going from page > 1 to page = 1 and scroll to top - useEffect( - () => { - if (!isInView && !isOnFirstPage) { - containerRef.current?.scrollIntoView({ - block: 'nearest', - inline: 'start', - }) - } - }, - // eslint-disable-next-line react-hooks/exhaustive-deps - [current_page, isOnFirstPage], - ) - - const renderContent = () => { - if (layoutMode === 'GRID') { - return ( - 0} - /> - ) - } else { - return ( - 0} - /> - ) - } - } - - return ( - <> - - Stump | {library.name || ''} - - -
- - {/* FIXME: This component is currently intertwined with series context, and so it needs to change before I enable it */} - - -
- {hasStuff && ( - - )} -
{renderContent()}
- {hasStuff && ( - - )} -
- - ) -} diff --git a/packages/browser/src/scenes/library/LibraryLayout.tsx b/packages/browser/src/scenes/library/LibraryLayout.tsx index 2c10ceac6..efa505a62 100644 --- a/packages/browser/src/scenes/library/LibraryLayout.tsx +++ b/packages/browser/src/scenes/library/LibraryLayout.tsx @@ -35,23 +35,20 @@ export default function LibraryLayout() { return ( - - - - - - - - +
+ + + + + + + + +
) } diff --git a/packages/browser/src/scenes/library/LibraryNavigation.tsx b/packages/browser/src/scenes/library/LibraryNavigation.tsx index 818a15f5f..e6ac005f7 100644 --- a/packages/browser/src/scenes/library/LibraryNavigation.tsx +++ b/packages/browser/src/scenes/library/LibraryNavigation.tsx @@ -1,4 +1,4 @@ -import { prefetchLibraryFiles, prefetchLibrarySeries } from '@stump/client' +import { prefetchLibraryFiles, prefetchLibraryMedia, prefetchLibrarySeries } from '@stump/client' import { cn, Link } from '@stump/components' import React, { useMemo } from 'react' import { useLocation } from 'react-router' @@ -30,6 +30,7 @@ export default function LibraryNavigation() { { isActive: location.pathname.match(/\/libraries\/[^/]+\/books(\/.*)?$/), label: 'Books', + onHover: () => prefetchLibraryMedia(id), to: 'books', }, ...(canAccessFiles @@ -54,10 +55,10 @@ export default function LibraryNavigation() { const preferTopBar = primary_navigation_mode === 'TOPBAR' return ( -
+