From 237188ea4f1d9339c1d417d8a5de5cd07de5face Mon Sep 17 00:00:00 2001 From: Trim21 Date: Thu, 19 Feb 2026 21:58:19 +0800 Subject: [PATCH 1/7] rust --- .github/workflows/rust-ci.yaml | 65 + .gitignore | 2 + .rustfmt.toml | 3 + Cargo.lock | 3565 +++++++++++++++++++ Cargo.toml | 28 + config.example.toml | 1 + crates/app/Cargo.toml | 26 + crates/app/src/main.rs | 48 + crates/app/src/worker/canal/character.rs | 30 + crates/app/src/worker/canal/mod.rs | 151 + crates/app/src/worker/canal/person.rs | 24 + crates/app/src/worker/canal/search_event.rs | 528 +++ crates/app/src/worker/canal/subject.rs | 39 + crates/app/src/worker/canal/types.rs | 20 + crates/app/src/worker/canal/user.rs | 293 ++ crates/app/src/worker/mod.rs | 2 + crates/app/src/worker/timeline.rs | 22 + crates/common/Cargo.toml | 18 + crates/common/src/config.rs | 117 + crates/common/src/lib.rs | 80 + crates/php-serialize/Cargo.toml | 9 + crates/php-serialize/src/lib.rs | 56 + docs/rust-migration-plan.md | 48 + docs/rust-workspace.md | 30 + etc/Dockerfile.rust | 10 + rust-toolchain.toml | 4 + 26 files changed, 5219 insertions(+) create mode 100644 .github/workflows/rust-ci.yaml create mode 100644 .rustfmt.toml create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 crates/app/Cargo.toml create mode 100644 crates/app/src/main.rs create mode 100644 crates/app/src/worker/canal/character.rs create mode 100644 crates/app/src/worker/canal/mod.rs create mode 100644 crates/app/src/worker/canal/person.rs create mode 100644 crates/app/src/worker/canal/search_event.rs create mode 100644 crates/app/src/worker/canal/subject.rs create mode 100644 crates/app/src/worker/canal/types.rs create mode 100644 crates/app/src/worker/canal/user.rs create mode 100644 crates/app/src/worker/mod.rs create mode 100644 crates/app/src/worker/timeline.rs create mode 100644 crates/common/Cargo.toml create mode 100644 crates/common/src/config.rs create mode 100644 crates/common/src/lib.rs create mode 100644 
crates/php-serialize/Cargo.toml create mode 100644 crates/php-serialize/src/lib.rs create mode 100644 docs/rust-migration-plan.md create mode 100644 docs/rust-workspace.md create mode 100644 etc/Dockerfile.rust create mode 100644 rust-toolchain.toml diff --git a/.github/workflows/rust-ci.yaml b/.github/workflows/rust-ci.yaml new file mode 100644 index 000000000..1579d5eee --- /dev/null +++ b/.github/workflows/rust-ci.yaml @@ -0,0 +1,65 @@ +name: Rust CI + +on: + push: + branches: + - master + merge_group: + pull_request: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust: + runs-on: ubuntu-24.04 + env: + TARGET: x86_64-unknown-linux-gnu + steps: + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Set up Rust toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ env.TARGET }} + + - name: Check format + run: cargo fmt --all -- --check + + - name: Run clippy + run: cargo clippy --workspace --all-targets --locked + + - name: Run tests + run: cargo test --workspace --locked + + - name: Build release binary + run: cargo build --release --locked --target $TARGET -p app + + docker: + runs-on: ubuntu-24.04 + needs: rust + env: + TARGET: x86_64-unknown-linux-gnu + steps: + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Set up Rust toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ env.TARGET }} + + - name: Build release binary + run: cargo build --release --locked --target $TARGET -p app + + - name: Build rust docker image + run: docker build --build-arg TARGET=$TARGET -t app-rust -f etc/Dockerfile.rust . 
+ + - name: Smoke run + run: docker run --rm app-rust --help diff --git a/.gitignore b/.gitignore index 4eb88f609..8c0341a8e 100644 --- a/.gitignore +++ b/.gitignore @@ -22,5 +22,7 @@ node_modules/ pnpm-lock.yaml .task/ +/target/ + config.yaml config.toml diff --git a/.rustfmt.toml b/.rustfmt.toml new file mode 100644 index 000000000..1b29b9bb9 --- /dev/null +++ b/.rustfmt.toml @@ -0,0 +1,3 @@ +max_width = 88 +tab_spaces = 2 +newline_style = 'Unix' diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000..eec2c59ad --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,3565 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" + +[[package]] +name = "app" +version = "0.1.0" +dependencies = [ + "anyhow", + "bangumi-wiki-parser", + "clap", + "common", + "meilisearch-sdk", + "opendal", + "php-serialize", + "rdkafka", + "redis", + "serde", + "serde_json", + "sqlx", + "tokio", + "tracing", +] + +[[package]] +name = "arc-swap" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9f3647c145568cec02c42054e07bdf9a5a698e15b466fb2341bfc393cd24aa5" +dependencies = [ + "rustversion", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "backon" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" +dependencies = [ + "fastrand", + "gloo-timers", + "tokio", +] + +[[package]] +name = "bangumi-wiki-parser" +version = "0.1.0" +source = "git+https://github.com/bangumi/server-private#6a32bcfef3a2717fdca43676c64dc0f07fecae00" +dependencies = [ + "thiserror 2.0.18", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +dependencies = [ + "serde_core", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c6f81257d10a0f602a294ae4182251151ff97dbb504ef9afcdda4a64b24d9b4" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "clap" +version = "4.5.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5caf74d17c3aec5495110c34cc3f78644bfa89af6c8993ed4de2790e49b6499" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "370daa45065b80218950227371916a1633217ae42b2715b2287b606dcd618e24" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" +dependencies = [ + "heck", + "proc-macro2", + "quote", + 
"syn 2.0.116", +] + +[[package]] +name = "clap_lex" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" + +[[package]] +name = "cmake" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "common" +version = "0.1.0" +dependencies = [ + "anyhow", + "rdkafka", + "serde", + "sqlx", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.17", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32c" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a47af21622d091a8f0fb295b88bc886ac74efcc613efc19f5d0b21de5c89e47" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = 
"0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc3dc5ad92c2e2d1c193bbbbdf2ea477cb81331de4f3103f267ca18368b988c4" +dependencies = [ + "powerfmt", + "serde_core", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc2ab4d5a16117f9029e9a6b5e4e79f4c67f6519bc134210d4d4a04ba31f41b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = 
"dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + 
"quote", + "syn 2.0.116", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec 
1.15.1", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots 1.0.6", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc 0.2.5", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc 0.2.5", 
+ "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec 1.15.1", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc 0.2.5", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec 1.15.1", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" 
+version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "iso8601" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1082f0c48f143442a1ac6122f67e360ceee130b967af4d50996e5154a45df46" +dependencies = [ + "nom", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jiff" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c867c356cc096b33f4981825ab281ecba3db0acefe60329f044c1789d94c6543" +dependencies = [ + "jiff-static", + "jiff-tzdb-platform", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", + "windows-sys 0.61.2", +] + +[[package]] +name = "jiff-static" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7946b4325269738f270bb55b3c19ab5c5040525f83fd625259422a9d25d9be5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "jiff-tzdb" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68971ebff725b9e2ca27a601c5eb38a4c5d64422c4cbab0c535f248087eda5c2" + +[[package]] +name = "jiff-tzdb-platform" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" +dependencies = [ + "jiff-tzdb", +] + +[[package]] +name = "js-sys" +version = "0.3.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64", + "js-sys", + "ring", + "serde", + "serde_json", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.182" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" + +[[package]] +name = "libm" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" + +[[package]] +name = "libredox" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags", + "libc", + "redox_syscall 0.7.1", +] + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "meilisearch-index-setting-macro" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420f67f5943a0236eea7f199720cc465e806c48978d9b0fdc1fb62eceaee7556" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "structmeta", + "syn 2.0.116", +] + +[[package]] +name = "meilisearch-sdk" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2325355c73c96667178c09675389cfa7afc2382d5aa0e0d34d0cf29793d89090" +dependencies = [ + "async-trait", + "bytes", + "either", + "futures", + "futures-io", + "iso8601", + "jsonwebtoken", + "log", + "meilisearch-index-setting-macro", + "pin-project-lite", + "reqwest", + "serde", + "serde_json", + "thiserror 1.0.69", + "time", + "uuid", + "wasm-bindgen-futures", + "web-sys", + "yaup", +] + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "nom" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec 1.15.1", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_enum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" +dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +dependencies = [ + "proc-macro-crate", + 
"proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "opendal" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d075ab8a203a6ab4bc1bce0a4b9fe486a72bf8b939037f4b78d95386384bc80a" +dependencies = [ + "anyhow", + "backon", + "base64", + "bytes", + "crc32c", + "futures", + "getrandom 0.2.17", + "http", + "http-body", + "jiff", + "log", + "md-5", + "percent-encoding", + "quick-xml 0.38.4", + "reqsign", + "reqwest", + "serde", + "serde_json", + "tokio", + "url", + "uuid", +] + +[[package]] +name = "openssl-src" +version = "300.5.5+3.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f1787d533e03597a7934fd0a765f0d28e94ecc5fb7789f8053b1e699a56f709" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec 1.15.1", + "windows-link", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "php-serialize" +version = "0.1.0" +dependencies = [ + "serde", + "serde_php", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "portable-atomic-util" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9db96d7fa8782dd8c15ce32ffe8680bbd1e978a43bf51a34d39483540495f5" +dependencies = [ + "portable-atomic", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.116", +] + +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + 
"unicode-ident", +] + +[[package]] +name = "quick-xml" +version = "0.37.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quick-xml" +version = "0.38.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2 0.6.2", + "thiserror 2.0.18", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.18", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.6.2", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rdkafka" +version = "0.36.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1beea247b9a7600a81d4cc33f659ce1a77e1988323d7d2809c7ed1c21f4c316d" +dependencies = [ + "futures-channel", + "futures-util", + "libc", + "log", + "rdkafka-sys", + "serde", + "serde_derive", + "serde_json", 
+ "slab", + "tokio", +] + +[[package]] +name = "rdkafka-sys" +version = "4.10.0+2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e234cf318915c1059d4921ef7f75616b5219b10b46e9f3a511a15eb4b56a3f77" +dependencies = [ + "cmake", + "libc", + "num_enum", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "redis" +version = "0.27.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d8f99a4090c89cc489a94833c901ead69bfbf3877b4867d5482e321ee875bc" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine", + "futures-util", + "itertools", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "ryu", + "sha1_smol", + "socket2 0.5.10", + "tokio", + "tokio-util", + "url", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_syscall" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35985aa610addc02e24fc232012c86fd11f14111180f902b67e2d5331f8ebf2b" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" + +[[package]] +name = "reqsign" +version = "0.16.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43451dbf3590a7590684c25fb8d12ecdcc90ed3ac123433e500447c7d77ed701" +dependencies = [ + "anyhow", + "async-trait", + "base64", + "chrono", 
+ "form_urlencoded", + "getrandom 0.2.17", + "hex", + "hmac", + "home", + "http", + "log", + "percent-encoding", + "quick-xml 0.37.5", + "rand 0.8.5", + "reqwest", + "rust-ini", + "serde", + "serde_json", + "sha1", + "sha2", + "tokio", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 1.0.6", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsa" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rust-ini" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + +[[package]] +name = "rustc-hash" +version = "2.1.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustls" +version = "0.23.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_php" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4dc2d929957a1b3f11267fe809e1fe39712b8705ba24b60c0916144850b76cd" +dependencies = [ + "displaydoc 0.1.7", + "serde", + "smallvec 0.6.14", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0" +dependencies = [ + "maybe-uninit", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" 
+version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64", + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink", + "indexmap", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rustls", + "serde", + "sha2", + "smallvec 1.15.1", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", + "url", + "webpki-roots 0.26.11", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.116", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "syn 2.0.116", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64", + "bitflags", + "byteorder", + "bytes", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec 1.15.1", + "sqlx-core", + "stringprep", + "thiserror 2.0.18", + "tracing", + "whoami", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = 
"structmeta" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329" +dependencies = [ + "proc-macro2", + "quote", + "structmeta-derive", + "syn 2.0.116", +] + +[[package]] +name = "structmeta-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.116" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3df424c70518695237746f84cede799c9c58fcb37450d7b23716568cc8bc69cb" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + 
"thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc 0.2.5", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.6.2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.23.10+spec-1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +dependencies = [ + "indexmap", + "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version 
= "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "serde", + "serde_json", + "sharded-slab", + "smallvec 1.15.1", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-normalization" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +dependencies = [ + "getrandom 0.4.1", + "js-sys", + "serde_core", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.116", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.6", 
+] + +[[package]] +name = "webpki-roots" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" 
+version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = 
"wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn 2.0.116", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.116", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yaup" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0144f1a16a199846cb21024da74edd930b43443463292f536b7110b4855b5c6" +dependencies = [ + "form_urlencoded", + "serde", + "thiserror 1.0.69", +] + +[[package]] +name = "yoke" +version = "0.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc 0.2.5", + "yoke", + "zerofrom", +] 
+ +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.116", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..b44740291 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,28 @@ +[workspace] +members = [ + "crates/common", + "crates/app", + "crates/php-serialize", +] +resolver = "3" + +[workspace.package] +edition = "2021" +license = "AGPL-3.0-only" + +[workspace.dependencies] +anyhow = "1" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +serde_php = "0.5" +tokio = { version = "1", features = ["rt-multi-thread", "macros", "signal", "time"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } +clap = { version = "4", features = ["derive"] } +rdkafka = { version = "0.36", default-features = false } +sqlx = { version = "0.8", default-features = false, features = ["runtime-tokio-rustls", "mysql", "macros"] } +redis = { version = "0.27", features = ["tokio-comp"] } +meilisearch-sdk = "0.28" +opendal = { version = "0.55", features = ["services-s3"] } +bangumi-wiki-parser = { git = "https://github.com/bangumi/server-private", package = "bangumi-wiki-parser" } +php-serialize = { path = "crates/php-serialize" } diff --git a/config.example.toml b/config.example.toml index c0c84062d..f6873572a 100644 --- a/config.example.toml +++ 
b/config.example.toml @@ -5,6 +5,7 @@ banned-domain = "lista.cc|snapmail.cc|ashotmail.com|zoutlook.com" s3-entry-point = "" s3-access-key = "" s3-secret-key = "" +s3-image-resize-bucket = "img-resize" http.host = "127.0.0.1" http.port = 3_000 diff --git a/crates/app/Cargo.toml b/crates/app/Cargo.toml new file mode 100644 index 000000000..2656c03b9 --- /dev/null +++ b/crates/app/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "app" +version = "0.1.0" +edition.workspace = true +license.workspace = true + +[dependencies] +anyhow.workspace = true +tokio.workspace = true +tracing.workspace = true +clap.workspace = true +serde.workspace = true +serde_json.workspace = true +sqlx.workspace = true +redis.workspace = true +meilisearch-sdk.workspace = true +opendal.workspace = true +bangumi-wiki-parser.workspace = true +php-serialize.workspace = true +common = { path = "../common" } + +[target.'cfg(unix)'.dependencies] +rdkafka = { workspace = true, features = ["cmake-build", "tokio", "ssl-vendored"] } + +[target.'cfg(not(unix))'.dependencies] +rdkafka = { workspace = true, features = ["cmake-build", "tokio"] } diff --git a/crates/app/src/main.rs b/crates/app/src/main.rs new file mode 100644 index 000000000..af2e735c8 --- /dev/null +++ b/crates/app/src/main.rs @@ -0,0 +1,48 @@ +use anyhow::Context; +use clap::{Parser, Subcommand}; + +mod worker; + +#[derive(Debug, Parser)] +#[command(name = "app")] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Debug, Subcommand)] +enum Commands { + Worker { + #[command(subcommand)] + command: WorkerCommands, + }, + Server, +} + +#[derive(Debug, Subcommand)] +enum WorkerCommands { + Canal, + Timeline, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + common::init_tracing(); + + let cli = Cli::parse(); + + match cli.command { + Commands::Worker { command } => match command { + WorkerCommands::Canal => worker::canal::run().await, + WorkerCommands::Timeline => worker::timeline::run().await, + }, + 
Commands::Server => run_server().await, + } +} + +async fn run_server() -> anyhow::Result<()> { + tracing::info!("server subcommand placeholder is ready"); + tokio::signal::ctrl_c().await.context("wait ctrl-c")?; + tracing::info!("server placeholder shutdown"); + Ok(()) +} diff --git a/crates/app/src/worker/canal/character.rs b/crates/app/src/worker/canal/character.rs new file mode 100644 index 000000000..e91f81896 --- /dev/null +++ b/crates/app/src/worker/canal/character.rs @@ -0,0 +1,30 @@ +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct CharacterKey { + crt_id: u32, +} + +pub async fn on_character( + search: &SearchDispatcher, + key: &[u8], + op: &str, +) -> anyhow::Result<()> { + let key: CharacterKey = serde_json::from_slice(key).context("parse character key")?; + on_character_change(search, key.crt_id, op).await?; + Ok(()) +} + +async fn on_character_change( + search: &SearchDispatcher, + character_id: u32, + op: &str, +) -> anyhow::Result<()> { + search + .dispatch(search_event::TARGET_CHARACTER, character_id, op) + .await +} diff --git a/crates/app/src/worker/canal/mod.rs b/crates/app/src/worker/canal/mod.rs new file mode 100644 index 000000000..c95f079b7 --- /dev/null +++ b/crates/app/src/worker/canal/mod.rs @@ -0,0 +1,151 @@ +use anyhow::{anyhow, Context}; +use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; +use common::locate_error; +use rdkafka::consumer::{CommitMode, Consumer}; +use rdkafka::Message; + +mod character; +mod person; +mod search_event; +mod subject; +mod types; +mod user; + +use types::DebeziumPayload; + +pub async fn run() -> anyhow::Result<()> { + let cfg = AppConfig::from_env("canal")?; + + if cfg.kafka_topics.is_empty() { + return Err(anyhow!( + "empty topics: set RUST_KAFKA_TOPICS (comma-separated) or KAFKA_TOPICS" + )); + } + + let mysql_pool = build_mysql_pool(&cfg).await?; + let search = 
search_event::SearchDispatcher::new(&cfg, mysql_pool.clone())?; + let user = user::UserDispatcher::new(&cfg, mysql_pool)?; + + let mut client = build_kafka_client_config(&cfg); + client.set("group.id", &cfg.kafka_group_id); + client.set("enable.auto.commit", "false"); + + let consumer: rdkafka::consumer::StreamConsumer = + client.create().context("create kafka consumer")?; + + let topics: Vec<&str> = cfg.kafka_topics.iter().map(String::as_str).collect(); + consumer + .subscribe(&topics) + .context("subscribe kafka topics")?; + + tracing::info!( + group_id = %cfg.kafka_group_id, + topics = ?cfg.kafka_topics, + "canal worker started" + ); + + loop { + tokio::select! { + _ = tokio::signal::ctrl_c() => { + tracing::info!("received shutdown signal"); + return Ok(()); + } + msg = consumer.recv() => { + let msg = match msg { + Ok(item) => item, + Err(err) => { + tracing::error!(error = ?err, "failed to fetch kafka message"); + continue; + } + }; + + let key = msg.key().unwrap_or_default(); + let value = msg.payload().unwrap_or_default(); + + if let Err(err) = handle_message(&search, &user, key, value).await { + if let Some(at) = locate_error(&err) { + tracing::error!( + error.file = at.file, + error.lino = at.line, + error.message = at.message, + error_chain = %format!("{err:#}"), + "failed to process kafka message" + ); + } else { + tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to process kafka message"); + } + continue; + } + + if let Err(err) = consumer.commit_message(&msg, CommitMode::Sync) { + tracing::error!(error = ?err, "failed to commit kafka message"); + } + } + } + } +} + +async fn handle_message( + search: &search_event::SearchDispatcher, + user: &user::UserDispatcher, + key: &[u8], + value: &[u8], +) -> anyhow::Result<()> { + if value.is_empty() { + return Ok(()); + } + + let payload: DebeziumPayload = match serde_json::from_slice(value) { + Ok(payload) => payload, + Err(err) => { + tracing::warn!(error = ?err, "failed to parse 
kafka value, skip message"); + return Ok(()); + } + }; + + match payload.source.table.as_str() { + "chii_subject_fields" => subject::on_subject_field(search, key, &payload.op).await?, + "chii_subjects" => subject::on_subject(search, key, &payload.op).await?, + "chii_characters" => character::on_character(search, key, &payload.op).await?, + "chii_persons" => person::on_person(search, key, &payload.op).await?, + "chii_members" => user + .on_user(key, &payload.op, payload.before, payload.after) + .await?, + _ => tracing::debug!(table = %payload.source.table, "ignored table event"), + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use common::ResultExt; + + #[test] + fn demo_error_logging_output() { + common::init_tracing(); + + let err = demo_error().expect_err("demo error should fail"); + if let Some(at) = common::locate_error(&err) { + tracing::error!( + error.file = at.file, + error.lino = at.line, + error.message = at.message, + error_chain = %format!("{err:#}"), + "demo error output" + ); + } else { + tracing::error!( + error = ?err, + error_chain = %format!("{err:#}"), + "demo error output" + ); + } + } + + fn demo_error() -> anyhow::Result<()> { + let _: serde_json::Value = + serde_json::from_slice(b"not-json").context_loc("parse debezium payload")?; + Ok(()) + } +} diff --git a/crates/app/src/worker/canal/person.rs b/crates/app/src/worker/canal/person.rs new file mode 100644 index 000000000..674d4306d --- /dev/null +++ b/crates/app/src/worker/canal/person.rs @@ -0,0 +1,24 @@ +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct PersonKey { + prsn_id: u32, +} + +pub async fn on_person(search: &SearchDispatcher, key: &[u8], op: &str) -> anyhow::Result<()> { + let key: PersonKey = serde_json::from_slice(key).context("parse person key")?; + on_person_change(search, key.prsn_id, op).await?; + Ok(()) +} + +async fn on_person_change( + search: 
&SearchDispatcher,
+    person_id: u32,
+    op: &str,
+) -> anyhow::Result<()> {
+    search.dispatch(search_event::TARGET_PERSON, person_id, op).await
+}
diff --git a/crates/app/src/worker/canal/search_event.rs b/crates/app/src/worker/canal/search_event.rs
new file mode 100644
index 000000000..930519574
--- /dev/null
+++ b/crates/app/src/worker/canal/search_event.rs
@@ -0,0 +1,528 @@
+use anyhow::{anyhow, Context};
+use bangumi_wiki_parser::{parse_omit_error, FieldValue, Wiki};
+use common::config::AppConfig;
+use meilisearch_sdk::client::Client as MeiliSdkClient;
+use php_serialize::from_str as parse_php_serialize;
+use serde::{Deserialize, Serialize};
+
+use super::types::{OP_CREATE, OP_DELETE, OP_SNAPSHOT, OP_UPDATE};
+
+pub const TARGET_SUBJECT: &str = "subject";
+pub const TARGET_CHARACTER: &str = "character";
+pub const TARGET_PERSON: &str = "person";
+
+pub struct SearchDispatcher {
+    pool: sqlx::MySqlPool,
+    meili: Option<MeiliClient>,
+}
+
+struct MeiliClient {
+    client: MeiliSdkClient,
+}
+
+#[derive(Serialize)]
+struct SubjectDoc {
+    id: u32,
+    tag: Vec<String>,
+    #[serde(rename = "meta_tag")]
+    meta_tag: Vec<String>,
+    name: String,
+    aliases: Vec<String>,
+    date: i32,
+    score: f64,
+    rating_count: u32,
+    page_rank: f64,
+    heat: u32,
+    rank: u32,
+    platform: u16,
+    #[serde(rename = "type")]
+    type_id: u8,
+    nsfw: bool,
+}
+
+#[derive(Serialize)]
+struct CharacterDoc {
+    id: u32,
+    name: String,
+    aliases: Vec<String>,
+    comment: u32,
+    collect: u32,
+    nsfw: bool,
+}
+
+#[derive(Serialize)]
+struct PersonDoc {
+    id: u32,
+    name: String,
+    aliases: Vec<String>,
+    comment: u32,
+    collect: u32,
+    career: Vec<String>,
+}
+
+#[derive(Deserialize)]
+struct SubjectTagItem {
+    tag_name: Option<String>,
+}
+
+#[derive(sqlx::FromRow)]
+struct SubjectRow {
+    subject_id: u32,
+    subject_name: String,
+    subject_name_cn: String,
+    field_infobox: String,
+    subject_type_id: u8,
+    subject_nsfw: bool,
+    subject_ban: u8,
+    subject_platform: u16,
+    field_meta_tags: String,
+    field_tags: String,
+    subject_wish: u32,
+    subject_collect: u32,
+    subject_doing: u32,
+    subject_on_hold: u32,
+    subject_dropped: u32,
+    field_rank: u32,
+    date: String,
+    field_rate_1: u32,
+    field_rate_2: u32,
+    field_rate_3: u32,
+    field_rate_4: u32,
+    field_rate_5: u32,
+    field_rate_6: u32,
+    field_rate_7: u32,
+    field_rate_8: u32,
+    field_rate_9: u32,
+    field_rate_10: u32,
+    field_redirect: u32,
+}
+
+#[derive(sqlx::FromRow)]
+struct CharacterRow {
+    crt_id: u32,
+    crt_name: String,
+    crt_infobox: String,
+    crt_comment: u32,
+    crt_collects: u32,
+    crt_nsfw: bool,
+    crt_redirect: u32,
+}
+
+#[derive(sqlx::FromRow)]
+struct PersonRow {
+    prsn_id: u32,
+    prsn_name: String,
+    prsn_infobox: String,
+    prsn_comment: u32,
+    prsn_collects: u32,
+    prsn_redirect: u32,
+    prsn_producer: bool,
+    prsn_mangaka: bool,
+    prsn_artist: bool,
+    prsn_seiyu: bool,
+    prsn_writer: bool,
+    prsn_illustrator: bool,
+    prsn_actor: bool,
+}
+
+impl SearchDispatcher {
+    pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result<Self> {
+        let meili = if cfg.meilisearch_url.is_empty() {
+            None
+        } else {
+            let api_key = if cfg.meilisearch_key.is_empty() {
+                None
+            } else {
+                Some(cfg.meilisearch_key.clone())
+            };
+            Some(MeiliClient {
+                client: MeiliSdkClient::new(cfg.meilisearch_url.trim_end_matches('/'), api_key)
+                    .context("create meilisearch client")?,
+            })
+        };
+
+        Ok(Self { pool, meili })
+    }
+
+    pub async fn dispatch(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> {
+        match op {
+            OP_CREATE | OP_UPDATE | OP_SNAPSHOT => self.upsert(target, entity_id, op).await,
+            OP_DELETE => self.delete(target, entity_id, op).await,
+            _ => Err(anyhow!("unexpected operator: {op}")),
+        }
+    }
+
+    async fn upsert(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> {
+        let Some(meili) = &self.meili else {
+            tracing::debug!(target, entity_id, op, "skip search upsert: meilisearch disabled");
+            return Ok(());
+        };
+
+        match target {
+            TARGET_SUBJECT => {
+                let row = sqlx::query_as::<_, SubjectRow>(
+                    r#"SELECT s.subject_id,
s.subject_name, s.subject_name_cn, s.field_infobox, + s.subject_type_id, s.subject_nsfw, s.subject_ban, + s.subject_platform, s.field_meta_tags, f.field_tags, + s.subject_wish, s.subject_collect, s.subject_doing, + s.subject_on_hold, s.subject_dropped, f.field_rank, DATE_FORMAT(f.field_date, '%Y-%m-%d'), + f.field_rate_1, f.field_rate_2, f.field_rate_3, f.field_rate_4, f.field_rate_5, + f.field_rate_6, f.field_rate_7, f.field_rate_8, f.field_rate_9, f.field_rate_10, + f.field_redirect + FROM chii_subjects s + JOIN chii_subject_fields f ON f.field_sid = s.subject_id + WHERE s.subject_id = ? + LIMIT 1"#, + ) + .bind(entity_id) + .fetch_optional(&self.pool) + .await + .context("load subject")?; + + let Some(row) = row + else { + return self.delete(target, entity_id, op).await; + }; + + if row.subject_ban != 0 || row.field_redirect != 0 { + return self.delete(target, entity_id, op).await; + } + + let mut aliases = Vec::new(); + if !row.subject_name_cn.is_empty() { + aliases.push(row.subject_name_cn.clone()); + } + let wiki = parse_omit_error(&row.field_infobox); + aliases.extend(extract_values_by_key(&wiki, "别名")); + + let meta_tag = split_space_values(&row.field_meta_tags); + let tag = parse_subject_tags(&row.field_tags); + let heat = row + .subject_wish + .saturating_add(row.subject_collect) + .saturating_add(row.subject_doing) + .saturating_add(row.subject_on_hold) + .saturating_add(row.subject_dropped); + let rating_total = row + .field_rate_1 + .saturating_add(row.field_rate_2) + .saturating_add(row.field_rate_3) + .saturating_add(row.field_rate_4) + .saturating_add(row.field_rate_5) + .saturating_add(row.field_rate_6) + .saturating_add(row.field_rate_7) + .saturating_add(row.field_rate_8) + .saturating_add(row.field_rate_9) + .saturating_add(row.field_rate_10); + let score_sum = (row.field_rate_1 as f64) * 1.0 + + (row.field_rate_2 as f64) * 2.0 + + (row.field_rate_3 as f64) * 3.0 + + (row.field_rate_4 as f64) * 4.0 + + (row.field_rate_5 as f64) * 5.0 + + 
(row.field_rate_6 as f64) * 6.0 + + (row.field_rate_7 as f64) * 7.0 + + (row.field_rate_8 as f64) * 8.0 + + (row.field_rate_9 as f64) * 9.0 + + (row.field_rate_10 as f64) * 10.0; + let score = if rating_total == 0 { + 0.0 + } else { + ((score_sum / (rating_total as f64)) * 10.0).round() / 10.0 + }; + + let doc = SubjectDoc { + id: row.subject_id, + tag, + meta_tag, + name: row.subject_name, + aliases, + date: parse_date_val(&row.date), + score, + rating_count: rating_total, + page_rank: rating_total as f64, + heat, + rank: row.field_rank, + platform: row.subject_platform, + type_id: row.subject_type_id, + nsfw: row.subject_nsfw, + }; + + meili.update_doc("subjects", &[doc]).await?; + } + TARGET_CHARACTER => { + let row = sqlx::query_as::<_, CharacterRow>( + r#"SELECT crt_id, crt_name, crt_infobox, crt_comment, crt_collects, crt_nsfw, crt_redirect + FROM chii_characters WHERE crt_id = ? LIMIT 1"#, + ) + .bind(entity_id) + .fetch_optional(&self.pool) + .await + .context("load character")?; + + let Some(row) = row else { + return self.delete(target, entity_id, op).await; + }; + + if row.crt_redirect != 0 { + return self.delete(target, entity_id, op).await; + } + + let doc = CharacterDoc { + id: row.crt_id, + name: row.crt_name, + aliases: extract_aliases(&parse_omit_error(&row.crt_infobox)), + comment: row.crt_comment, + collect: row.crt_collects, + nsfw: row.crt_nsfw, + }; + + meili.update_doc("characters", &[doc]).await?; + } + TARGET_PERSON => { + let row = sqlx::query_as::<_, PersonRow>( + r#"SELECT prsn_id, prsn_name, prsn_infobox, prsn_comment, prsn_collects, prsn_redirect, + prsn_producer, prsn_mangaka, prsn_artist, prsn_seiyu, + prsn_writer, prsn_illustrator, prsn_actor + FROM chii_persons WHERE prsn_id = ? 
LIMIT 1"#,
+                )
+                .bind(entity_id)
+                .fetch_optional(&self.pool)
+                .await
+                .context("load person")?;
+
+                let Some(row) = row else {
+                    return self.delete(target, entity_id, op).await;
+                };
+
+                if row.prsn_redirect != 0 {
+                    return self.delete(target, entity_id, op).await;
+                }
+
+                let doc = PersonDoc {
+                    id: row.prsn_id,
+                    name: row.prsn_name,
+                    aliases: extract_aliases(&parse_omit_error(&row.prsn_infobox)),
+                    comment: row.prsn_comment,
+                    collect: row.prsn_collects,
+                    career: collect_careers(
+                        row.prsn_producer,
+                        row.prsn_mangaka,
+                        row.prsn_artist,
+                        row.prsn_seiyu,
+                        row.prsn_writer,
+                        row.prsn_illustrator,
+                        row.prsn_actor,
+                    ),
+                };
+
+                meili.update_doc("persons", &[doc]).await?;
+            }
+            _ => return Err(anyhow!("unknown search target: {target}")),
+        }
+
+        tracing::info!(target, entity_id, op, action = "event_upsert", "search event handled");
+        Ok(())
+    }
+
+    async fn delete(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> {
+        let Some(meili) = &self.meili else {
+            tracing::debug!(target, entity_id, op, "skip search delete: meilisearch disabled");
+            return Ok(());
+        };
+
+        let index = match target {
+            TARGET_SUBJECT => "subjects",
+            TARGET_CHARACTER => "characters",
+            TARGET_PERSON => "persons",
+            _ => return Err(anyhow!("unknown search target: {target}")),
+        };
+
+        meili.delete_doc(index, entity_id).await?;
+        tracing::info!(target, entity_id, op, action = "event_delete", "search event handled");
+        Ok(())
+    }
+}
+
+fn split_space_values(input: &str) -> Vec<String> {
+    input
+        .split(' ')
+        .map(ToOwned::to_owned)
+        .collect()
+}
+
+fn wiki_values(v: &FieldValue) -> Vec<String> {
+    match v {
+        FieldValue::Scalar(text) => vec![text.clone()],
+        FieldValue::Array(items) => items.iter().map(|x| x.value.clone()).collect(),
+        FieldValue::Null => Vec::new(),
+    }
+}
+
+fn extract_values_by_key(wiki: &Wiki, target_key: &str) -> Vec<String> {
+    let mut out = Vec::new();
+    for field in &wiki.fields {
+        if field.key == target_key {
+            out.extend(wiki_values(&field.value));
+        }
+    }
+    out
+}
+
+fn extract_aliases(wiki: &Wiki) -> Vec<String> {
+    let mut aliases = Vec::new();
+
+    for field in &wiki.fields {
+        if field.key == "中文名" {
+            aliases.extend(wiki_values(&field.value));
+        }
+        if field.key == "简体中文名" {
+            aliases.extend(wiki_values(&field.value));
+        }
+    }
+
+    for field in &wiki.fields {
+        if field.key == "别名" {
+            aliases.extend(wiki_values(&field.value));
+        }
+    }
+
+    aliases
+}
+
+fn parse_subject_tags(input: &str) -> Vec<String> {
+    let parsed: Vec<SubjectTagItem> = match parse_php_serialize(input) {
+        Ok(v) => v,
+        Err(_) => return Vec::new(),
+    };
+
+    parsed
+        .into_iter()
+        .filter_map(|item| item.tag_name)
+        .filter(|x| !x.is_empty())
+        .collect()
+}
+
+fn parse_date_val(date: &str) -> i32 {
+    if date.len() < 10 {
+        return 0;
+    }
+
+    let year = date[0..4].parse::<i32>().ok();
+    let month = date[5..7].parse::<i32>().ok();
+    let day = date[8..10].parse::<i32>().ok();
+
+    match (year, month, day) {
+        (Some(y), Some(m), Some(d)) => y * 10000 + m * 100 + d,
+        _ => 0,
+    }
+}
+
+fn collect_careers(
+    producer: bool,
+    mangaka: bool,
+    artist: bool,
+    seiyu: bool,
+    writer: bool,
+    illustrator: bool,
+    actor: bool,
+) -> Vec<String> {
+    let mut out = Vec::new();
+
+    if writer {
+        out.push("writer".to_string());
+    }
+
+    if producer {
+        out.push("producer".to_string());
+    }
+    if mangaka {
+        out.push("mangaka".to_string());
+    }
+    if artist {
+        out.push("artist".to_string());
+    }
+    if seiyu {
+        out.push("seiyu".to_string());
+    }
+    if illustrator {
+        out.push("illustrator".to_string());
+    }
+    if actor {
+        out.push("actor".to_string());
+    }
+
+    out
+}
+
+impl MeiliClient {
+    async fn update_doc<T: Serialize + Send + Sync>(
+        &self,
+        index: &str,
+        docs: &[T],
+    ) -> anyhow::Result<()> {
+        self
+            .client
+            .index(index)
+            .add_documents(docs, Some("id"))
+            .await
+            .context("meilisearch update")?;
+        Ok(())
+    }
+
+    async fn delete_doc(&self, index: &str, id: u32) -> anyhow::Result<()> {
+        self
+            .client
+            .index(index)
+            .delete_document(id)
+            .await
+            .context("meilisearch delete")?;
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod tests {
use super::{collect_careers, extract_aliases, parse_subject_tags}; + use bangumi_wiki_parser::parse_omit_error; + + #[test] + fn parse_subject_tags_php_serialized() { + let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}"; + + let tags = parse_subject_tags(raw); + assert_eq!(tags, vec!["动画".to_string(), "TV".to_string()]); + } + + #[test] + fn extract_aliases_for_person_character() { + let infobox = "{{Infobox\n|中文名=某角色\n|简体中文名=某角色简中\n|别名={\n[Alpha]\n[Beta]\n}\n|生日=2000-01-01\n}}"; + + let aliases = extract_aliases(&parse_omit_error(infobox)); + assert_eq!( + aliases, + vec![ + "某角色".to_string(), + "某角色简中".to_string(), + "Alpha".to_string(), + "Beta".to_string(), + ] + ); + } + + #[test] + fn collect_career_order_matches_go() { + let careers = collect_careers(true, true, true, true, true, true, true); + assert_eq!( + careers, + vec![ + "writer".to_string(), + "producer".to_string(), + "mangaka".to_string(), + "artist".to_string(), + "seiyu".to_string(), + "illustrator".to_string(), + "actor".to_string(), + ] + ); + } +} diff --git a/crates/app/src/worker/canal/subject.rs b/crates/app/src/worker/canal/subject.rs new file mode 100644 index 000000000..630087144 --- /dev/null +++ b/crates/app/src/worker/canal/subject.rs @@ -0,0 +1,39 @@ +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct SubjectKey { + subject_id: u32, +} + +#[derive(Debug, Deserialize)] +struct SubjectFieldKey { + field_sid: u32, +} + +pub async fn on_subject(search: &SearchDispatcher, key: &[u8], op: &str) -> anyhow::Result<()> { + let key: SubjectKey = serde_json::from_slice(key).context("parse subject key")?; + on_subject_change(search, key.subject_id, op).await?; + Ok(()) +} + +pub async fn on_subject_field( + search: &SearchDispatcher, + 
key: &[u8],
+    op: &str,
+) -> anyhow::Result<()> {
+    let key: SubjectFieldKey = serde_json::from_slice(key).context("parse subject field key")?;
+    on_subject_change(search, key.field_sid, op).await?;
+    Ok(())
+}
+
+async fn on_subject_change(
+    search: &SearchDispatcher,
+    subject_id: u32,
+    op: &str,
+) -> anyhow::Result<()> {
+    search.dispatch(search_event::TARGET_SUBJECT, subject_id, op).await
+}
diff --git a/crates/app/src/worker/canal/types.rs b/crates/app/src/worker/canal/types.rs
new file mode 100644
index 000000000..3b38a630c
--- /dev/null
+++ b/crates/app/src/worker/canal/types.rs
@@ -0,0 +1,20 @@
+use serde::Deserialize;
+use serde_json::Value;
+
+pub const OP_CREATE: &str = "c";
+pub const OP_DELETE: &str = "d";
+pub const OP_UPDATE: &str = "u";
+pub const OP_SNAPSHOT: &str = "r";
+
+#[derive(Debug, Deserialize)]
+pub struct DebeziumPayload {
+    pub before: Option<Value>,
+    pub after: Option<Value>,
+    pub source: DebeziumSource,
+    pub op: String,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct DebeziumSource {
+    pub table: String,
+}
diff --git a/crates/app/src/worker/canal/user.rs b/crates/app/src/worker/canal/user.rs
new file mode 100644
index 000000000..ec614952e
--- /dev/null
+++ b/crates/app/src/worker/canal/user.rs
@@ -0,0 +1,293 @@
+use anyhow::Context;
+use common::config::AppConfig;
+use opendal::services;
+use opendal::Operator;
+use serde::Deserialize;
+use serde_json::Value;
+use std::collections::HashSet;
+
+use super::types::OP_UPDATE;
+
+#[derive(Debug, Deserialize)]
+struct UserKey {
+    uid: u32,
+}
+
+pub struct UserDispatcher {
+    pool: sqlx::MySqlPool,
+    redis: redis::Client,
+    s3: Option<Operator>,
+}
+
+#[derive(Debug, serde::Serialize)]
+struct RedisUserChannel {
+    user_id: u32,
+    new_notify: u16,
+}
+
+impl UserDispatcher {
+    pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result<Self> {
+        let redis = redis::Client::open(cfg.redis_url.as_str()).context("create redis client")?;
+        let s3 = build_s3_operator(cfg)?;
+        Ok(Self { pool, redis, s3 })
+    }
+
+    pub async fn on_user(
+        &self,
+        key: &[u8],
+        op: &str,
+        before: Option<Value>,
+        after: Option<Value>,
+    ) -> anyhow::Result<()> {
+        let key: UserKey = serde_json::from_slice(key).context("parse user key")?;
+        self.on_user_change(key.uid, op, before, after).await
+    }
+
+    async fn on_user_change(
+        &self,
+        user_id: u32,
+        op: &str,
+        before: Option<Value>,
+        after: Option<Value>,
+    ) -> anyhow::Result<()> {
+        if op != OP_UPDATE {
+            return Ok(());
+        }
+
+        let (Some(before), Some(after)) = (before, after) else {
+            return Ok(());
+        };
+
+        let old_password = before.get("password_crypt").and_then(Value::as_str);
+        let new_password = after.get("password_crypt").and_then(Value::as_str);
+
+        if old_password != new_password {
+            self.revoke_user_sessions(user_id).await?;
+            tracing::info!(user_id, "password changed, sessions revoked");
+        }
+
+        let old_notify = before.get("new_notify").and_then(Value::as_u64);
+        let new_notify = after.get("new_notify").and_then(Value::as_u64);
+        if old_notify != new_notify {
+            let notify = new_notify.unwrap_or_default() as u16;
+            self.publish_notify_change(user_id, notify).await?;
+            tracing::info!(user_id, new_notify = notify, "new notify changed, published redis event");
+        }
+
+        let old_avatar = before.get("avatar").and_then(Value::as_str);
+        let new_avatar = after.get("avatar").and_then(Value::as_str);
+        if old_avatar != new_avatar {
+            if let (Some(s3), Some(avatar)) = (&self.s3, new_avatar) {
+                tracing::debug!(user_id, avatar, "avatar changed, clear image cache in background");
+                let s3 = s3.clone();
+                let avatar = avatar.to_owned();
+                tokio::spawn(async move {
+                    if let Err(err) = clear_image_cache(s3, avatar).await {
+                        tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to clear s3 cached image");
+                    }
+                });
+            }
+        }
+
+        Ok(())
+    }
+
+    async fn revoke_user_sessions(&self, user_id: u32) -> anyhow::Result<()> {
+        let now = std::time::SystemTime::now()
+            .duration_since(std::time::UNIX_EPOCH)
+            .context("system time before unix epoch")?
+            .as_secs() as i64;
+
+        let rows = sqlx::query_as::<_, (String,)>(
+            r#"SELECT `key` FROM chii_os_web_sessions WHERE user_id = ?"#,
+        )
+        .bind(user_id)
+        .fetch_all(&self.pool)
+        .await
+        .context("load user sessions")?;
+
+        sqlx::query(
+            r#"UPDATE chii_os_web_sessions SET expired_at = ? WHERE user_id = ?"#,
+        )
+        .bind(now)
+        .bind(user_id)
+        .execute(&self.pool)
+        .await
+        .context("revoke user sessions in mysql")?;
+
+        if rows.is_empty() {
+            return Ok(());
+        }
+
+        let mut conn = self
+            .redis
+            .get_multiplexed_async_connection()
+            .await
+            .context("get redis connection")?;
+
+        let mut cmd = redis::cmd("DEL");
+        for (key,) in rows {
+            cmd.arg(format!("chii:web:session:{key}"));
+        }
+        let _: i64 = cmd.query_async(&mut conn).await.context("delete redis sessions")?;
+
+        Ok(())
+    }
+
+    async fn publish_notify_change(&self, user_id: u32, new_notify: u16) -> anyhow::Result<()> {
+        let message = serde_json::to_string(&RedisUserChannel {
+            user_id,
+            new_notify,
+        })
+        .context("encode redis user notify message")?;
+
+        let mut conn = self
+            .redis
+            .get_multiplexed_async_connection()
+            .await
+            .context("get redis connection")?;
+
+        let channel = format!("event-user-notify-{user_id}");
+        let _: i64 = redis::cmd("PUBLISH")
+            .arg(channel)
+            .arg(message)
+            .query_async(&mut conn)
+            .await
+            .context("publish user notify event")?;
+
+        Ok(())
+    }
+}
+
+fn build_s3_operator(cfg: &AppConfig) -> anyhow::Result<Option<Operator>> {
+    if cfg.s3_entry_point.is_empty() || cfg.s3_access_key.is_empty() || cfg.s3_secret_key.is_empty()
+    {
+        return Ok(None);
+    }
+
+    let builder = services::S3::default()
+        .root("/")
+        .endpoint(&cfg.s3_entry_point)
+        .access_key_id(&cfg.s3_access_key)
+        .secret_access_key(&cfg.s3_secret_key)
+        .bucket(&cfg.s3_image_resize_bucket);
+
+    let builder = if let Some(region) = &cfg.s3_region {
+        builder.region(region)
+    } else {
+        builder
+    };
+
+    let operator = Operator::new(builder)
+        .context("create s3 operator")?
+        .finish();
+
+    Ok(Some(operator))
+}
+
+async fn clear_image_cache(s3: Operator, avatar: String) -> anyhow::Result<()> {
+    let (path, query) = avatar
+        .split_once('?')
+        .map_or((avatar.as_str(), ""), |(path, query)| (path, query));
+
+    let mut prefix = format!("/pic/user/l/{path}");
+    if query.contains("hd=1") {
+        prefix = format!("/hd{prefix}");
+    }
+
+    tracing::debug!(avatar, prefix, "clear image cache by prefix");
+
+    let mut keys: HashSet<String> = HashSet::new();
+
+    for candidate_prefix in prefix_candidates(&prefix) {
+        let dir = prefix_dirname(&candidate_prefix);
+
+        let entries = match tokio::time::timeout(
+            std::time::Duration::from_secs(10),
+            s3.list(&dir),
+        )
+        .await
+        {
+            Ok(Ok(entries)) => entries,
+            Ok(Err(err)) => {
+                tracing::warn!(
+                    prefix = candidate_prefix,
+                    dir,
+                    error = ?err,
+                    "failed to list cached avatar objects by dirname"
+                );
+                continue;
+            }
+            Err(_) => {
+                tracing::warn!(
+                    prefix = candidate_prefix,
+                    dir,
+                    "timeout while listing cached avatar objects by dirname"
+                );
+                continue;
+            }
+        };
+
+        for entry in entries {
+            let path = entry.path();
+            if path.starts_with(candidate_prefix.as_str()) {
+                keys.insert(path.to_string());
+            }
+        }
+
+        if !keys.is_empty() {
+            break;
+        }
+    }
+
+    for key in keys {
+        if let Err(err) = s3.delete(&key).await {
+            tracing::error!(
+                key,
+                error = ?err,
+                "failed to delete cached avatar object"
+            );
+        }
+    }
+
+    Ok(())
+}
+
+fn prefix_candidates(prefix: &str) -> Vec<String> {
+    if let Some(stripped) = prefix.strip_prefix('/') {
+        vec![prefix.to_owned(), stripped.to_owned()]
+    } else {
+        vec![prefix.to_owned()]
+    }
+}
+
+fn prefix_dirname(prefix: &str) -> String {
+    match prefix.rsplit_once('/') {
+        Some((dir, _)) if !dir.is_empty() => format!("{dir}/"),
+        Some(_) => "/".to_string(),
+        None => "/".to_string(),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{prefix_candidates, prefix_dirname};
+
+    #[test]
+    fn test_prefix_dirname() {
+        assert_eq!(prefix_dirname("/pic/user/l/a.jpg"), "/pic/user/l/");
assert_eq!(prefix_dirname("pic/user/l/a.jpg"), "pic/user/l/"); + assert_eq!(prefix_dirname("/a.jpg"), "/"); + assert_eq!(prefix_dirname("a.jpg"), "/"); + } + + #[test] + fn test_prefix_candidates() { + assert_eq!( + prefix_candidates("/pic/user/l/a.jpg"), + vec!["/pic/user/l/a.jpg".to_string(), "pic/user/l/a.jpg".to_string()] + ); + assert_eq!(prefix_candidates("pic/user/l/a.jpg"), vec!["pic/user/l/a.jpg".to_string()]); + } + +} diff --git a/crates/app/src/worker/mod.rs b/crates/app/src/worker/mod.rs new file mode 100644 index 000000000..199a6588c --- /dev/null +++ b/crates/app/src/worker/mod.rs @@ -0,0 +1,2 @@ +pub mod canal; +pub mod timeline; diff --git a/crates/app/src/worker/timeline.rs b/crates/app/src/worker/timeline.rs new file mode 100644 index 000000000..655568bf6 --- /dev/null +++ b/crates/app/src/worker/timeline.rs @@ -0,0 +1,22 @@ +use anyhow::Context; +use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; +use rdkafka::producer::FutureProducer; + +const TIMELINE_TOPIC: &str = "timeline"; + +pub async fn run() -> anyhow::Result<()> { + let cfg = AppConfig::from_env("timeline-worker")?; + let _mysql = build_mysql_pool(&cfg).await?; + + let _producer: FutureProducer = build_kafka_client_config(&cfg) + .create() + .context("create kafka producer")?; + + tracing::info!(topic = TIMELINE_TOPIC, "timeline worker started"); + + tokio::signal::ctrl_c().await.context("wait ctrl-c")?; + + tracing::info!("timeline worker shutdown"); + + Ok(()) +} diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml new file mode 100644 index 000000000..965f1fc0b --- /dev/null +++ b/crates/common/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "common" +version = "0.1.0" +edition.workspace = true +license.workspace = true + +[dependencies] +anyhow.workspace = true +serde.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +sqlx.workspace = true + +[target.'cfg(unix)'.dependencies] +rdkafka = { workspace = true, features 
= ["cmake-build", "tokio", "ssl-vendored"] }
+
+[target.'cfg(not(unix))'.dependencies]
+rdkafka = { workspace = true, features = ["cmake-build", "tokio"] }
diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs
new file mode 100644
index 000000000..a4e0ddda7
--- /dev/null
+++ b/crates/common/src/config.rs
@@ -0,0 +1,117 @@
+use std::env;
+
+use anyhow::Context;
+
+#[derive(Debug, Clone)]
+pub struct AppConfig {
+    pub mysql_dsn: String,
+    pub redis_url: String,
+    pub meilisearch_url: String,
+    pub meilisearch_key: String,
+    pub s3_entry_point: String,
+    pub s3_access_key: String,
+    pub s3_secret_key: String,
+    pub s3_image_resize_bucket: String,
+    pub s3_region: Option<String>,
+    pub kafka_brokers: String,
+    pub kafka_group_id: String,
+    pub kafka_topic: String,
+    pub kafka_topics: Vec<String>,
+}
+
+impl AppConfig {
+    pub fn from_env(service: &str) -> anyhow::Result<Self> {
+        let mysql_dsn = env::var("RUST_MYSQL_DSN").context("missing env RUST_MYSQL_DSN")?;
+
+        let redis_url = env::var("RUST_REDIS_URL")
+            .or_else(|_| env::var("REDIS_URI"))
+            .unwrap_or_else(|_| "redis://127.0.0.1:6379/0".to_string());
+
+        let meilisearch_url = env::var("RUST_MEILISEARCH_URL")
+            .or_else(|_| env::var("MEILISEARCH_URL"))
+            .unwrap_or_default();
+
+        let meilisearch_key = env::var("RUST_MEILISEARCH_KEY")
+            .or_else(|_| env::var("MEILISEARCH_KEY"))
+            .unwrap_or_default();
+
+        let s3_entry_point = env::var("RUST_S3_ENTRY_POINT")
+            .or_else(|_| env::var("S3_ENTRY_POINT"))
+            .unwrap_or_default();
+
+        let s3_access_key = env::var("RUST_S3_ACCESS_KEY")
+            .or_else(|_| env::var("S3_ACCESS_KEY"))
+            .unwrap_or_default();
+
+        let s3_secret_key = env::var("RUST_S3_SECRET_KEY")
+            .or_else(|_| env::var("S3_SECRET_KEY"))
+            .unwrap_or_default();
+
+        let s3_image_resize_bucket = env::var("RUST_S3_IMAGE_RESIZE_BUCKET")
+            .or_else(|_| env::var("S3_IMAGE_RESIZE_BUCKET"))
+            .unwrap_or_else(|_| "img-resize".to_string());
+
+        let s3_region = env::var("RUST_S3_REGION")
+            .ok()
+            .or_else(|| env::var("AWS_REGION").ok())
+            .filter(|x| !x.trim().is_empty());
+
+        let kafka_brokers = env::var("RUST_KAFKA_BROKERS")
+            .or_else(|_| env::var("KAFKA_BROKER"))
+            .context("missing env RUST_KAFKA_BROKERS or KAFKA_BROKER")?;
+
+        let kafka_group_id =
+            env::var("RUST_KAFKA_GROUP_ID").unwrap_or_else(|_| format!("go-{service}"));
+
+        let kafka_topic =
+            env::var("RUST_KAFKA_TOPIC").unwrap_or_else(|_| "timeline".to_string());
+
+        let kafka_topics = parse_topics(
+            env::var("RUST_KAFKA_TOPICS")
+                .ok()
+                .or_else(|| env::var("KAFKA_TOPICS").ok()),
+        );
+
+        Ok(Self {
+            mysql_dsn,
+            redis_url,
+            meilisearch_url,
+            meilisearch_key,
+            s3_entry_point,
+            s3_access_key,
+            s3_secret_key,
+            s3_image_resize_bucket,
+            s3_region,
+            kafka_brokers,
+            kafka_group_id,
+            kafka_topic,
+            kafka_topics,
+        })
+    }
+}
+
+fn parse_topics(raw: Option<String>) -> Vec<String> {
+    raw
+        .unwrap_or_default()
+        .split(',')
+        .map(str::trim)
+        .filter(|x| !x.is_empty())
+        .map(ToOwned::to_owned)
+        .collect()
+}
+
+pub async fn build_mysql_pool(cfg: &AppConfig) -> anyhow::Result<sqlx::MySqlPool> {
+    let pool = sqlx::mysql::MySqlPoolOptions::new()
+        .max_connections(5)
+        .connect(&cfg.mysql_dsn)
+        .await
+        .context("failed to connect mysql")?;
+
+    Ok(pool)
+}
+
+pub fn build_kafka_client_config(cfg: &AppConfig) -> rdkafka::ClientConfig {
+    let mut c = rdkafka::ClientConfig::new();
+    c.set("bootstrap.servers", &cfg.kafka_brokers);
+    c
+}
diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs
new file mode 100644
index 000000000..b2a9c3f9e
--- /dev/null
+++ b/crates/common/src/lib.rs
@@ -0,0 +1,80 @@
+pub mod config;
+
+#[derive(Debug)]
+pub struct ErrorAt {
+    pub file: &'static str,
+    pub line: u32,
+    pub message: &'static str,
+    pub source: anyhow::Error,
+}
+
+impl std::fmt::Display for ErrorAt {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.message)
+    }
+}
+
+impl std::error::Error for ErrorAt {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        Some(self.source.as_ref())
+    }
+}
+
+pub fn locate_error(err: &anyhow::Error) -> Option<&ErrorAt> {
+    for cause in err.chain() {
+        if let Some(at) = cause.downcast_ref::<ErrorAt>() {
+            return Some(at);
+        }
+    }
+    None
+}
+
+pub trait ResultExt<T> {
+    #[track_caller]
+    fn context_loc(self, message: &'static str) -> anyhow::Result<T>;
+}
+
+impl<T, E> ResultExt<T> for Result<T, E>
+where
+    E: Into<anyhow::Error>,
+{
+    #[track_caller]
+    fn context_loc(self, message: &'static str) -> anyhow::Result<T> {
+        self.map_err(|err| error_at(err, message))
+    }
+}
+
+#[cold]
+#[inline(never)]
+#[track_caller]
+fn error_at<E>(err: E, message: &'static str) -> anyhow::Error
+where
+    E: Into<anyhow::Error>,
+{
+    let caller = std::panic::Location::caller();
+    anyhow::Error::new(ErrorAt {
+        file: caller.file(),
+        line: caller.line(),
+        message,
+        source: err.into(),
+    })
+}
+
+pub fn init_tracing() {
+    let filter = std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_string());
+    let use_json = std::env::var("RUST_LOG_JSON")
+        .map(|v| matches!(v.as_str(), "1" | "true" | "TRUE" | "True"))
+        .unwrap_or(false);
+
+    let builder = tracing_subscriber::fmt()
+        .with_env_filter(filter)
+        .with_target(true)
+        .with_file(true)
+        .with_line_number(true);
+
+    if use_json {
+        let _ = builder.json().try_init();
+    } else {
+        let _ = builder.try_init();
+    }
+}
diff --git a/crates/php-serialize/Cargo.toml b/crates/php-serialize/Cargo.toml
new file mode 100644
index 000000000..013f68a4d
--- /dev/null
+++ b/crates/php-serialize/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "php-serialize"
+version = "0.1.0"
+edition.workspace = true
+license.workspace = true
+
+[dependencies]
+serde.workspace = true
+serde_php.workspace = true
diff --git a/crates/php-serialize/src/lib.rs b/crates/php-serialize/src/lib.rs
new file mode 100644
index 000000000..8be6bee2f
--- /dev/null
+++ b/crates/php-serialize/src/lib.rs
@@ -0,0 +1,56 @@
+use serde::de::DeserializeOwned;
+
+pub use serde_php::Error;
+pub use serde_php::Result;
+
+pub fn from_bytes<T: DeserializeOwned>(input: &[u8]) -> Result<T> {
+    serde_php::from_bytes(input)
+}
+
+pub fn from_str<T: DeserializeOwned>(input: &str) -> Result<T> {
+    from_bytes(input.as_bytes())
+}
+
+#[cfg(test)]
+mod tests {
+    use serde::Deserialize;
+
+    use super::from_str;
+
+    #[derive(Debug, Deserialize, PartialEq, Eq)]
+    struct TagItem {
+        tag_name: Option<String>,
+        result: String,
+    }
+
+    #[test]
+    fn deserialize_vec_struct() {
+        let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}";
+
+        let actual: Vec<TagItem> = from_str(raw).expect("deserialize php serialized vec");
+        assert_eq!(
+            actual,
+            vec![
+                TagItem {
+                    tag_name: Some("动画".to_string()),
+                    result: "2".to_string(),
+                },
+                TagItem {
+                    tag_name: None,
+                    result: "1".to_string(),
+                },
+                TagItem {
+                    tag_name: Some("TV".to_string()),
+                    result: "1".to_string(),
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn deserialize_scalar_string() {
+        let raw = "s:6:\"动画\";";
+        let value: String = from_str(raw).expect("deserialize php string");
+        assert_eq!(value, "动画");
+    }
+}
diff --git a/docs/rust-migration-plan.md b/docs/rust-migration-plan.md
new file mode 100644
index 000000000..71b15b1c9
--- /dev/null
+++ b/docs/rust-migration-plan.md
@@ -0,0 +1,48 @@
+# Rust Migration Implementation Plan
+
+Date: 2026-02-19
+Status: in-progress
+Target stack: `tokio` + `rdkafka` + `sqlx`
+
+## Scope (Phase 0 -> Phase 1)
+
+This repository keeps Go as the primary runtime while introducing a Rust workspace for gradual migration.
+
+Initial implementation goals:
+
+1. Add a Rust workspace integrated at repository root.
+2. Add a single app executable with top-level subcommands:
+   - `worker` (contains `canal` / `timeline` placeholder runtime loops)
+   - `server` (placeholder runtime loop)
+3. Add shared config loading and connection bootstrap for MySQL/Kafka.
+4. Keep all changes non-invasive to existing Go startup and deployment.
+
+## Delivery milestones
+
+### M0: Bootstrap
+- Rust workspace compiles.
+- `cargo run -p app -- worker canal` starts and exits gracefully. +- `cargo run -p app -- worker timeline` starts and exits gracefully. +- `cargo run -p app -- server` starts and exits gracefully. + +### M1: Infra baseline +- Shared config supports environment variables. +- Kafka/MySQL clients can be initialized from config. +- Structured logging and basic shutdown signal handling are in place. + +### M2: Contract baseline +- Event and payload schemas are defined in Rust types for timeline/canal. +- Golden fixtures can be added for parity tests. + +## Out of scope (for this commit) + +- Production traffic switching +- Full endpoint migration +- Replacing Go DAL with sqlx queries + +## Next implementation tasks + +1. Implement real search/session/redis side effects in canal handlers (consume loop and commit semantics are already in place). +2. Implement timeline publish API and payload parity tests. +3. Add sqlx pool and first read-only repository for subject read-path. +4. Add CI jobs to build Rust workspace and run tests. diff --git a/docs/rust-workspace.md b/docs/rust-workspace.md new file mode 100644 index 000000000..40d782f6e --- /dev/null +++ b/docs/rust-workspace.md @@ -0,0 +1,30 @@ +# Rust Migration Workspace + +Rust migration code is integrated into repository root. 
+ +## Crates + +- `common`: shared config/bootstrap/helpers +- `app`: single executable with top-level subcommands (`worker`, `server`) + +## Environment variables + +- `RUST_MYSQL_DSN` (required) +- `RUST_KAFKA_BROKERS` (required, fallback: `KAFKA_BROKER`) +- `RUST_KAFKA_TOPICS` (required for `worker canal`, comma-separated) +- `RUST_KAFKA_GROUP_ID` (optional) +- `RUST_KAFKA_TOPIC` (optional, default: `timeline`) +- `RUST_LOG` (optional, default: `info`) + +## Run + +```bash +cargo run -p app -- worker canal +cargo run -p app -- worker timeline +cargo run -p app -- server +``` + +## Current migration status + +- `worker canal`: real Kafka consume loop with Debezium payload parsing, table-based dispatch, and commit-after-success behavior. +- `worker timeline`: producer bootstrap and reusable timeline producer module are ready. diff --git a/etc/Dockerfile.rust b/etc/Dockerfile.rust new file mode 100644 index 000000000..0d5455847 --- /dev/null +++ b/etc/Dockerfile.rust @@ -0,0 +1,10 @@ +FROM gcr.io/distroless/cc-debian13:nonroot@sha256:84fcd3c223b144b0cb6edc5ecc75641819842a9679a3a58fd6294bec47532bf7 + +ARG TARGET=x86_64-unknown-linux-gnu + +WORKDIR /app + +COPY target/${TARGET}/release/app /app/app + +ENTRYPOINT ["/app/app"] +CMD ["--help"] diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 000000000..c49af7e38 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,4 @@ +[toolchain] +channel = "1.93.1" +components = ["rustfmt", "rustc", "cargo", "clippy"] +profile = "minimal" From a6f54107165e3ad2dd69f09e5a4da5a4c609de20 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Thu, 19 Feb 2026 22:01:05 +0800 Subject: [PATCH 2/7] refactor: clean up code formatting and organization across multiple files - Standardized import statements and removed unnecessary whitespace in `subject.rs`, `types.rs`, `user.rs`, `timeline.rs`, and `mod.rs`. 
- Improved readability and consistency in the `user.rs` file by restructuring the `on_user_change` function and its logic. - Updated `Cargo.toml` to adjust dependencies for different target platforms. - Enhanced test cases in `php-serialize` to ensure proper deserialization of PHP serialized data. --- Cargo.lock | 42 +- Cargo.toml | 2 +- crates/app/Cargo.toml | 8 +- crates/app/src/worker/canal/character.rs | 60 +- crates/app/src/worker/canal/mod.rs | 306 +++--- crates/app/src/worker/canal/person.rs | 54 +- crates/app/src/worker/canal/search_event.rs | 1079 ++++++++++--------- crates/app/src/worker/canal/subject.rs | 85 +- crates/app/src/worker/canal/types.rs | 40 +- crates/app/src/worker/canal/user.rs | 605 ++++++----- crates/app/src/worker/mod.rs | 4 +- crates/app/src/worker/timeline.rs | 44 +- crates/common/Cargo.toml | 8 +- crates/php-serialize/src/lib.rs | 112 +- 14 files changed, 1273 insertions(+), 1176 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eec2c59ad..6efb08847 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -440,6 +440,21 @@ dependencies = [ "typenum", ] +[[package]] +name = "curl-sys" +version = "0.4.85+curl-8.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0efa6142b5ecc05f6d3eaa39e6af4888b9d3939273fb592c92b7088a8cf3fdb" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", + "windows-sys 0.59.0", +] + [[package]] name = "der" version = "0.7.10" @@ -1223,6 +1238,18 @@ dependencies = [ "redox_syscall 0.7.1", ] +[[package]] +name = "libz-sys" +version = "1.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "litemap" version = "0.8.1" @@ -1816,9 +1843,9 @@ dependencies = [ [[package]] name = "rdkafka" -version = "0.36.2" +version = "0.38.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1beea247b9a7600a81d4cc33f659ce1a77e1988323d7d2809c7ed1c21f4c316d" +checksum = "5f1856d72dbbbea0d2a5b2eaf6af7fb3847ef2746e883b11781446a51dbc85c0" dependencies = [ "futures-channel", "futures-util", @@ -1839,7 +1866,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e234cf318915c1059d4921ef7f75616b5219b10b46e9f3a511a15eb4b56a3f77" dependencies = [ "cmake", + "curl-sys", "libc", + "libz-sys", "num_enum", "openssl-sys", "pkg-config", @@ -3194,6 +3223,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.60.2" diff --git a/Cargo.toml b/Cargo.toml index b44740291..e8ac94dda 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,7 @@ tokio = { version = "1", features = ["rt-multi-thread", "macros", "signal", "tim tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } clap = { version = "4", features = ["derive"] } -rdkafka = { version = "0.36", default-features = false } +rdkafka = "0.38" sqlx = { version = "0.8", default-features = false, features = ["runtime-tokio-rustls", "mysql", "macros"] } redis = { version = "0.27", features = ["tokio-comp"] } meilisearch-sdk = "0.28" diff --git a/crates/app/Cargo.toml b/crates/app/Cargo.toml index 2656c03b9..fc02f7ded 100644 --- a/crates/app/Cargo.toml +++ b/crates/app/Cargo.toml @@ -19,8 +19,8 @@ bangumi-wiki-parser.workspace = true php-serialize.workspace = true common = { path = "../common" } -[target.'cfg(unix)'.dependencies] -rdkafka = { workspace = true, features = ["cmake-build", "tokio", "ssl-vendored"] } +[target.'cfg(windows)'.dependencies] +rdkafka = { workspace = true, features = 
["cmake-build"] } -[target.'cfg(not(unix))'.dependencies] -rdkafka = { workspace = true, features = ["cmake-build", "tokio"] } +[target.'cfg(unix)'.dependencies] +rdkafka = { workspace = true, features = ["cmake-build", "static-linking", "ssl-vendored", "curl-static", "libz-static"] } diff --git a/crates/app/src/worker/canal/character.rs b/crates/app/src/worker/canal/character.rs index e91f81896..c12d6dd70 100644 --- a/crates/app/src/worker/canal/character.rs +++ b/crates/app/src/worker/canal/character.rs @@ -1,30 +1,30 @@ -use anyhow::Context; -use serde::Deserialize; - -use super::search_event::SearchDispatcher; -use super::search_event::{self}; - -#[derive(Debug, Deserialize)] -struct CharacterKey { - crt_id: u32, -} - -pub async fn on_character( - search: &SearchDispatcher, - key: &[u8], - op: &str, -) -> anyhow::Result<()> { - let key: CharacterKey = serde_json::from_slice(key).context("parse character key")?; - on_character_change(search, key.crt_id, op).await?; - Ok(()) -} - -async fn on_character_change( - search: &SearchDispatcher, - character_id: u32, - op: &str, -) -> anyhow::Result<()> { - search - .dispatch(search_event::TARGET_CHARACTER, character_id, op) - .await -} +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct CharacterKey { + crt_id: u32, +} + +pub async fn on_character( + search: &SearchDispatcher, + key: &[u8], + op: &str, +) -> anyhow::Result<()> { + let key: CharacterKey = serde_json::from_slice(key).context("parse character key")?; + on_character_change(search, key.crt_id, op).await?; + Ok(()) +} + +async fn on_character_change( + search: &SearchDispatcher, + character_id: u32, + op: &str, +) -> anyhow::Result<()> { + search + .dispatch(search_event::TARGET_CHARACTER, character_id, op) + .await +} diff --git a/crates/app/src/worker/canal/mod.rs b/crates/app/src/worker/canal/mod.rs index c95f079b7..14df0dfe4 100644 --- 
a/crates/app/src/worker/canal/mod.rs +++ b/crates/app/src/worker/canal/mod.rs @@ -1,151 +1,155 @@ -use anyhow::{anyhow, Context}; -use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; -use common::locate_error; -use rdkafka::consumer::{CommitMode, Consumer}; -use rdkafka::Message; - -mod character; -mod person; -mod search_event; -mod subject; -mod types; -mod user; - -use types::DebeziumPayload; - -pub async fn run() -> anyhow::Result<()> { - let cfg = AppConfig::from_env("canal")?; - - if cfg.kafka_topics.is_empty() { - return Err(anyhow!( - "empty topics: set RUST_KAFKA_TOPICS (comma-separated) or KAFKA_TOPICS" - )); - } - - let mysql_pool = build_mysql_pool(&cfg).await?; - let search = search_event::SearchDispatcher::new(&cfg, mysql_pool.clone())?; - let user = user::UserDispatcher::new(&cfg, mysql_pool)?; - - let mut client = build_kafka_client_config(&cfg); - client.set("group.id", &cfg.kafka_group_id); - client.set("enable.auto.commit", "false"); - - let consumer: rdkafka::consumer::StreamConsumer = - client.create().context("create kafka consumer")?; - - let topics: Vec<&str> = cfg.kafka_topics.iter().map(String::as_str).collect(); - consumer - .subscribe(&topics) - .context("subscribe kafka topics")?; - - tracing::info!( - group_id = %cfg.kafka_group_id, - topics = ?cfg.kafka_topics, - "canal worker started" - ); - - loop { - tokio::select! 
{ - _ = tokio::signal::ctrl_c() => { - tracing::info!("received shutdown signal"); - return Ok(()); - } - msg = consumer.recv() => { - let msg = match msg { - Ok(item) => item, - Err(err) => { - tracing::error!(error = ?err, "failed to fetch kafka message"); - continue; - } - }; - - let key = msg.key().unwrap_or_default(); - let value = msg.payload().unwrap_or_default(); - - if let Err(err) = handle_message(&search, &user, key, value).await { - if let Some(at) = locate_error(&err) { - tracing::error!( - error.file = at.file, - error.lino = at.line, - error.message = at.message, - error_chain = %format!("{err:#}"), - "failed to process kafka message" - ); - } else { - tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to process kafka message"); - } - continue; - } - - if let Err(err) = consumer.commit_message(&msg, CommitMode::Sync) { - tracing::error!(error = ?err, "failed to commit kafka message"); - } - } - } - } -} - -async fn handle_message( - search: &search_event::SearchDispatcher, - user: &user::UserDispatcher, - key: &[u8], - value: &[u8], -) -> anyhow::Result<()> { - if value.is_empty() { - return Ok(()); - } - - let payload: DebeziumPayload = match serde_json::from_slice(value) { - Ok(payload) => payload, - Err(err) => { - tracing::warn!(error = ?err, "failed to parse kafka value, skip message"); - return Ok(()); - } - }; - - match payload.source.table.as_str() { - "chii_subject_fields" => subject::on_subject_field(search, key, &payload.op).await?, - "chii_subjects" => subject::on_subject(search, key, &payload.op).await?, - "chii_characters" => character::on_character(search, key, &payload.op).await?, - "chii_persons" => person::on_person(search, key, &payload.op).await?, - "chii_members" => user - .on_user(key, &payload.op, payload.before, payload.after) - .await?, - _ => tracing::debug!(table = %payload.source.table, "ignored table event"), - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use common::ResultExt; - - #[test] - fn 
demo_error_logging_output() { - common::init_tracing(); - - let err = demo_error().expect_err("demo error should fail"); - if let Some(at) = common::locate_error(&err) { - tracing::error!( - error.file = at.file, - error.lino = at.line, - error.message = at.message, - error_chain = %format!("{err:#}"), - "demo error output" - ); - } else { - tracing::error!( - error = ?err, - error_chain = %format!("{err:#}"), - "demo error output" - ); - } - } - - fn demo_error() -> anyhow::Result<()> { - let _: serde_json::Value = - serde_json::from_slice(b"not-json").context_loc("parse debezium payload")?; - Ok(()) - } -} +use anyhow::{anyhow, Context}; +use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; +use common::locate_error; +use rdkafka::consumer::{CommitMode, Consumer}; +use rdkafka::Message; + +mod character; +mod person; +mod search_event; +mod subject; +mod types; +mod user; + +use types::DebeziumPayload; + +pub async fn run() -> anyhow::Result<()> { + let cfg = AppConfig::from_env("canal")?; + + if cfg.kafka_topics.is_empty() { + return Err(anyhow!( + "empty topics: set RUST_KAFKA_TOPICS (comma-separated) or KAFKA_TOPICS" + )); + } + + let mysql_pool = build_mysql_pool(&cfg).await?; + let search = search_event::SearchDispatcher::new(&cfg, mysql_pool.clone())?; + let user = user::UserDispatcher::new(&cfg, mysql_pool)?; + + let mut client = build_kafka_client_config(&cfg); + client.set("group.id", &cfg.kafka_group_id); + client.set("enable.auto.commit", "false"); + + let consumer: rdkafka::consumer::StreamConsumer = + client.create().context("create kafka consumer")?; + + let topics: Vec<&str> = cfg.kafka_topics.iter().map(String::as_str).collect(); + consumer + .subscribe(&topics) + .context("subscribe kafka topics")?; + + tracing::info!( + group_id = %cfg.kafka_group_id, + topics = ?cfg.kafka_topics, + "canal worker started" + ); + + loop { + tokio::select! 
{ + _ = tokio::signal::ctrl_c() => { + tracing::info!("received shutdown signal"); + return Ok(()); + } + msg = consumer.recv() => { + let msg = match msg { + Ok(item) => item, + Err(err) => { + tracing::error!(error = ?err, "failed to fetch kafka message"); + continue; + } + }; + + let key = msg.key().unwrap_or_default(); + let value = msg.payload().unwrap_or_default(); + + if let Err(err) = handle_message(&search, &user, key, value).await { + if let Some(at) = locate_error(&err) { + tracing::error!( + error.file = at.file, + error.lino = at.line, + error.message = at.message, + error_chain = %format!("{err:#}"), + "failed to process kafka message" + ); + } else { + tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to process kafka message"); + } + continue; + } + + if let Err(err) = consumer.commit_message(&msg, CommitMode::Sync) { + tracing::error!(error = ?err, "failed to commit kafka message"); + } + } + } + } +} + +async fn handle_message( + search: &search_event::SearchDispatcher, + user: &user::UserDispatcher, + key: &[u8], + value: &[u8], +) -> anyhow::Result<()> { + if value.is_empty() { + return Ok(()); + } + + let payload: DebeziumPayload = match serde_json::from_slice(value) { + Ok(payload) => payload, + Err(err) => { + tracing::warn!(error = ?err, "failed to parse kafka value, skip message"); + return Ok(()); + } + }; + + match payload.source.table.as_str() { + "chii_subject_fields" => { + subject::on_subject_field(search, key, &payload.op).await? + } + "chii_subjects" => subject::on_subject(search, key, &payload.op).await?, + "chii_characters" => character::on_character(search, key, &payload.op).await?, + "chii_persons" => person::on_person(search, key, &payload.op).await?, + "chii_members" => { + user + .on_user(key, &payload.op, payload.before, payload.after) + .await? 
+ } + _ => tracing::debug!(table = %payload.source.table, "ignored table event"), + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use common::ResultExt; + + #[test] + fn demo_error_logging_output() { + common::init_tracing(); + + let err = demo_error().expect_err("demo error should fail"); + if let Some(at) = common::locate_error(&err) { + tracing::error!( + error.file = at.file, + error.lino = at.line, + error.message = at.message, + error_chain = %format!("{err:#}"), + "demo error output" + ); + } else { + tracing::error!( + error = ?err, + error_chain = %format!("{err:#}"), + "demo error output" + ); + } + } + + fn demo_error() -> anyhow::Result<()> { + let _: serde_json::Value = + serde_json::from_slice(b"not-json").context_loc("parse debezium payload")?; + Ok(()) + } +} diff --git a/crates/app/src/worker/canal/person.rs b/crates/app/src/worker/canal/person.rs index 674d4306d..1c8bdeba5 100644 --- a/crates/app/src/worker/canal/person.rs +++ b/crates/app/src/worker/canal/person.rs @@ -1,24 +1,30 @@ -use anyhow::Context; -use serde::Deserialize; - -use super::search_event::SearchDispatcher; -use super::search_event::{self}; - -#[derive(Debug, Deserialize)] -struct PersonKey { - prsn_id: u32, -} - -pub async fn on_person(search: &SearchDispatcher, key: &[u8], op: &str) -> anyhow::Result<()> { - let key: PersonKey = serde_json::from_slice(key).context("parse person key")?; - on_person_change(search, key.prsn_id, op).await?; - Ok(()) -} - -async fn on_person_change( - search: &SearchDispatcher, - person_id: u32, - op: &str, -) -> anyhow::Result<()> { - search.dispatch(search_event::TARGET_PERSON, person_id, op).await -} +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct PersonKey { + prsn_id: u32, +} + +pub async fn on_person( + search: &SearchDispatcher, + key: &[u8], + op: &str, +) -> anyhow::Result<()> { + let key: PersonKey = 
serde_json::from_slice(key).context("parse person key")?; + on_person_change(search, key.prsn_id, op).await?; + Ok(()) +} + +async fn on_person_change( + search: &SearchDispatcher, + person_id: u32, + op: &str, +) -> anyhow::Result<()> { + search + .dispatch(search_event::TARGET_PERSON, person_id, op) + .await +} diff --git a/crates/app/src/worker/canal/search_event.rs b/crates/app/src/worker/canal/search_event.rs index 930519574..161814271 100644 --- a/crates/app/src/worker/canal/search_event.rs +++ b/crates/app/src/worker/canal/search_event.rs @@ -1,528 +1,551 @@ -use anyhow::{anyhow, Context}; -use bangumi_wiki_parser::{parse_omit_error, FieldValue, Wiki}; -use common::config::AppConfig; -use meilisearch_sdk::client::Client as MeiliSdkClient; -use php_serialize::from_str as parse_php_serialize; -use serde::{Deserialize, Serialize}; - -use super::types::{OP_CREATE, OP_DELETE, OP_SNAPSHOT, OP_UPDATE}; - -pub const TARGET_SUBJECT: &str = "subject"; -pub const TARGET_CHARACTER: &str = "character"; -pub const TARGET_PERSON: &str = "person"; - -pub struct SearchDispatcher { - pool: sqlx::MySqlPool, - meili: Option, -} - -struct MeiliClient { - client: MeiliSdkClient, -} - -#[derive(Serialize)] -struct SubjectDoc { - id: u32, - tag: Vec, - #[serde(rename = "meta_tag")] - meta_tag: Vec, - name: String, - aliases: Vec, - date: i32, - score: f64, - rating_count: u32, - page_rank: f64, - heat: u32, - rank: u32, - platform: u16, - #[serde(rename = "type")] - type_id: u8, - nsfw: bool, -} - -#[derive(Serialize)] -struct CharacterDoc { - id: u32, - name: String, - aliases: Vec, - comment: u32, - collect: u32, - nsfw: bool, -} - -#[derive(Serialize)] -struct PersonDoc { - id: u32, - name: String, - aliases: Vec, - comment: u32, - collect: u32, - career: Vec, -} - -#[derive(Deserialize)] -struct SubjectTagItem { - tag_name: Option, -} - -#[derive(sqlx::FromRow)] -struct SubjectRow { - subject_id: u32, - subject_name: String, - subject_name_cn: String, - field_infobox: String, - 
subject_type_id: u8, - subject_nsfw: bool, - subject_ban: u8, - subject_platform: u16, - field_meta_tags: String, - field_tags: String, - subject_wish: u32, - subject_collect: u32, - subject_doing: u32, - subject_on_hold: u32, - subject_dropped: u32, - field_rank: u32, - date: String, - field_rate_1: u32, - field_rate_2: u32, - field_rate_3: u32, - field_rate_4: u32, - field_rate_5: u32, - field_rate_6: u32, - field_rate_7: u32, - field_rate_8: u32, - field_rate_9: u32, - field_rate_10: u32, - field_redirect: u32, -} - -#[derive(sqlx::FromRow)] -struct CharacterRow { - crt_id: u32, - crt_name: String, - crt_infobox: String, - crt_comment: u32, - crt_collects: u32, - crt_nsfw: bool, - crt_redirect: u32, -} - -#[derive(sqlx::FromRow)] -struct PersonRow { - prsn_id: u32, - prsn_name: String, - prsn_infobox: String, - prsn_comment: u32, - prsn_collects: u32, - prsn_redirect: u32, - prsn_producer: bool, - prsn_mangaka: bool, - prsn_artist: bool, - prsn_seiyu: bool, - prsn_writer: bool, - prsn_illustrator: bool, - prsn_actor: bool, -} - -impl SearchDispatcher { - pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result { - let meili = if cfg.meilisearch_url.is_empty() { - None - } else { - let api_key = if cfg.meilisearch_key.is_empty() { - None - } else { - Some(cfg.meilisearch_key.clone()) - }; - Some(MeiliClient { - client: MeiliSdkClient::new(cfg.meilisearch_url.trim_end_matches('/'), api_key) - .context("create meilisearch client")?, - }) - }; - - Ok(Self { pool, meili }) - } - - pub async fn dispatch(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> { - match op { - OP_CREATE | OP_UPDATE | OP_SNAPSHOT => self.upsert(target, entity_id, op).await, - OP_DELETE => self.delete(target, entity_id, op).await, - _ => Err(anyhow!("unexpected operator: {op}")), - } - } - - async fn upsert(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> { - let Some(meili) = &self.meili else { - tracing::debug!(target, entity_id, op, "skip 
search upsert: meilisearch disabled"); - return Ok(()); - }; - - match target { - TARGET_SUBJECT => { - let row = sqlx::query_as::<_, SubjectRow>( - r#"SELECT s.subject_id, s.subject_name, s.subject_name_cn, s.field_infobox, - s.subject_type_id, s.subject_nsfw, s.subject_ban, - s.subject_platform, s.field_meta_tags, f.field_tags, - s.subject_wish, s.subject_collect, s.subject_doing, - s.subject_on_hold, s.subject_dropped, f.field_rank, DATE_FORMAT(f.field_date, '%Y-%m-%d'), - f.field_rate_1, f.field_rate_2, f.field_rate_3, f.field_rate_4, f.field_rate_5, - f.field_rate_6, f.field_rate_7, f.field_rate_8, f.field_rate_9, f.field_rate_10, - f.field_redirect - FROM chii_subjects s - JOIN chii_subject_fields f ON f.field_sid = s.subject_id - WHERE s.subject_id = ? - LIMIT 1"#, - ) - .bind(entity_id) - .fetch_optional(&self.pool) - .await - .context("load subject")?; - - let Some(row) = row - else { - return self.delete(target, entity_id, op).await; - }; - - if row.subject_ban != 0 || row.field_redirect != 0 { - return self.delete(target, entity_id, op).await; - } - - let mut aliases = Vec::new(); - if !row.subject_name_cn.is_empty() { - aliases.push(row.subject_name_cn.clone()); - } - let wiki = parse_omit_error(&row.field_infobox); - aliases.extend(extract_values_by_key(&wiki, "别名")); - - let meta_tag = split_space_values(&row.field_meta_tags); - let tag = parse_subject_tags(&row.field_tags); - let heat = row - .subject_wish - .saturating_add(row.subject_collect) - .saturating_add(row.subject_doing) - .saturating_add(row.subject_on_hold) - .saturating_add(row.subject_dropped); - let rating_total = row - .field_rate_1 - .saturating_add(row.field_rate_2) - .saturating_add(row.field_rate_3) - .saturating_add(row.field_rate_4) - .saturating_add(row.field_rate_5) - .saturating_add(row.field_rate_6) - .saturating_add(row.field_rate_7) - .saturating_add(row.field_rate_8) - .saturating_add(row.field_rate_9) - .saturating_add(row.field_rate_10); - let score_sum = 
(row.field_rate_1 as f64) * 1.0 - + (row.field_rate_2 as f64) * 2.0 - + (row.field_rate_3 as f64) * 3.0 - + (row.field_rate_4 as f64) * 4.0 - + (row.field_rate_5 as f64) * 5.0 - + (row.field_rate_6 as f64) * 6.0 - + (row.field_rate_7 as f64) * 7.0 - + (row.field_rate_8 as f64) * 8.0 - + (row.field_rate_9 as f64) * 9.0 - + (row.field_rate_10 as f64) * 10.0; - let score = if rating_total == 0 { - 0.0 - } else { - ((score_sum / (rating_total as f64)) * 10.0).round() / 10.0 - }; - - let doc = SubjectDoc { - id: row.subject_id, - tag, - meta_tag, - name: row.subject_name, - aliases, - date: parse_date_val(&row.date), - score, - rating_count: rating_total, - page_rank: rating_total as f64, - heat, - rank: row.field_rank, - platform: row.subject_platform, - type_id: row.subject_type_id, - nsfw: row.subject_nsfw, - }; - - meili.update_doc("subjects", &[doc]).await?; - } - TARGET_CHARACTER => { - let row = sqlx::query_as::<_, CharacterRow>( - r#"SELECT crt_id, crt_name, crt_infobox, crt_comment, crt_collects, crt_nsfw, crt_redirect - FROM chii_characters WHERE crt_id = ? LIMIT 1"#, - ) - .bind(entity_id) - .fetch_optional(&self.pool) - .await - .context("load character")?; - - let Some(row) = row else { - return self.delete(target, entity_id, op).await; - }; - - if row.crt_redirect != 0 { - return self.delete(target, entity_id, op).await; - } - - let doc = CharacterDoc { - id: row.crt_id, - name: row.crt_name, - aliases: extract_aliases(&parse_omit_error(&row.crt_infobox)), - comment: row.crt_comment, - collect: row.crt_collects, - nsfw: row.crt_nsfw, - }; - - meili.update_doc("characters", &[doc]).await?; - } - TARGET_PERSON => { - let row = sqlx::query_as::<_, PersonRow>( - r#"SELECT prsn_id, prsn_name, prsn_infobox, prsn_comment, prsn_collects, prsn_redirect, - prsn_producer, prsn_mangaka, prsn_artist, prsn_seiyu, - prsn_writer, prsn_illustrator, prsn_actor - FROM chii_persons WHERE prsn_id = ? 
LIMIT 1"#, - ) - .bind(entity_id) - .fetch_optional(&self.pool) - .await - .context("load person")?; - - let Some(row) = row else { - return self.delete(target, entity_id, op).await; - }; - - if row.prsn_redirect != 0 { - return self.delete(target, entity_id, op).await; - } - - let doc = PersonDoc { - id: row.prsn_id, - name: row.prsn_name, - aliases: extract_aliases(&parse_omit_error(&row.prsn_infobox)), - comment: row.prsn_comment, - collect: row.prsn_collects, - career: collect_careers( - row.prsn_producer, - row.prsn_mangaka, - row.prsn_artist, - row.prsn_seiyu, - row.prsn_writer, - row.prsn_illustrator, - row.prsn_actor, - ), - }; - - meili.update_doc("persons", &[doc]).await?; - } - _ => return Err(anyhow!("unknown search target: {target}")), - } - - tracing::info!(target, entity_id, op, action = "event_upsert", "search event handled"); - Ok(()) - } - - async fn delete(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> { - let Some(meili) = &self.meili else { - tracing::debug!(target, entity_id, op, "skip search delete: meilisearch disabled"); - return Ok(()); - }; - - let index = match target { - TARGET_SUBJECT => "subjects", - TARGET_CHARACTER => "characters", - TARGET_PERSON => "persons", - _ => return Err(anyhow!("unknown search target: {target}")), - }; - - meili.delete_doc(index, entity_id).await?; - tracing::info!(target, entity_id, op, action = "event_delete", "search event handled"); - Ok(()) - } -} - -fn split_space_values(input: &str) -> Vec { - input - .split(' ') - .map(ToOwned::to_owned) - .collect() -} - -fn wiki_values(v: &FieldValue) -> Vec { - match v { - FieldValue::Scalar(text) => vec![text.clone()], - FieldValue::Array(items) => items.iter().map(|x| x.value.clone()).collect(), - FieldValue::Null => Vec::new(), - } -} - -fn extract_values_by_key(wiki: &Wiki, target_key: &str) -> Vec { - let mut out = Vec::new(); - for field in &wiki.fields { - if field.key == target_key { - out.extend(wiki_values(&field.value)); - } - } - 
out -} - -fn extract_aliases(wiki: &Wiki) -> Vec { - let mut aliases = Vec::new(); - - for field in &wiki.fields { - if field.key == "中文名" { - aliases.extend(wiki_values(&field.value)); - } - if field.key == "简体中文名" { - aliases.extend(wiki_values(&field.value)); - } - } - - for field in &wiki.fields { - if field.key == "别名" { - aliases.extend(wiki_values(&field.value)); - } - } - - aliases -} - -fn parse_subject_tags(input: &str) -> Vec { - let parsed: Vec = match parse_php_serialize(input) { - Ok(v) => v, - Err(_) => return Vec::new(), - }; - - parsed - .into_iter() - .filter_map(|item| item.tag_name) - .filter(|x| !x.is_empty()) - .collect() -} - -fn parse_date_val(date: &str) -> i32 { - if date.len() < 10 { - return 0; - } - - let year = date[0..4].parse::().ok(); - let month = date[5..7].parse::().ok(); - let day = date[8..10].parse::().ok(); - - match (year, month, day) { - (Some(y), Some(m), Some(d)) => y * 10000 + m * 100 + d, - _ => 0, - } -} - -fn collect_careers( - producer: bool, - mangaka: bool, - artist: bool, - seiyu: bool, - writer: bool, - illustrator: bool, - actor: bool, -) -> Vec { - let mut out = Vec::new(); - - if writer { - out.push("writer".to_string()); - } - - if producer { - out.push("producer".to_string()); - } - if mangaka { - out.push("mangaka".to_string()); - } - if artist { - out.push("artist".to_string()); - } - if seiyu { - out.push("seiyu".to_string()); - } - if illustrator { - out.push("illustrator".to_string()); - } - if actor { - out.push("actor".to_string()); - } - - out -} - -impl MeiliClient { - async fn update_doc( - &self, - index: &str, - docs: &[T], - ) -> anyhow::Result<()> { - self - .client - .index(index) - .add_documents(docs, Some("id")) - .await - .context("meilisearch update")?; - Ok(()) - } - - async fn delete_doc(&self, index: &str, id: u32) -> anyhow::Result<()> { - self - .client - .index(index) - .delete_document(id) - .await - .context("meilisearch delete")?; - Ok(()) - } -} - -#[cfg(test)] -mod tests { - 
use super::{collect_careers, extract_aliases, parse_subject_tags}; - use bangumi_wiki_parser::parse_omit_error; - - #[test] - fn parse_subject_tags_php_serialized() { - let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}"; - - let tags = parse_subject_tags(raw); - assert_eq!(tags, vec!["动画".to_string(), "TV".to_string()]); - } - - #[test] - fn extract_aliases_for_person_character() { - let infobox = "{{Infobox\n|中文名=某角色\n|简体中文名=某角色简中\n|别名={\n[Alpha]\n[Beta]\n}\n|生日=2000-01-01\n}}"; - - let aliases = extract_aliases(&parse_omit_error(infobox)); - assert_eq!( - aliases, - vec![ - "某角色".to_string(), - "某角色简中".to_string(), - "Alpha".to_string(), - "Beta".to_string(), - ] - ); - } - - #[test] - fn collect_career_order_matches_go() { - let careers = collect_careers(true, true, true, true, true, true, true); - assert_eq!( - careers, - vec![ - "writer".to_string(), - "producer".to_string(), - "mangaka".to_string(), - "artist".to_string(), - "seiyu".to_string(), - "illustrator".to_string(), - "actor".to_string(), - ] - ); - } -} +use anyhow::{anyhow, Context}; +use bangumi_wiki_parser::{parse_omit_error, FieldValue, Wiki}; +use common::config::AppConfig; +use meilisearch_sdk::client::Client as MeiliSdkClient; +use php_serialize::from_str as parse_php_serialize; +use serde::{Deserialize, Serialize}; + +use super::types::{OP_CREATE, OP_DELETE, OP_SNAPSHOT, OP_UPDATE}; + +pub const TARGET_SUBJECT: &str = "subject"; +pub const TARGET_CHARACTER: &str = "character"; +pub const TARGET_PERSON: &str = "person"; + +pub struct SearchDispatcher { + pool: sqlx::MySqlPool, + meili: Option, +} + +struct MeiliClient { + client: MeiliSdkClient, +} + +#[derive(Serialize)] +struct SubjectDoc { + id: u32, + tag: Vec, + #[serde(rename = "meta_tag")] + meta_tag: Vec, + name: String, + aliases: Vec, + date: i32, + score: f64, + rating_count: u32, + 
page_rank: f64, + heat: u32, + rank: u32, + platform: u16, + #[serde(rename = "type")] + type_id: u8, + nsfw: bool, +} + +#[derive(Serialize)] +struct CharacterDoc { + id: u32, + name: String, + aliases: Vec, + comment: u32, + collect: u32, + nsfw: bool, +} + +#[derive(Serialize)] +struct PersonDoc { + id: u32, + name: String, + aliases: Vec, + comment: u32, + collect: u32, + career: Vec, +} + +#[derive(Deserialize)] +struct SubjectTagItem { + tag_name: Option, +} + +#[derive(sqlx::FromRow)] +struct SubjectRow { + subject_id: u32, + subject_name: String, + subject_name_cn: String, + field_infobox: String, + subject_type_id: u8, + subject_nsfw: bool, + subject_ban: u8, + subject_platform: u16, + field_meta_tags: String, + field_tags: String, + subject_wish: u32, + subject_collect: u32, + subject_doing: u32, + subject_on_hold: u32, + subject_dropped: u32, + field_rank: u32, + date: String, + field_rate_1: u32, + field_rate_2: u32, + field_rate_3: u32, + field_rate_4: u32, + field_rate_5: u32, + field_rate_6: u32, + field_rate_7: u32, + field_rate_8: u32, + field_rate_9: u32, + field_rate_10: u32, + field_redirect: u32, +} + +#[derive(sqlx::FromRow)] +struct CharacterRow { + crt_id: u32, + crt_name: String, + crt_infobox: String, + crt_comment: u32, + crt_collects: u32, + crt_nsfw: bool, + crt_redirect: u32, +} + +#[derive(sqlx::FromRow)] +struct PersonRow { + prsn_id: u32, + prsn_name: String, + prsn_infobox: String, + prsn_comment: u32, + prsn_collects: u32, + prsn_redirect: u32, + prsn_producer: bool, + prsn_mangaka: bool, + prsn_artist: bool, + prsn_seiyu: bool, + prsn_writer: bool, + prsn_illustrator: bool, + prsn_actor: bool, +} + +impl SearchDispatcher { + pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result { + let meili = if cfg.meilisearch_url.is_empty() { + None + } else { + let api_key = if cfg.meilisearch_key.is_empty() { + None + } else { + Some(cfg.meilisearch_key.clone()) + }; + Some(MeiliClient { + client: 
MeiliSdkClient::new(cfg.meilisearch_url.trim_end_matches('/'), api_key) + .context("create meilisearch client")?, + }) + }; + + Ok(Self { pool, meili }) + } + + pub async fn dispatch( + &self, + target: &str, + entity_id: u32, + op: &str, + ) -> anyhow::Result<()> { + match op { + OP_CREATE | OP_UPDATE | OP_SNAPSHOT => self.upsert(target, entity_id, op).await, + OP_DELETE => self.delete(target, entity_id, op).await, + _ => Err(anyhow!("unexpected operator: {op}")), + } + } + + async fn upsert(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> { + let Some(meili) = &self.meili else { + tracing::debug!( + target, + entity_id, + op, + "skip search upsert: meilisearch disabled" + ); + return Ok(()); + }; + + match target { + TARGET_SUBJECT => { + let row = sqlx::query_as::<_, SubjectRow>( + r#"SELECT s.subject_id, s.subject_name, s.subject_name_cn, s.field_infobox, + s.subject_type_id, s.subject_nsfw, s.subject_ban, + s.subject_platform, s.field_meta_tags, f.field_tags, + s.subject_wish, s.subject_collect, s.subject_doing, + s.subject_on_hold, s.subject_dropped, f.field_rank, DATE_FORMAT(f.field_date, '%Y-%m-%d'), + f.field_rate_1, f.field_rate_2, f.field_rate_3, f.field_rate_4, f.field_rate_5, + f.field_rate_6, f.field_rate_7, f.field_rate_8, f.field_rate_9, f.field_rate_10, + f.field_redirect + FROM chii_subjects s + JOIN chii_subject_fields f ON f.field_sid = s.subject_id + WHERE s.subject_id = ? 
+ LIMIT 1"#, + ) + .bind(entity_id) + .fetch_optional(&self.pool) + .await + .context("load subject")?; + + let Some(row) = row else { + return self.delete(target, entity_id, op).await; + }; + + if row.subject_ban != 0 || row.field_redirect != 0 { + return self.delete(target, entity_id, op).await; + } + + let mut aliases = Vec::new(); + if !row.subject_name_cn.is_empty() { + aliases.push(row.subject_name_cn.clone()); + } + let wiki = parse_omit_error(&row.field_infobox); + aliases.extend(extract_values_by_key(&wiki, "别名")); + + let meta_tag = split_space_values(&row.field_meta_tags); + let tag = parse_subject_tags(&row.field_tags); + let heat = row + .subject_wish + .saturating_add(row.subject_collect) + .saturating_add(row.subject_doing) + .saturating_add(row.subject_on_hold) + .saturating_add(row.subject_dropped); + let rating_total = row + .field_rate_1 + .saturating_add(row.field_rate_2) + .saturating_add(row.field_rate_3) + .saturating_add(row.field_rate_4) + .saturating_add(row.field_rate_5) + .saturating_add(row.field_rate_6) + .saturating_add(row.field_rate_7) + .saturating_add(row.field_rate_8) + .saturating_add(row.field_rate_9) + .saturating_add(row.field_rate_10); + let score_sum = (row.field_rate_1 as f64) * 1.0 + + (row.field_rate_2 as f64) * 2.0 + + (row.field_rate_3 as f64) * 3.0 + + (row.field_rate_4 as f64) * 4.0 + + (row.field_rate_5 as f64) * 5.0 + + (row.field_rate_6 as f64) * 6.0 + + (row.field_rate_7 as f64) * 7.0 + + (row.field_rate_8 as f64) * 8.0 + + (row.field_rate_9 as f64) * 9.0 + + (row.field_rate_10 as f64) * 10.0; + let score = if rating_total == 0 { + 0.0 + } else { + ((score_sum / (rating_total as f64)) * 10.0).round() / 10.0 + }; + + let doc = SubjectDoc { + id: row.subject_id, + tag, + meta_tag, + name: row.subject_name, + aliases, + date: parse_date_val(&row.date), + score, + rating_count: rating_total, + page_rank: rating_total as f64, + heat, + rank: row.field_rank, + platform: row.subject_platform, + type_id: 
row.subject_type_id, + nsfw: row.subject_nsfw, + }; + + meili.update_doc("subjects", &[doc]).await?; + } + TARGET_CHARACTER => { + let row = sqlx::query_as::<_, CharacterRow>( + r#"SELECT crt_id, crt_name, crt_infobox, crt_comment, crt_collects, crt_nsfw, crt_redirect + FROM chii_characters WHERE crt_id = ? LIMIT 1"#, + ) + .bind(entity_id) + .fetch_optional(&self.pool) + .await + .context("load character")?; + + let Some(row) = row else { + return self.delete(target, entity_id, op).await; + }; + + if row.crt_redirect != 0 { + return self.delete(target, entity_id, op).await; + } + + let doc = CharacterDoc { + id: row.crt_id, + name: row.crt_name, + aliases: extract_aliases(&parse_omit_error(&row.crt_infobox)), + comment: row.crt_comment, + collect: row.crt_collects, + nsfw: row.crt_nsfw, + }; + + meili.update_doc("characters", &[doc]).await?; + } + TARGET_PERSON => { + let row = sqlx::query_as::<_, PersonRow>( + r#"SELECT prsn_id, prsn_name, prsn_infobox, prsn_comment, prsn_collects, prsn_redirect, + prsn_producer, prsn_mangaka, prsn_artist, prsn_seiyu, + prsn_writer, prsn_illustrator, prsn_actor + FROM chii_persons WHERE prsn_id = ? 
LIMIT 1"#, + ) + .bind(entity_id) + .fetch_optional(&self.pool) + .await + .context("load person")?; + + let Some(row) = row else { + return self.delete(target, entity_id, op).await; + }; + + if row.prsn_redirect != 0 { + return self.delete(target, entity_id, op).await; + } + + let doc = PersonDoc { + id: row.prsn_id, + name: row.prsn_name, + aliases: extract_aliases(&parse_omit_error(&row.prsn_infobox)), + comment: row.prsn_comment, + collect: row.prsn_collects, + career: collect_careers( + row.prsn_producer, + row.prsn_mangaka, + row.prsn_artist, + row.prsn_seiyu, + row.prsn_writer, + row.prsn_illustrator, + row.prsn_actor, + ), + }; + + meili.update_doc("persons", &[doc]).await?; + } + _ => return Err(anyhow!("unknown search target: {target}")), + } + + tracing::info!( + target, + entity_id, + op, + action = "event_upsert", + "search event handled" + ); + Ok(()) + } + + async fn delete(&self, target: &str, entity_id: u32, op: &str) -> anyhow::Result<()> { + let Some(meili) = &self.meili else { + tracing::debug!( + target, + entity_id, + op, + "skip search delete: meilisearch disabled" + ); + return Ok(()); + }; + + let index = match target { + TARGET_SUBJECT => "subjects", + TARGET_CHARACTER => "characters", + TARGET_PERSON => "persons", + _ => return Err(anyhow!("unknown search target: {target}")), + }; + + meili.delete_doc(index, entity_id).await?; + tracing::info!( + target, + entity_id, + op, + action = "event_delete", + "search event handled" + ); + Ok(()) + } +} + +fn split_space_values(input: &str) -> Vec { + input.split(' ').map(ToOwned::to_owned).collect() +} + +fn wiki_values(v: &FieldValue) -> Vec { + match v { + FieldValue::Scalar(text) => vec![text.clone()], + FieldValue::Array(items) => items.iter().map(|x| x.value.clone()).collect(), + FieldValue::Null => Vec::new(), + } +} + +fn extract_values_by_key(wiki: &Wiki, target_key: &str) -> Vec { + let mut out = Vec::new(); + for field in &wiki.fields { + if field.key == target_key { + 
out.extend(wiki_values(&field.value)); + } + } + out +} + +fn extract_aliases(wiki: &Wiki) -> Vec { + let mut aliases = Vec::new(); + + for field in &wiki.fields { + if field.key == "中文名" { + aliases.extend(wiki_values(&field.value)); + } + if field.key == "简体中文名" { + aliases.extend(wiki_values(&field.value)); + } + } + + for field in &wiki.fields { + if field.key == "别名" { + aliases.extend(wiki_values(&field.value)); + } + } + + aliases +} + +fn parse_subject_tags(input: &str) -> Vec { + let parsed: Vec = match parse_php_serialize(input) { + Ok(v) => v, + Err(_) => return Vec::new(), + }; + + parsed + .into_iter() + .filter_map(|item| item.tag_name) + .filter(|x| !x.is_empty()) + .collect() +} + +fn parse_date_val(date: &str) -> i32 { + if date.len() < 10 { + return 0; + } + + let year = date[0..4].parse::().ok(); + let month = date[5..7].parse::().ok(); + let day = date[8..10].parse::().ok(); + + match (year, month, day) { + (Some(y), Some(m), Some(d)) => y * 10000 + m * 100 + d, + _ => 0, + } +} + +fn collect_careers( + producer: bool, + mangaka: bool, + artist: bool, + seiyu: bool, + writer: bool, + illustrator: bool, + actor: bool, +) -> Vec { + let mut out = Vec::new(); + + if writer { + out.push("writer".to_string()); + } + + if producer { + out.push("producer".to_string()); + } + if mangaka { + out.push("mangaka".to_string()); + } + if artist { + out.push("artist".to_string()); + } + if seiyu { + out.push("seiyu".to_string()); + } + if illustrator { + out.push("illustrator".to_string()); + } + if actor { + out.push("actor".to_string()); + } + + out +} + +impl MeiliClient { + async fn update_doc( + &self, + index: &str, + docs: &[T], + ) -> anyhow::Result<()> { + self + .client + .index(index) + .add_documents(docs, Some("id")) + .await + .context("meilisearch update")?; + Ok(()) + } + + async fn delete_doc(&self, index: &str, id: u32) -> anyhow::Result<()> { + self + .client + .index(index) + .delete_document(id) + .await + .context("meilisearch delete")?; 
+ Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::{collect_careers, extract_aliases, parse_subject_tags}; + use bangumi_wiki_parser::parse_omit_error; + + #[test] + fn parse_subject_tags_php_serialized() { + let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}"; + + let tags = parse_subject_tags(raw); + assert_eq!(tags, vec!["动画".to_string(), "TV".to_string()]); + } + + #[test] + fn extract_aliases_for_person_character() { + let infobox = "{{Infobox\n|中文名=某角色\n|简体中文名=某角色简中\n|别名={\n[Alpha]\n[Beta]\n}\n|生日=2000-01-01\n}}"; + + let aliases = extract_aliases(&parse_omit_error(infobox)); + assert_eq!( + aliases, + vec![ + "某角色".to_string(), + "某角色简中".to_string(), + "Alpha".to_string(), + "Beta".to_string(), + ] + ); + } + + #[test] + fn collect_career_order_matches_go() { + let careers = collect_careers(true, true, true, true, true, true, true); + assert_eq!( + careers, + vec![ + "writer".to_string(), + "producer".to_string(), + "mangaka".to_string(), + "artist".to_string(), + "seiyu".to_string(), + "illustrator".to_string(), + "actor".to_string(), + ] + ); + } +} diff --git a/crates/app/src/worker/canal/subject.rs b/crates/app/src/worker/canal/subject.rs index 630087144..4b0047325 100644 --- a/crates/app/src/worker/canal/subject.rs +++ b/crates/app/src/worker/canal/subject.rs @@ -1,39 +1,46 @@ -use anyhow::Context; -use serde::Deserialize; - -use super::search_event::SearchDispatcher; -use super::search_event::{self}; - -#[derive(Debug, Deserialize)] -struct SubjectKey { - subject_id: u32, -} - -#[derive(Debug, Deserialize)] -struct SubjectFieldKey { - field_sid: u32, -} - -pub async fn on_subject(search: &SearchDispatcher, key: &[u8], op: &str) -> anyhow::Result<()> { - let key: SubjectKey = serde_json::from_slice(key).context("parse subject key")?; - on_subject_change(search, key.subject_id, op).await?; - Ok(()) -} - 
-pub async fn on_subject_field( - search: &SearchDispatcher, - key: &[u8], - op: &str, -) -> anyhow::Result<()> { - let key: SubjectFieldKey = serde_json::from_slice(key).context("parse subject field key")?; - on_subject_change(search, key.field_sid, op).await?; - Ok(()) -} - -async fn on_subject_change( - search: &SearchDispatcher, - subject_id: u32, - op: &str, -) -> anyhow::Result<()> { - search.dispatch(search_event::TARGET_SUBJECT, subject_id, op).await -} +use anyhow::Context; +use serde::Deserialize; + +use super::search_event::SearchDispatcher; +use super::search_event::{self}; + +#[derive(Debug, Deserialize)] +struct SubjectKey { + subject_id: u32, +} + +#[derive(Debug, Deserialize)] +struct SubjectFieldKey { + field_sid: u32, +} + +pub async fn on_subject( + search: &SearchDispatcher, + key: &[u8], + op: &str, +) -> anyhow::Result<()> { + let key: SubjectKey = serde_json::from_slice(key).context("parse subject key")?; + on_subject_change(search, key.subject_id, op).await?; + Ok(()) +} + +pub async fn on_subject_field( + search: &SearchDispatcher, + key: &[u8], + op: &str, +) -> anyhow::Result<()> { + let key: SubjectFieldKey = + serde_json::from_slice(key).context("parse subject field key")?; + on_subject_change(search, key.field_sid, op).await?; + Ok(()) +} + +async fn on_subject_change( + search: &SearchDispatcher, + subject_id: u32, + op: &str, +) -> anyhow::Result<()> { + search + .dispatch(search_event::TARGET_SUBJECT, subject_id, op) + .await +} diff --git a/crates/app/src/worker/canal/types.rs b/crates/app/src/worker/canal/types.rs index 3b38a630c..6107cb1f5 100644 --- a/crates/app/src/worker/canal/types.rs +++ b/crates/app/src/worker/canal/types.rs @@ -1,20 +1,20 @@ -use serde::Deserialize; -use serde_json::Value; - -pub const OP_CREATE: &str = "c"; -pub const OP_DELETE: &str = "d"; -pub const OP_UPDATE: &str = "u"; -pub const OP_SNAPSHOT: &str = "r"; - -#[derive(Debug, Deserialize)] -pub struct DebeziumPayload { - pub before: Option, - pub after: 
Option, - pub source: DebeziumSource, - pub op: String, -} - -#[derive(Debug, Deserialize)] -pub struct DebeziumSource { - pub table: String, -} +use serde::Deserialize; +use serde_json::Value; + +pub const OP_CREATE: &str = "c"; +pub const OP_DELETE: &str = "d"; +pub const OP_UPDATE: &str = "u"; +pub const OP_SNAPSHOT: &str = "r"; + +#[derive(Debug, Deserialize)] +pub struct DebeziumPayload { + pub before: Option, + pub after: Option, + pub source: DebeziumSource, + pub op: String, +} + +#[derive(Debug, Deserialize)] +pub struct DebeziumSource { + pub table: String, +} diff --git a/crates/app/src/worker/canal/user.rs b/crates/app/src/worker/canal/user.rs index ec614952e..8e06a6afe 100644 --- a/crates/app/src/worker/canal/user.rs +++ b/crates/app/src/worker/canal/user.rs @@ -1,293 +1,312 @@ -use anyhow::Context; -use common::config::AppConfig; -use opendal::services; -use opendal::Operator; -use serde::Deserialize; -use serde_json::Value; -use std::collections::HashSet; - -use super::types::OP_UPDATE; - -#[derive(Debug, Deserialize)] -struct UserKey { - uid: u32, -} - -pub struct UserDispatcher { - pool: sqlx::MySqlPool, - redis: redis::Client, - s3: Option, -} - -#[derive(Debug, serde::Serialize)] -struct RedisUserChannel { - user_id: u32, - new_notify: u16, -} - -impl UserDispatcher { - pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result { - let redis = redis::Client::open(cfg.redis_url.as_str()).context("create redis client")?; - let s3 = build_s3_operator(cfg)?; - Ok(Self { pool, redis, s3 }) - } - - pub async fn on_user( - &self, - key: &[u8], - op: &str, - before: Option, - after: Option, - ) -> anyhow::Result<()> { - let key: UserKey = serde_json::from_slice(key).context("parse user key")?; - self.on_user_change(key.uid, op, before, after).await - } - - async fn on_user_change( - &self, - user_id: u32, - op: &str, - before: Option, - after: Option, - ) -> anyhow::Result<()> { - if op != OP_UPDATE { - return Ok(()); - } - - let (Some(before), 
Some(after)) = (before, after) else { - return Ok(()); - }; - - let old_password = before.get("password_crypt").and_then(Value::as_str); - let new_password = after.get("password_crypt").and_then(Value::as_str); - - if old_password != new_password { - self.revoke_user_sessions(user_id).await?; - tracing::info!(user_id, "password changed, sessions revoked"); - } - - let old_notify = before.get("new_notify").and_then(Value::as_u64); - let new_notify = after.get("new_notify").and_then(Value::as_u64); - if old_notify != new_notify { - let notify = new_notify.unwrap_or_default() as u16; - self.publish_notify_change(user_id, notify).await?; - tracing::info!(user_id, new_notify = notify, "new notify changed, published redis event"); - } - - let old_avatar = before.get("avatar").and_then(Value::as_str); - let new_avatar = after.get("avatar").and_then(Value::as_str); - if old_avatar != new_avatar { - if let (Some(s3), Some(avatar)) = (&self.s3, new_avatar) { - tracing::debug!(user_id, avatar, "avatar changed, clear image cache in background"); - let s3 = s3.clone(); - let avatar = avatar.to_owned(); - tokio::spawn(async move { - if let Err(err) = clear_image_cache(s3, avatar).await { - tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to clear s3 cached image"); - } - }); - } - } - - Ok(()) - } - - async fn revoke_user_sessions(&self, user_id: u32) -> anyhow::Result<()> { - let now = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .context("system time before unix epoch")? - .as_secs() as i64; - - let rows = sqlx::query_as::<_, (String,)>( - r#"SELECT `key` FROM chii_os_web_sessions WHERE user_id = ?"#, - ) - .bind(user_id) - .fetch_all(&self.pool) - .await - .context("load user sessions")?; - - sqlx::query( - r#"UPDATE chii_os_web_sessions SET expired_at = ? 
WHERE user_id = ?"#, - ) - .bind(now) - .bind(user_id) - .execute(&self.pool) - .await - .context("revoke user sessions in mysql")?; - - if rows.is_empty() { - return Ok(()); - } - - let mut conn = self - .redis - .get_multiplexed_async_connection() - .await - .context("get redis connection")?; - - let mut cmd = redis::cmd("DEL"); - for (key,) in rows { - cmd.arg(format!("chii:web:session:{key}")); - } - let _: i64 = cmd.query_async(&mut conn).await.context("delete redis sessions")?; - - Ok(()) - } - - async fn publish_notify_change(&self, user_id: u32, new_notify: u16) -> anyhow::Result<()> { - let message = serde_json::to_string(&RedisUserChannel { - user_id, - new_notify, - }) - .context("encode redis user notify message")?; - - let mut conn = self - .redis - .get_multiplexed_async_connection() - .await - .context("get redis connection")?; - - let channel = format!("event-user-notify-{user_id}"); - let _: i64 = redis::cmd("PUBLISH") - .arg(channel) - .arg(message) - .query_async(&mut conn) - .await - .context("publish user notify event")?; - - Ok(()) - } -} - -fn build_s3_operator(cfg: &AppConfig) -> anyhow::Result> { - if cfg.s3_entry_point.is_empty() || cfg.s3_access_key.is_empty() || cfg.s3_secret_key.is_empty() - { - return Ok(None); - } - - let builder = services::S3::default() - .root("/") - .endpoint(&cfg.s3_entry_point) - .access_key_id(&cfg.s3_access_key) - .secret_access_key(&cfg.s3_secret_key) - .bucket(&cfg.s3_image_resize_bucket); - - let builder = if let Some(region) = &cfg.s3_region { - builder.region(region) - } else { - builder - }; - - let operator = Operator::new(builder) - .context("create s3 operator")? 
- .finish(); - - Ok(Some(operator)) -} - -async fn clear_image_cache(s3: Operator, avatar: String) -> anyhow::Result<()> { - let (path, query) = avatar - .split_once('?') - .map_or((avatar.as_str(), ""), |(path, query)| (path, query)); - - let mut prefix = format!("/pic/user/l/{path}"); - if query.contains("hd=1") { - prefix = format!("/hd{prefix}"); - } - - tracing::debug!(avatar, prefix, "clear image cache by prefix"); - - let mut keys: HashSet = HashSet::new(); - - for candidate_prefix in prefix_candidates(&prefix) { - let dir = prefix_dirname(&candidate_prefix); - - let entries = match tokio::time::timeout( - std::time::Duration::from_secs(10), - s3.list(&dir), - ) - .await - { - Ok(Ok(entries)) => entries, - Ok(Err(err)) => { - tracing::warn!( - prefix = candidate_prefix, - dir, - error = ?err, - "failed to list cached avatar objects by dirname" - ); - continue; - } - Err(_) => { - tracing::warn!( - prefix = candidate_prefix, - dir, - "timeout while listing cached avatar objects by dirname" - ); - continue; - } - }; - - for entry in entries { - let path = entry.path(); - if path.starts_with(candidate_prefix.as_str()) { - keys.insert(path.to_string()); - } - } - - if !keys.is_empty() { - break; - } - } - - for key in keys { - if let Err(err) = s3.delete(&key).await { - tracing::error!( - key, - error = ?err, - "failed to delete cached avatar object" - ); - } - } - - Ok(()) -} - -fn prefix_candidates(prefix: &str) -> Vec { - if let Some(stripped) = prefix.strip_prefix('/') { - vec![prefix.to_owned(), stripped.to_owned()] - } else { - vec![prefix.to_owned()] - } -} - -fn prefix_dirname(prefix: &str) -> String { - match prefix.rsplit_once('/') { - Some((dir, _)) if !dir.is_empty() => format!("{dir}/"), - Some(_) => "/".to_string(), - None => "/".to_string(), - } -} - -#[cfg(test)] -mod tests { - use super::{prefix_candidates, prefix_dirname}; - - #[test] - fn test_prefix_dirname() { - assert_eq!(prefix_dirname("/pic/user/l/a.jpg"), "/pic/user/l/"); - 
assert_eq!(prefix_dirname("pic/user/l/a.jpg"), "pic/user/l/"); - assert_eq!(prefix_dirname("/a.jpg"), "/"); - assert_eq!(prefix_dirname("a.jpg"), "/"); - } - - #[test] - fn test_prefix_candidates() { - assert_eq!( - prefix_candidates("/pic/user/l/a.jpg"), - vec!["/pic/user/l/a.jpg".to_string(), "pic/user/l/a.jpg".to_string()] - ); - assert_eq!(prefix_candidates("pic/user/l/a.jpg"), vec!["pic/user/l/a.jpg".to_string()]); - } - -} +use anyhow::Context; +use common::config::AppConfig; +use opendal::services; +use opendal::Operator; +use serde::Deserialize; +use serde_json::Value; +use std::collections::HashSet; + +use super::types::OP_UPDATE; + +#[derive(Debug, Deserialize)] +struct UserKey { + uid: u32, +} + +pub struct UserDispatcher { + pool: sqlx::MySqlPool, + redis: redis::Client, + s3: Option, +} + +#[derive(Debug, serde::Serialize)] +struct RedisUserChannel { + user_id: u32, + new_notify: u16, +} + +impl UserDispatcher { + pub fn new(cfg: &AppConfig, pool: sqlx::MySqlPool) -> anyhow::Result { + let redis = + redis::Client::open(cfg.redis_url.as_str()).context("create redis client")?; + let s3 = build_s3_operator(cfg)?; + Ok(Self { pool, redis, s3 }) + } + + pub async fn on_user( + &self, + key: &[u8], + op: &str, + before: Option, + after: Option, + ) -> anyhow::Result<()> { + let key: UserKey = serde_json::from_slice(key).context("parse user key")?; + self.on_user_change(key.uid, op, before, after).await + } + + async fn on_user_change( + &self, + user_id: u32, + op: &str, + before: Option, + after: Option, + ) -> anyhow::Result<()> { + if op != OP_UPDATE { + return Ok(()); + } + + let (Some(before), Some(after)) = (before, after) else { + return Ok(()); + }; + + let old_password = before.get("password_crypt").and_then(Value::as_str); + let new_password = after.get("password_crypt").and_then(Value::as_str); + + if old_password != new_password { + self.revoke_user_sessions(user_id).await?; + tracing::info!(user_id, "password changed, sessions revoked"); + } + + 
let old_notify = before.get("new_notify").and_then(Value::as_u64); + let new_notify = after.get("new_notify").and_then(Value::as_u64); + if old_notify != new_notify { + let notify = new_notify.unwrap_or_default() as u16; + self.publish_notify_change(user_id, notify).await?; + tracing::info!( + user_id, + new_notify = notify, + "new notify changed, published redis event" + ); + } + + let old_avatar = before.get("avatar").and_then(Value::as_str); + let new_avatar = after.get("avatar").and_then(Value::as_str); + if old_avatar != new_avatar { + if let (Some(s3), Some(avatar)) = (&self.s3, new_avatar) { + tracing::debug!( + user_id, + avatar, + "avatar changed, clear image cache in background" + ); + let s3 = s3.clone(); + let avatar = avatar.to_owned(); + tokio::spawn(async move { + if let Err(err) = clear_image_cache(s3, avatar).await { + tracing::error!(error = ?err, error_chain = %format!("{err:#}"), "failed to clear s3 cached image"); + } + }); + } + } + + Ok(()) + } + + async fn revoke_user_sessions(&self, user_id: u32) -> anyhow::Result<()> { + let now = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .context("system time before unix epoch")? + .as_secs() as i64; + + let rows = sqlx::query_as::<_, (String,)>( + r#"SELECT `key` FROM chii_os_web_sessions WHERE user_id = ?"#, + ) + .bind(user_id) + .fetch_all(&self.pool) + .await + .context("load user sessions")?; + + sqlx::query(r#"UPDATE chii_os_web_sessions SET expired_at = ? 
WHERE user_id = ?"#) + .bind(now) + .bind(user_id) + .execute(&self.pool) + .await + .context("revoke user sessions in mysql")?; + + if rows.is_empty() { + return Ok(()); + } + + let mut conn = self + .redis + .get_multiplexed_async_connection() + .await + .context("get redis connection")?; + + let mut cmd = redis::cmd("DEL"); + for (key,) in rows { + cmd.arg(format!("chii:web:session:{key}")); + } + let _: i64 = cmd + .query_async(&mut conn) + .await + .context("delete redis sessions")?; + + Ok(()) + } + + async fn publish_notify_change( + &self, + user_id: u32, + new_notify: u16, + ) -> anyhow::Result<()> { + let message = serde_json::to_string(&RedisUserChannel { + user_id, + new_notify, + }) + .context("encode redis user notify message")?; + + let mut conn = self + .redis + .get_multiplexed_async_connection() + .await + .context("get redis connection")?; + + let channel = format!("event-user-notify-{user_id}"); + let _: i64 = redis::cmd("PUBLISH") + .arg(channel) + .arg(message) + .query_async(&mut conn) + .await + .context("publish user notify event")?; + + Ok(()) + } +} + +fn build_s3_operator(cfg: &AppConfig) -> anyhow::Result> { + if cfg.s3_entry_point.is_empty() + || cfg.s3_access_key.is_empty() + || cfg.s3_secret_key.is_empty() + { + return Ok(None); + } + + let builder = services::S3::default() + .root("/") + .endpoint(&cfg.s3_entry_point) + .access_key_id(&cfg.s3_access_key) + .secret_access_key(&cfg.s3_secret_key) + .bucket(&cfg.s3_image_resize_bucket); + + let builder = if let Some(region) = &cfg.s3_region { + builder.region(region) + } else { + builder + }; + + let operator = Operator::new(builder) + .context("create s3 operator")? 
+ .finish(); + + Ok(Some(operator)) +} + +async fn clear_image_cache(s3: Operator, avatar: String) -> anyhow::Result<()> { + let (path, query) = avatar + .split_once('?') + .map_or((avatar.as_str(), ""), |(path, query)| (path, query)); + + let mut prefix = format!("/pic/user/l/{path}"); + if query.contains("hd=1") { + prefix = format!("/hd{prefix}"); + } + + tracing::debug!(avatar, prefix, "clear image cache by prefix"); + + let mut keys: HashSet = HashSet::new(); + + for candidate_prefix in prefix_candidates(&prefix) { + let dir = prefix_dirname(&candidate_prefix); + + let entries = + match tokio::time::timeout(std::time::Duration::from_secs(10), s3.list(&dir)) + .await + { + Ok(Ok(entries)) => entries, + Ok(Err(err)) => { + tracing::warn!( + prefix = candidate_prefix, + dir, + error = ?err, + "failed to list cached avatar objects by dirname" + ); + continue; + } + Err(_) => { + tracing::warn!( + prefix = candidate_prefix, + dir, + "timeout while listing cached avatar objects by dirname" + ); + continue; + } + }; + + for entry in entries { + let path = entry.path(); + if path.starts_with(candidate_prefix.as_str()) { + keys.insert(path.to_string()); + } + } + + if !keys.is_empty() { + break; + } + } + + for key in keys { + if let Err(err) = s3.delete(&key).await { + tracing::error!( + key, + error = ?err, + "failed to delete cached avatar object" + ); + } + } + + Ok(()) +} + +fn prefix_candidates(prefix: &str) -> Vec { + if let Some(stripped) = prefix.strip_prefix('/') { + vec![prefix.to_owned(), stripped.to_owned()] + } else { + vec![prefix.to_owned()] + } +} + +fn prefix_dirname(prefix: &str) -> String { + match prefix.rsplit_once('/') { + Some((dir, _)) if !dir.is_empty() => format!("{dir}/"), + Some(_) => "/".to_string(), + None => "/".to_string(), + } +} + +#[cfg(test)] +mod tests { + use super::{prefix_candidates, prefix_dirname}; + + #[test] + fn test_prefix_dirname() { + assert_eq!(prefix_dirname("/pic/user/l/a.jpg"), "/pic/user/l/"); + 
assert_eq!(prefix_dirname("pic/user/l/a.jpg"), "pic/user/l/"); + assert_eq!(prefix_dirname("/a.jpg"), "/"); + assert_eq!(prefix_dirname("a.jpg"), "/"); + } + + #[test] + fn test_prefix_candidates() { + assert_eq!( + prefix_candidates("/pic/user/l/a.jpg"), + vec![ + "/pic/user/l/a.jpg".to_string(), + "pic/user/l/a.jpg".to_string() + ] + ); + assert_eq!( + prefix_candidates("pic/user/l/a.jpg"), + vec!["pic/user/l/a.jpg".to_string()] + ); + } +} diff --git a/crates/app/src/worker/mod.rs b/crates/app/src/worker/mod.rs index 199a6588c..1096e2834 100644 --- a/crates/app/src/worker/mod.rs +++ b/crates/app/src/worker/mod.rs @@ -1,2 +1,2 @@ -pub mod canal; -pub mod timeline; +pub mod canal; +pub mod timeline; diff --git a/crates/app/src/worker/timeline.rs b/crates/app/src/worker/timeline.rs index 655568bf6..dc461e8ff 100644 --- a/crates/app/src/worker/timeline.rs +++ b/crates/app/src/worker/timeline.rs @@ -1,22 +1,22 @@ -use anyhow::Context; -use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; -use rdkafka::producer::FutureProducer; - -const TIMELINE_TOPIC: &str = "timeline"; - -pub async fn run() -> anyhow::Result<()> { - let cfg = AppConfig::from_env("timeline-worker")?; - let _mysql = build_mysql_pool(&cfg).await?; - - let _producer: FutureProducer = build_kafka_client_config(&cfg) - .create() - .context("create kafka producer")?; - - tracing::info!(topic = TIMELINE_TOPIC, "timeline worker started"); - - tokio::signal::ctrl_c().await.context("wait ctrl-c")?; - - tracing::info!("timeline worker shutdown"); - - Ok(()) -} +use anyhow::Context; +use common::config::{build_kafka_client_config, build_mysql_pool, AppConfig}; +use rdkafka::producer::FutureProducer; + +const TIMELINE_TOPIC: &str = "timeline"; + +pub async fn run() -> anyhow::Result<()> { + let cfg = AppConfig::from_env("timeline-worker")?; + let _mysql = build_mysql_pool(&cfg).await?; + + let _producer: FutureProducer = build_kafka_client_config(&cfg) + .create() + .context("create kafka 
producer")?; + + tracing::info!(topic = TIMELINE_TOPIC, "timeline worker started"); + + tokio::signal::ctrl_c().await.context("wait ctrl-c")?; + + tracing::info!("timeline worker shutdown"); + + Ok(()) +} diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 965f1fc0b..9cd7666ca 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -11,8 +11,8 @@ tracing.workspace = true tracing-subscriber.workspace = true sqlx.workspace = true -[target.'cfg(unix)'.dependencies] -rdkafka = { workspace = true, features = ["cmake-build", "tokio", "ssl-vendored"] } +[target.'cfg(windows)'.dependencies] +rdkafka = { workspace = true, features = ["cmake-build"] } -[target.'cfg(not(unix))'.dependencies] -rdkafka = { workspace = true, features = ["cmake-build", "tokio"] } +[target.'cfg(unix)'.dependencies] +rdkafka = { workspace = true, features = ["cmake-build", "static-linking", "ssl-vendored", "curl-static", "libz-static"] } diff --git a/crates/php-serialize/src/lib.rs b/crates/php-serialize/src/lib.rs index 8be6bee2f..87f793cc2 100644 --- a/crates/php-serialize/src/lib.rs +++ b/crates/php-serialize/src/lib.rs @@ -1,56 +1,56 @@ -use serde::de::DeserializeOwned; - -pub use serde_php::Error; -pub use serde_php::Result; - -pub fn from_bytes(input: &[u8]) -> Result { - serde_php::from_bytes(input) -} - -pub fn from_str(input: &str) -> Result { - from_bytes(input.as_bytes()) -} - -#[cfg(test)] -mod tests { - use serde::Deserialize; - - use super::from_str; - - #[derive(Debug, Deserialize, PartialEq, Eq)] - struct TagItem { - tag_name: Option, - result: String, - } - - #[test] - fn deserialize_vec_struct() { - let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}"; - - let actual: Vec = from_str(raw).expect("deserialize php serialized vec"); - assert_eq!( - actual, - vec![ - TagItem { - tag_name: 
Some("动画".to_string()), - result: "2".to_string(), - }, - TagItem { - tag_name: None, - result: "1".to_string(), - }, - TagItem { - tag_name: Some("TV".to_string()), - result: "1".to_string(), - }, - ] - ); - } - - #[test] - fn deserialize_scalar_string() { - let raw = "s:6:\"动画\";"; - let value: String = from_str(raw).expect("deserialize php string"); - assert_eq!(value, "动画"); - } -} +use serde::de::DeserializeOwned; + +pub use serde_php::Error; +pub use serde_php::Result; + +pub fn from_bytes(input: &[u8]) -> Result { + serde_php::from_bytes(input) +} + +pub fn from_str(input: &str) -> Result { + from_bytes(input.as_bytes()) +} + +#[cfg(test)] +mod tests { + use serde::Deserialize; + + use super::from_str; + + #[derive(Debug, Deserialize, PartialEq, Eq)] + struct TagItem { + tag_name: Option, + result: String, + } + + #[test] + fn deserialize_vec_struct() { + let raw = "a:3:{i:0;a:2:{s:8:\"tag_name\";s:6:\"动画\";s:6:\"result\";s:1:\"2\";}i:1;a:2:{s:8:\"tag_name\";N;s:6:\"result\";s:1:\"1\";}i:2;a:2:{s:8:\"tag_name\";s:2:\"TV\";s:6:\"result\";s:1:\"1\";}}"; + + let actual: Vec = from_str(raw).expect("deserialize php serialized vec"); + assert_eq!( + actual, + vec![ + TagItem { + tag_name: Some("动画".to_string()), + result: "2".to_string(), + }, + TagItem { + tag_name: None, + result: "1".to_string(), + }, + TagItem { + tag_name: Some("TV".to_string()), + result: "1".to_string(), + }, + ] + ); + } + + #[test] + fn deserialize_scalar_string() { + let raw = "s:6:\"动画\";"; + let value: String = from_str(raw).expect("deserialize php string"); + assert_eq!(value, "动画"); + } +} From a3bc0f8e354d8cac6216d4360dcffa420ad4aac6 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 20 Feb 2026 00:05:57 +0800 Subject: [PATCH 3/7] up --- Cargo.lock | 198 +++++ crates/app/Cargo.toml | 8 + crates/app/src/lib.rs | 2 + crates/app/src/main.rs | 8 +- crates/app/src/server/characters.rs | 712 ++++++++++++++++++ crates/app/src/server/media.rs | 141 ++++ crates/app/src/server/mod.rs | 870 
++++++++++++++++++++++ crates/app/src/server/persons.rs | 637 ++++++++++++++++ crates/app/src/server/subjects.rs | 1029 ++++++++++++++++++++++++++ crates/app/src/server/test_mocks.rs | 15 + crates/app/src/server/users.rs | 447 +++++++++++ crates/app/tests/common/mod.rs | 66 ++ crates/app/tests/server_real_deps.rs | 260 +++++++ crates/app/tests/server_smoke.rs | 235 ++++++ docs/rust-migration-plan.md | 15 +- docs/rust-workspace.md | 33 + 16 files changed, 4667 insertions(+), 9 deletions(-) create mode 100644 crates/app/src/lib.rs create mode 100644 crates/app/src/server/characters.rs create mode 100644 crates/app/src/server/media.rs create mode 100644 crates/app/src/server/mod.rs create mode 100644 crates/app/src/server/persons.rs create mode 100644 crates/app/src/server/subjects.rs create mode 100644 crates/app/src/server/test_mocks.rs create mode 100644 crates/app/src/server/users.rs create mode 100644 crates/app/tests/common/mod.rs create mode 100644 crates/app/tests/server_real_deps.rs create mode 100644 crates/app/tests/server_smoke.rs diff --git a/Cargo.lock b/Cargo.lock index 6efb08847..258b9f21b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -87,10 +87,13 @@ name = "app" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", + "axum", "bangumi-wiki-parser", "clap", "common", "meilisearch-sdk", + "mockall", "opendal", "php-serialize", "rdkafka", @@ -99,7 +102,10 @@ dependencies = [ "serde_json", "sqlx", "tokio", + "tower", "tracing", + "utoipa", + "uuid", ] [[package]] @@ -143,6 +149,61 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + 
"http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "backon" version = "1.6.0" @@ -525,6 +586,12 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "downcast" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + [[package]] name = "either" version = "1.15.0" @@ -594,6 +661,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fragile" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" + [[package]] name = "futures" version = "0.3.32" @@ -884,6 +957,12 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + [[package]] name = "hyper" version = "1.8.1" @@ -898,6 +977,7 @@ dependencies = [ "http", 
"http-body", "httparse", + "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -1286,6 +1366,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "maybe-uninit" version = "2.0.0" @@ -1348,6 +1434,12 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "mio" version = "1.1.1" @@ -1359,6 +1451,32 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "mockall" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "mockall_derive", + "predicates", + "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.116", +] + [[package]] name = "nom" version = "8.0.0" @@ -1664,6 +1782,32 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "predicates" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe" +dependencies = [ + "anstyle", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144" + +[[package]] +name = "predicates-tree" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "prettyplease" version = "0.2.37" @@ -1916,6 +2060,18 @@ dependencies = [ "bitflags", ] +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + [[package]] name = "regex-automata" version = "0.4.14" @@ -2165,6 +2321,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "serde_php" version = "0.5.0" @@ -2526,6 +2693,12 @@ dependencies = [ "syn 2.0.116", ] +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + [[package]] name = "thiserror" version = "1.0.69" @@ -2744,6 +2917,7 @@ dependencies = [ "tokio", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -2932,6 +3106,30 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "utoipa" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fcc29c80c21c31608227e0912b2d7fddba57ad76b606890627ba8ee7964e993" +dependencies = [ + "indexmap", + "serde", + "serde_json", + 
"utoipa-gen", +] + +[[package]] +name = "utoipa-gen" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d79d08d92ab8af4c5e8a6da20c47ae3f61a0f1dabc1997cdf2d082b757ca08b" +dependencies = [ + "proc-macro2", + "quote", + "regex", + "syn 2.0.116", +] + [[package]] name = "uuid" version = "1.21.0" diff --git a/crates/app/Cargo.toml b/crates/app/Cargo.toml index fc02f7ded..939237b92 100644 --- a/crates/app/Cargo.toml +++ b/crates/app/Cargo.toml @@ -17,6 +17,10 @@ meilisearch-sdk.workspace = true opendal.workspace = true bangumi-wiki-parser.workspace = true php-serialize.workspace = true +axum = "0.7" +uuid = { version = "1", features = ["v7"] } +utoipa = { version = "5", features = ["axum_extras"] } +async-trait = "0.1" common = { path = "../common" } [target.'cfg(windows)'.dependencies] @@ -24,3 +28,7 @@ rdkafka = { workspace = true, features = ["cmake-build"] } [target.'cfg(unix)'.dependencies] rdkafka = { workspace = true, features = ["cmake-build", "static-linking", "ssl-vendored", "curl-static", "libz-static"] } + +[dev-dependencies] +tower = "0.5" +mockall = "0.13" diff --git a/crates/app/src/lib.rs b/crates/app/src/lib.rs new file mode 100644 index 000000000..b9c8060a1 --- /dev/null +++ b/crates/app/src/lib.rs @@ -0,0 +1,2 @@ +pub mod server; +pub mod worker; diff --git a/crates/app/src/main.rs b/crates/app/src/main.rs index af2e735c8..2246998a2 100644 --- a/crates/app/src/main.rs +++ b/crates/app/src/main.rs @@ -1,8 +1,7 @@ use anyhow::Context; +use app::{server, worker}; use clap::{Parser, Subcommand}; -mod worker; - #[derive(Debug, Parser)] #[command(name = "app")] struct Cli { @@ -41,8 +40,5 @@ async fn main() -> anyhow::Result<()> { } async fn run_server() -> anyhow::Result<()> { - tracing::info!("server subcommand placeholder is ready"); - tokio::signal::ctrl_c().await.context("wait ctrl-c")?; - tracing::info!("server placeholder shutdown"); - Ok(()) + server::run().await.context("run server") } diff --git 
a/crates/app/src/server/characters.rs b/crates/app/src/server/characters.rs new file mode 100644 index 000000000..d30b1ff9e --- /dev/null +++ b/crates/app/src/server/characters.rs @@ -0,0 +1,712 @@ +use async_trait::async_trait; +use axum::{ + extract::{Extension, Path, Query, State}, + http::StatusCode, + response::Redirect, + Json, +}; +use serde::{Deserialize, Serialize}; +use sqlx::QueryBuilder; +use std::collections::HashMap; +use utoipa::ToSchema; + +#[cfg(test)] +use mockall::automock; + +use super::media::{ + person_image, select_person_image_url, select_subject_image_url, PersonImages, + DEFAULT_IMAGE_URL, +}; +use super::{ + character_staff_string, execute_search, join_filter, parse_page, search_total, + user_id_from_auth, ApiResult, AppState, MySqlExecutor, PageInfo, PageQuery, + RequestAuth, +}; + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct CharacterReq { + keyword: String, + #[serde(default)] + filter: CharacterFilter, +} + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct CharacterFilter { + nsfw: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +struct SearchHit { + id: u32, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct CharacterDoc { + id: u32, + name: String, + #[serde(rename = "type")] + role: u8, + summary: String, + locked: bool, + #[serde(default)] + images: PersonImages, + stat: Stat, + gender: Option, + blood_type: Option, + birth_year: Option, + birth_mon: Option, + birth_day: Option, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct Stat { + comments: u32, + collects: u32, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct CharacterSearchResponse { + #[serde(flatten)] + page: PageInfo, + data: Vec, +} + +#[derive(sqlx::FromRow)] +struct CharacterRow { + crt_id: u32, + crt_name: String, + crt_role: u8, + crt_summary: String, + crt_img: String, + crt_comment: u32, + crt_collects: u32, + crt_lock: i8, + gender: Option, + 
bloodtype: Option, + birth_year: Option, + birth_mon: Option, + birth_day: Option, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct CharacterRelatedSubject { + id: u32, + #[serde(rename = "type")] + subject_type: u8, + staff: String, + eps: String, + name: String, + name_cn: String, + image: String, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct CharacterRelatedPerson { + id: u32, + name: String, + #[serde(rename = "type")] + person_type: u8, + images: PersonImages, + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + staff: String, +} + +#[derive(sqlx::FromRow)] +struct RelatedSubjectRow { + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + subject_image: String, + relation_type: u8, + eps: String, +} + +#[derive(sqlx::FromRow)] +struct RelatedPersonRow { + person_id: u32, + person_name: String, + person_type: u8, + person_img: String, + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + relation_type: u8, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub(super) struct ImageQuery { + #[serde(rename = "type")] + image_type: String, +} + +#[cfg_attr(test, automock)] +#[async_trait] +pub(super) trait CharacterImageRepo: Send + Sync { + async fn find_character_image_path( + &self, + character_id: u32, + ) -> Result, super::ApiError>; +} + +struct DbCharacterImageRepo<'a> { + pool: &'a sqlx::MySqlPool, +} + +#[async_trait] +impl CharacterImageRepo for DbCharacterImageRepo<'_> { + async fn find_character_image_path( + &self, + character_id: u32, + ) -> Result, super::ApiError> { + sqlx::query_scalar::<_, String>( + "SELECT crt_img FROM chii_characters WHERE crt_redirect = 0 AND crt_id = ? 
LIMIT 1", + ) + .bind(character_id) + .fetch_optional(self.pool) + .await + .map_err(|_| super::ApiError::internal("load character image failed")) + } +} + +#[utoipa::path( + post, + path = "/v0/search/characters", + request_body = CharacterReq, + params(PageQuery), + responses( + (status = 200, description = "返回搜索结果", body = CharacterSearchResponse), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn search_characters( + State(state): State, + Extension(auth): Extension, + page: Query, + Json(body): Json, +) -> ApiResult { + let (limit, offset) = parse_page(page); + let allow_nsfw = auth.allow_nsfw; + + let mut filters = Vec::new(); + if !allow_nsfw { + filters.push("nsfw = false".to_string()); + } else if let Some(v) = body.filter.nsfw { + filters.push(format!("nsfw = {v}")); + } + + let docs = execute_search::( + &state, + "characters", + &body.keyword, + limit, + offset, + join_filter(&filters), + None, + ) + .await?; + + let ids: Vec = docs.hits.iter().map(|x| x.result.id).collect(); + let data = load_characters(&state, &ids).await?; + + let total = search_total(&docs); + + Ok(Json(CharacterSearchResponse { + page: PageInfo::new(total, limit, offset), + data, + })) +} + +#[utoipa::path( + get, + path = "/v0/characters/{character_id}", + params(("character_id" = u32, Path, description = "角色 ID")), + responses( + (status = 200, description = "角色详情", body = CharacterDoc), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_character( + State(state): State, + Extension(auth): Extension, + Path(character_id): Path, +) -> ApiResult { + let allow_nsfw = auth.allow_nsfw; + + let mut qb = QueryBuilder::new( + "SELECT c.crt_id, c.crt_name, c.crt_role, c.crt_summary, c.crt_img, c.crt_comment, c.crt_collects, c.crt_lock, \ + f.gender, f.bloodtype, 
f.birth_year, f.birth_mon, f.birth_day \ + FROM chii_characters c \ + LEFT JOIN chii_person_fields f ON f.prsn_cat = 'crt' AND f.prsn_id = c.crt_id \ + WHERE c.crt_redirect = 0 AND c.crt_id = ", + ); + qb.push_bind(character_id); + if !allow_nsfw { + qb.push(" AND c.crt_nsfw = 0"); + } + qb.push(" LIMIT 1"); + + let row: Option = qb + .build_query_as() + .fetch_optional(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load character failed"))?; + + let row = row.ok_or_else(|| super::ApiError::not_found("character not found"))?; + Ok(Json(character_from_row(&row))) +} + +#[utoipa::path( + get, + path = "/v0/characters/{character_id}/image", + params( + ("character_id" = u32, Path, description = "角色 ID"), + ("type" = String, Query, description = "图片尺寸,可选值:small, grid, large, medium") + ), + responses( + (status = 302, description = "重定向到图片地址"), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_character_image( + State(state): State, + Path(character_id): Path, + Query(query): Query, +) -> Result { + let repo = DbCharacterImageRepo { pool: &state.pool }; + let image_url = + resolve_character_image_url(&repo, character_id, &query.image_type).await?; + Ok(Redirect::temporary(&image_url)) +} + +#[utoipa::path( + post, + path = "/v0/characters/{character_id}/collect", + params(("character_id" = u32, Path, description = "角色 ID")), + responses( + (status = 204, description = "收藏成功"), + (status = 401, description = "未授权", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn collect_character( + State(state): State, + Extension(auth): Extension, + Path(character_id): Path, +) -> Result { + let user_id = user_id_from_auth(auth)?; + 
collect_character_with_pool(&state.pool, user_id, character_id).await?; + Ok(StatusCode::NO_CONTENT) +} + +#[utoipa::path( + delete, + path = "/v0/characters/{character_id}/collect", + params(("character_id" = u32, Path, description = "角色 ID")), + responses( + (status = 204, description = "取消收藏成功"), + (status = 401, description = "未授权", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn uncollect_character( + State(state): State, + Extension(auth): Extension, + Path(character_id): Path, +) -> Result { + let user_id = user_id_from_auth(auth)?; + uncollect_character_with_pool(&state.pool, user_id, character_id).await?; + Ok(StatusCode::NO_CONTENT) +} + +#[utoipa::path( + get, + path = "/v0/characters/{character_id}/subjects", + params(("character_id" = u32, Path, description = "角色 ID")), + responses( + (status = 200, description = "角色关联条目", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_character_related_subjects( + State(state): State, + Path(character_id): Path, +) -> ApiResult> { + ensure_character_exists(&state.pool, character_id).await?; + + let rows: Vec = sqlx::query_as( + "SELECT i.subject_id, s.subject_type_id AS subject_type, s.subject_name, s.subject_name_cn, \ + s.subject_image, i.crt_type AS relation_type, i.crt_appear_eps AS eps \ + FROM chii_crt_subject_index i \ + JOIN chii_subjects s ON s.subject_id = i.subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE i.crt_id = ? 
AND s.subject_ban = 0 AND f.field_redirect = 0 \ + ORDER BY i.crt_order, s.subject_id", + ) + .bind(character_id) + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load related subjects failed"))?; + + let data = rows + .into_iter() + .map(|row| CharacterRelatedSubject { + id: row.subject_id, + subject_type: row.subject_type, + staff: character_staff_string(row.relation_type), + eps: row.eps, + name: row.subject_name, + name_cn: row.subject_name_cn, + image: select_subject_image_url(&row.subject_image, "large").unwrap_or_default(), + }) + .collect(); + + Ok(Json(data)) +} + +#[utoipa::path( + get, + path = "/v0/characters/{character_id}/persons", + params(("character_id" = u32, Path, description = "角色 ID")), + responses( + (status = 200, description = "角色关联人物", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_character_related_persons( + State(state): State, + Path(character_id): Path, +) -> ApiResult> { + ensure_character_exists(&state.pool, character_id).await?; + + let rows: Vec = sqlx::query_as( + "SELECT p.prsn_id AS person_id, p.prsn_name AS person_name, p.prsn_type AS person_type, p.prsn_img AS person_img, \ + s.subject_id, s.subject_type_id AS subject_type, s.subject_name, s.subject_name_cn, \ + COALESCE(si.crt_type, 0) AS relation_type \ + FROM chii_crt_cast_index ci \ + JOIN chii_persons p ON p.prsn_id = ci.prsn_id \ + JOIN chii_subjects s ON s.subject_id = ci.subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + LEFT JOIN chii_crt_subject_index si ON si.crt_id = ci.crt_id AND si.subject_id = ci.subject_id \ + WHERE ci.crt_id = ? 
AND p.prsn_redirect = 0 AND s.subject_ban = 0 AND f.field_redirect = 0 \ + ORDER BY s.subject_id, p.prsn_id", + ) + .bind(character_id) + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load related persons failed"))?; + + let data = rows + .into_iter() + .map(|row| CharacterRelatedPerson { + id: row.person_id, + name: row.person_name, + person_type: row.person_type, + images: person_image(&row.person_img), + subject_id: row.subject_id, + subject_type: row.subject_type, + subject_name: row.subject_name, + subject_name_cn: row.subject_name_cn, + staff: character_staff_string(row.relation_type), + }) + .collect(); + + Ok(Json(data)) +} + +async fn ensure_character_exists<'e>( + executor: impl MySqlExecutor<'e>, + character_id: u32, +) -> Result<(), super::ApiError> { + let exists = sqlx::query_scalar::<_, u32>( + "SELECT crt_id FROM chii_characters WHERE crt_redirect = 0 AND crt_id = ? LIMIT 1", + ) + .bind(character_id) + .fetch_optional(executor) + .await + .map_err(|_| super::ApiError::internal("load character failed"))? + .is_some(); + + if !exists { + return Err(super::ApiError::not_found("character not found")); + } + + Ok(()) +} + +pub(super) async fn collect_character_with_pool( + pool: &sqlx::MySqlPool, + user_id: u32, + character_id: u32, +) -> Result<(), super::ApiError> { + ensure_character_exists(pool, character_id).await?; + + let exists = sqlx::query_scalar::<_, u32>( + "SELECT prsn_clt_id FROM chii_person_collects WHERE prsn_clt_cat = 'crt' AND prsn_clt_uid = ? AND prsn_clt_mid = ? LIMIT 1", + ) + .bind(user_id) + .bind(character_id) + .fetch_optional(pool) + .await + .map_err(|_| super::ApiError::internal("query character collect failed"))? 
+ .is_some(); + + if exists { + return Ok(()); + } + + sqlx::query( + "INSERT INTO chii_person_collects (prsn_clt_cat, prsn_clt_mid, prsn_clt_uid, prsn_clt_dateline) VALUES ('crt', ?, ?, UNIX_TIMESTAMP())", + ) + .bind(character_id) + .bind(user_id) + .execute(pool) + .await + .map_err(|_| super::ApiError::internal("add character collect failed"))?; + + Ok(()) +} + +pub(super) async fn uncollect_character_with_pool( + pool: &sqlx::MySqlPool, + user_id: u32, + character_id: u32, +) -> Result<(), super::ApiError> { + ensure_character_exists(pool, character_id).await?; + + let result = sqlx::query( + "DELETE FROM chii_person_collects WHERE prsn_clt_cat = 'crt' AND prsn_clt_uid = ? AND prsn_clt_mid = ?", + ) + .bind(user_id) + .bind(character_id) + .execute(pool) + .await + .map_err(|_| super::ApiError::internal("remove character collect failed"))?; + + if result.rows_affected() == 0 { + return Err(super::ApiError::not_found("character not collected")); + } + + Ok(()) +} + +#[cfg(test)] +#[allow(dead_code)] +pub(super) async fn collect_character_with_tx( + tx: &mut sqlx::Transaction<'_, sqlx::MySql>, + user_id: u32, + character_id: u32, +) -> Result<(), super::ApiError> { + ensure_character_exists(&mut **tx, character_id).await?; + + let exists = sqlx::query_scalar::<_, u32>( + "SELECT prsn_clt_id FROM chii_person_collects WHERE prsn_clt_cat = 'crt' AND prsn_clt_uid = ? AND prsn_clt_mid = ? LIMIT 1", + ) + .bind(user_id) + .bind(character_id) + .fetch_optional(&mut **tx) + .await + .map_err(|_| super::ApiError::internal("query character collect failed"))? 
+ .is_some(); + + if exists { + return Ok(()); + } + + sqlx::query( + "INSERT INTO chii_person_collects (prsn_clt_cat, prsn_clt_mid, prsn_clt_uid, prsn_clt_dateline) VALUES ('crt', ?, ?, UNIX_TIMESTAMP())", + ) + .bind(character_id) + .bind(user_id) + .execute(&mut **tx) + .await + .map_err(|_| super::ApiError::internal("add character collect failed"))?; + + Ok(()) +} + +#[cfg(test)] +#[allow(dead_code)] +pub(super) async fn uncollect_character_with_tx( + tx: &mut sqlx::Transaction<'_, sqlx::MySql>, + user_id: u32, + character_id: u32, +) -> Result<(), super::ApiError> { + ensure_character_exists(&mut **tx, character_id).await?; + + let result = sqlx::query( + "DELETE FROM chii_person_collects WHERE prsn_clt_cat = 'crt' AND prsn_clt_uid = ? AND prsn_clt_mid = ?", + ) + .bind(user_id) + .bind(character_id) + .execute(&mut **tx) + .await + .map_err(|_| super::ApiError::internal("remove character collect failed"))?; + + if result.rows_affected() == 0 { + return Err(super::ApiError::not_found("character not collected")); + } + + Ok(()) +} + +async fn resolve_character_image_url( + repo: &impl CharacterImageRepo, + character_id: u32, + image_type: &str, +) -> Result { + let path = repo.find_character_image_path(character_id).await?; + let path = path.ok_or_else(|| super::ApiError::not_found("character not found"))?; + + let image_url = select_person_image_url(&path, image_type).ok_or_else(|| { + super::ApiError::bad_request(format!("bad image type: {image_type}")) + })?; + + if image_url.is_empty() { + return Ok(DEFAULT_IMAGE_URL.to_string()); + } + + Ok(image_url) +} + +async fn load_characters( + state: &AppState, + ids: &[u32], +) -> Result, super::ApiError> { + if ids.is_empty() { + return Ok(Vec::new()); + } + + let mut qb = QueryBuilder::new( + "SELECT c.crt_id, c.crt_name, c.crt_role, c.crt_summary, c.crt_img, c.crt_comment, c.crt_collects, c.crt_lock, \ + f.gender, f.bloodtype, f.birth_year, f.birth_mon, f.birth_day \ + FROM chii_characters c \ + LEFT JOIN 
chii_person_fields f ON f.prsn_cat = 'crt' AND f.prsn_id = c.crt_id \ + WHERE c.crt_redirect = 0 AND c.crt_id IN (", + ); + + { + let mut separated = qb.separated(", "); + for id in ids { + separated.push_bind(*id); + } + } + qb.push(")"); + + let rows: Vec = qb + .build_query_as() + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load characters failed"))?; + + let by_id: HashMap = + rows.into_iter().map(|x| (x.crt_id, x)).collect(); + + let mut result = Vec::with_capacity(ids.len()); + for id in ids { + if let Some(row) = by_id.get(id) { + result.push(character_from_row(row)); + } + } + + Ok(result) +} + +fn character_from_row(row: &CharacterRow) -> CharacterDoc { + CharacterDoc { + id: row.crt_id, + name: row.crt_name.clone(), + role: row.crt_role, + summary: row.crt_summary.clone(), + locked: row.crt_lock != 0, + images: person_image(&row.crt_img), + stat: Stat { + comments: row.crt_comment, + collects: row.crt_collects, + }, + gender: map_gender(row.gender), + blood_type: row.bloodtype.filter(|x| *x != 0), + birth_year: row.birth_year.filter(|x| *x != 0), + birth_mon: row.birth_mon.filter(|x| *x != 0), + birth_day: row.birth_day.filter(|x| *x != 0), + } +} + +fn map_gender(raw: Option) -> Option { + match raw { + Some(1) => Some("male".to_string()), + Some(2) => Some("female".to_string()), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::resolve_character_image_url; + use crate::server::test_mocks::MockPool; + use axum::http::StatusCode; + + #[tokio::test] + async fn resolve_character_image_url_returns_default_for_empty_path() { + let mut pool = MockPool::new(); + pool + .character_image_repo + .expect_find_character_image_path() + .withf(|character_id| *character_id == 8) + .times(1) + .returning(|_| Ok(Some(String::new()))); + + let got = resolve_character_image_url(&pool.character_image_repo, 8, "small") + .await + .expect("resolve image"); + + assert_eq!(got, "https://lain.bgm.tv/img/no_icon_subject.png"); + } + + 
#[tokio::test] + async fn resolve_character_image_url_returns_not_found_when_missing() { + let mut pool = MockPool::new(); + pool + .character_image_repo + .expect_find_character_image_path() + .withf(|character_id| *character_id == 404) + .times(1) + .returning(|_| Ok(None)); + + let err = resolve_character_image_url(&pool.character_image_repo, 404, "small") + .await + .expect_err("expect not found"); + + assert_eq!(err.status, StatusCode::NOT_FOUND); + assert_eq!(err.message, "character not found"); + } + + #[tokio::test] + async fn resolve_character_image_url_returns_bad_request_for_invalid_type() { + let mut pool = MockPool::new(); + pool + .character_image_repo + .expect_find_character_image_path() + .times(1) + .returning(|_| Ok(Some("ab/cd.jpg".to_string()))); + + let err = resolve_character_image_url(&pool.character_image_repo, 1, "invalid") + .await + .expect_err("expect bad request"); + + assert_eq!(err.status, StatusCode::BAD_REQUEST); + assert_eq!(err.message, "bad image type: invalid"); + } +} diff --git a/crates/app/src/server/media.rs b/crates/app/src/server/media.rs new file mode 100644 index 000000000..4e4cd213f --- /dev/null +++ b/crates/app/src/server/media.rs @@ -0,0 +1,141 @@ +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; + +pub(super) const DEFAULT_IMAGE_URL: &str = + "https://lain.bgm.tv/img/no_icon_subject.png"; + +#[derive(Debug, Clone, Default, Deserialize, Serialize, ToSchema)] +pub(super) struct PersonImages { + small: String, + grid: String, + large: String, + medium: String, +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize, ToSchema)] +pub(super) struct SubjectImages { + small: String, + grid: String, + large: String, + medium: String, + common: String, +} + +pub(super) fn person_image(path: &str) -> PersonImages { + if path.is_empty() { + return PersonImages::default(); + } + + PersonImages { + large: format!("https://lain.bgm.tv/pic/crt/l/{path}"), + small: 
format!("https://lain.bgm.tv/r/100/pic/crt/l/{path}"), + grid: format!("https://lain.bgm.tv/pic/crt/g/{path}"), + medium: format!("https://lain.bgm.tv/r/400/pic/crt/l/{path}"), + } +} + +pub(super) fn select_person_image_url(path: &str, image_type: &str) -> Option { + let images = person_image(path); + match image_type { + "small" => Some(images.small), + "grid" => Some(images.grid), + "large" => Some(images.large), + "medium" => Some(images.medium), + _ => None, + } +} + +pub(super) fn subject_image(path: &str) -> SubjectImages { + if path.is_empty() { + return SubjectImages::default(); + } + + SubjectImages { + large: format!("https://lain.bgm.tv/pic/cover/l/{path}"), + grid: format!("https://lain.bgm.tv/r/100/pic/cover/l/{path}"), + small: format!("https://lain.bgm.tv/r/200/pic/cover/l/{path}"), + common: format!("https://lain.bgm.tv/r/400/pic/cover/l/{path}"), + medium: format!("https://lain.bgm.tv/r/800/pic/cover/l/{path}"), + } +} + +pub(super) fn select_subject_image_url(path: &str, image_type: &str) -> Option { + let images = subject_image(path); + match image_type { + "small" => Some(images.small), + "grid" => Some(images.grid), + "large" => Some(images.large), + "medium" => Some(images.medium), + "common" => Some(images.common), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::{select_person_image_url, select_subject_image_url, DEFAULT_IMAGE_URL}; + + #[test] + fn select_subject_image_url_maps_all_supported_types() { + let path = "ab/cd.jpg"; + + assert_eq!( + select_subject_image_url(path, "small").as_deref(), + Some("https://lain.bgm.tv/r/200/pic/cover/l/ab/cd.jpg") + ); + assert_eq!( + select_subject_image_url(path, "grid").as_deref(), + Some("https://lain.bgm.tv/r/100/pic/cover/l/ab/cd.jpg") + ); + assert_eq!( + select_subject_image_url(path, "large").as_deref(), + Some("https://lain.bgm.tv/pic/cover/l/ab/cd.jpg") + ); + assert_eq!( + select_subject_image_url(path, "medium").as_deref(), + 
Some("https://lain.bgm.tv/r/800/pic/cover/l/ab/cd.jpg") + ); + assert_eq!( + select_subject_image_url(path, "common").as_deref(), + Some("https://lain.bgm.tv/r/400/pic/cover/l/ab/cd.jpg") + ); + } + + #[test] + fn select_person_image_url_maps_all_supported_types() { + let path = "ef/gh.jpg"; + + assert_eq!( + select_person_image_url(path, "small").as_deref(), + Some("https://lain.bgm.tv/r/100/pic/crt/l/ef/gh.jpg") + ); + assert_eq!( + select_person_image_url(path, "grid").as_deref(), + Some("https://lain.bgm.tv/pic/crt/g/ef/gh.jpg") + ); + assert_eq!( + select_person_image_url(path, "large").as_deref(), + Some("https://lain.bgm.tv/pic/crt/l/ef/gh.jpg") + ); + assert_eq!( + select_person_image_url(path, "medium").as_deref(), + Some("https://lain.bgm.tv/r/400/pic/crt/l/ef/gh.jpg") + ); + } + + #[test] + fn select_image_url_rejects_invalid_type() { + assert_eq!(select_subject_image_url("ab/cd.jpg", "x"), None); + assert_eq!(select_person_image_url("ef/gh.jpg", "x"), None); + } + + #[test] + fn empty_image_path_results_in_empty_selected_url() { + assert_eq!(select_subject_image_url("", "small").as_deref(), Some("")); + assert_eq!(select_person_image_url("", "small").as_deref(), Some("")); + assert_eq!( + DEFAULT_IMAGE_URL, + "https://lain.bgm.tv/img/no_icon_subject.png" + ); + } +} diff --git a/crates/app/src/server/mod.rs b/crates/app/src/server/mod.rs new file mode 100644 index 000000000..b63a2c495 --- /dev/null +++ b/crates/app/src/server/mod.rs @@ -0,0 +1,870 @@ +use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; + +use axum::{ + body::Body, + extract::{Query, State}, + http::StatusCode, + http::{ + header::AUTHORIZATION, header::USER_AGENT, HeaderMap, HeaderName, HeaderValue, + Request, + }, + middleware::{self, Next}, + response::{IntoResponse, Response}, + routing::{get, post}, + Json, Router, +}; +use meilisearch_sdk::{client::Client as MeiliClient, search::SearchResults}; +use serde::{Deserialize, Serialize}; +use sqlx::MySqlPool; +use std::{ + 
collections::HashMap, + sync::OnceLock, + time::{SystemTime, UNIX_EPOCH}, +}; +use tracing::Instrument; +use utoipa::OpenApi; +use uuid::Uuid; + +mod characters; +mod media; +mod persons; +mod subjects; +#[cfg(test)] +mod test_mocks; +mod users; + +const DEFAULT_LIMIT: usize = 10; +const MAX_LIMIT: usize = 20; +const NSFW_THRESHOLD_SECONDS: i64 = 60 * 24 * 60 * 60; + +#[derive(Clone)] +pub struct AppState { + meili: MeiliClient, + pool: MySqlPool, +} + +impl AppState { + pub fn new(meili: MeiliClient, pool: MySqlPool) -> Self { + Self { meili, pool } + } + + pub fn pool(&self) -> &MySqlPool { + &self.pool + } +} + +pub(super) trait MySqlExecutor<'e>: + sqlx::Executor<'e, Database = sqlx::MySql> +{ +} + +impl<'e, T> MySqlExecutor<'e> for T where T: sqlx::Executor<'e, Database = sqlx::MySql> {} + +#[derive(Debug, Clone, Copy, Default)] +pub(super) struct RequestAuth { + pub(super) user_id: Option, + pub(super) allow_nsfw: bool, +} + +#[derive(Debug, Deserialize, utoipa::IntoParams)] +#[into_params(parameter_in = Query)] +pub(super) struct PageQuery { + limit: Option, + offset: Option, +} + +#[derive(Debug, Serialize, utoipa::ToSchema)] +pub(super) struct ErrorBody { + error: String, +} + +#[derive(Debug, Serialize, utoipa::ToSchema)] +pub(super) struct PageInfo { + total: usize, + limit: usize, + offset: usize, +} + +impl PageInfo { + pub(super) fn new(total: usize, limit: usize, offset: usize) -> Self { + Self { + total, + limit, + offset, + } + } +} + +#[derive(OpenApi)] +#[openapi(paths( + subjects::search_subjects, + subjects::get_subject, + subjects::get_subject_image, + subjects::get_subject_related_persons, + subjects::get_subject_related_characters, + subjects::get_subject_related_subjects, + characters::search_characters, + characters::get_character, + characters::get_character_image, + characters::collect_character, + characters::uncollect_character, + characters::get_character_related_subjects, + characters::get_character_related_persons, + 
persons::search_persons, + persons::get_person, + persons::get_person_image, + persons::collect_person, + persons::uncollect_person, + persons::get_person_related_subjects, + persons::get_person_related_characters, + users::list_user_collections, + users::get_user_collection, +))] +struct ApiDoc; + +#[derive(Debug)] +pub(super) struct ApiError { + status: StatusCode, + message: String, +} + +impl ApiError { + pub(super) fn bad_request(message: impl Into) -> Self { + Self { + status: StatusCode::BAD_REQUEST, + message: message.into(), + } + } + + pub(super) fn internal(message: impl Into) -> Self { + Self { + status: StatusCode::INTERNAL_SERVER_ERROR, + message: message.into(), + } + } + + pub(super) fn not_found(message: impl Into) -> Self { + Self { + status: StatusCode::NOT_FOUND, + message: message.into(), + } + } + + pub(super) fn unauthorized(message: impl Into) -> Self { + Self { + status: StatusCode::UNAUTHORIZED, + message: message.into(), + } + } +} + +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + ( + self.status, + Json(ErrorBody { + error: self.message, + }), + ) + .into_response() + } +} + +pub(super) type ApiResult = Result, ApiError>; + +pub async fn state_from_env() -> anyhow::Result { + let meili_url = std::env::var("RUST_MEILISEARCH_URL") + .or_else(|_| std::env::var("MEILISEARCH_URL")) + .unwrap_or_default(); + if meili_url.trim().is_empty() { + anyhow::bail!("missing env RUST_MEILISEARCH_URL or MEILISEARCH_URL") + } + + let meili_key = std::env::var("RUST_MEILISEARCH_KEY") + .or_else(|_| std::env::var("MEILISEARCH_KEY")) + .ok(); + + let mysql_dsn = std::env::var("RUST_MYSQL_DSN") + .or_else(|_| std::env::var("MYSQL_DSN")) + .map_err(|_| anyhow::anyhow!("missing env RUST_MYSQL_DSN or MYSQL_DSN"))?; + let pool = sqlx::mysql::MySqlPoolOptions::new() + .max_connections(5) + .connect(&mysql_dsn) + .await + .map_err(|e| anyhow::anyhow!(e))?; + + let meili = MeiliClient::new(meili_url.trim_end_matches('/'), meili_key) + 
.map_err(|e| anyhow::anyhow!(e))?; + + Ok(AppState::new(meili, pool)) +} + +pub fn build_router(state: AppState) -> Router { + Router::new() + .route("/openapi.json", get(openapi_json)) + .route("/v0/search/subjects", post(subjects::search_subjects)) + .route("/v0/search/characters", post(characters::search_characters)) + .route("/v0/search/persons", post(persons::search_persons)) + .route("/v0/subjects/:subject_id", get(subjects::get_subject)) + .route( + "/v0/subjects/:subject_id/image", + get(subjects::get_subject_image), + ) + .route( + "/v0/subjects/:subject_id/persons", + get(subjects::get_subject_related_persons), + ) + .route( + "/v0/subjects/:subject_id/characters", + get(subjects::get_subject_related_characters), + ) + .route( + "/v0/subjects/:subject_id/subjects", + get(subjects::get_subject_related_subjects), + ) + .route( + "/v0/characters/:character_id", + get(characters::get_character), + ) + .route( + "/v0/characters/:character_id/image", + get(characters::get_character_image), + ) + .route( + "/v0/characters/:character_id/collect", + post(characters::collect_character).delete(characters::uncollect_character), + ) + .route( + "/v0/characters/:character_id/subjects", + get(characters::get_character_related_subjects), + ) + .route( + "/v0/characters/:character_id/persons", + get(characters::get_character_related_persons), + ) + .route("/v0/persons/:person_id", get(persons::get_person)) + .route( + "/v0/persons/:person_id/image", + get(persons::get_person_image), + ) + .route( + "/v0/persons/:person_id/collect", + post(persons::collect_person).delete(persons::uncollect_person), + ) + .route( + "/v0/persons/:person_id/subjects", + get(persons::get_person_related_subjects), + ) + .route( + "/v0/persons/:person_id/characters", + get(persons::get_person_related_characters), + ) + .route( + "/v0/users/:username/collections", + get(users::list_user_collections), + ) + .route( + "/v0/users/:username/collections/:subject_id", + get(users::get_user_collection), 
+ ) + .layer(middleware::from_fn_with_state( + state.clone(), + request_log_context_middleware, + )) + .with_state(state) +} + +pub async fn run() -> anyhow::Result<()> { + let bind = + std::env::var("RUST_HTTP_ADDR").unwrap_or_else(|_| "127.0.0.1:3000".to_string()); + let addr: SocketAddr = bind.parse()?; + let state = state_from_env().await?; + let app = build_router(state); + + let listener = tokio::net::TcpListener::bind(addr).await?; + tracing::info!(%addr, "rust search server listening"); + + axum::serve( + listener, + app.into_make_service_with_connect_info::(), + ) + .await?; + Ok(()) +} + +async fn openapi_json() -> Json { + Json(ApiDoc::openapi()) +} + +async fn request_log_context_middleware( + State(state): State, + mut req: Request, + next: Next, +) -> Response { + let auth = request_auth_from_headers(&state, req.headers()).await; + req.extensions_mut().insert(auth); + + let request_id = request_id_from_header(req.headers().get("Cf-Ray")); + let user_agent = req + .headers() + .get(USER_AGENT) + .and_then(|h| h.to_str().ok()) + .unwrap_or("") + .to_owned(); + let remote_addr = extract_client_ip(&req) + .map(|x| x.to_string()) + .unwrap_or_default(); + + let method = req.method().clone(); + let path = req.uri().path().to_owned(); + + let span = tracing::info_span!( + "http.request", + request_id = %request_id, + user_id = ?auth.user_id, + allow_nsfw = auth.allow_nsfw, + user_agent = %user_agent, + remote_addr = %remote_addr, + method = %method, + path = %path, + ); + + let mut response = next.run(req).instrument(span).await; + + if let Ok(v) = HeaderValue::from_str(&request_id) { + response + .headers_mut() + .insert(HeaderName::from_static("cf-ray"), v.clone()); + response + .headers_mut() + .insert(HeaderName::from_static("x-request-id"), v); + } + + response +} + +#[derive(Debug, Clone)] +enum TrustedNet { + V4(Ipv4Addr, u8), + V6(Ipv6Addr, u8), +} + +static TRUSTED_PROXIES: OnceLock> = OnceLock::new(); + +fn trusted_proxies() -> &'static 
[TrustedNet] { + TRUSTED_PROXIES + .get_or_init(|| { + std::env::var("RUST_TRUSTED_PROXIES") + .ok() + .into_iter() + .flat_map(|raw| { + raw + .split(',') + .map(str::trim) + .map(ToOwned::to_owned) + .collect::>() + }) + .filter_map(|s| parse_trusted_net(&s)) + .collect() + }) + .as_slice() +} + +fn parse_trusted_net(raw: &str) -> Option { + if raw.is_empty() { + return None; + } + + let (ip_str, prefix) = if let Some((ip, p)) = raw.split_once('/') { + let prefix = p.parse::().ok()?; + (ip, Some(prefix)) + } else { + (raw, None) + }; + + let ip = ip_str.parse::().ok()?; + match ip { + IpAddr::V4(v4) => Some(TrustedNet::V4(v4, prefix.unwrap_or(32))), + IpAddr::V6(v6) => Some(TrustedNet::V6(v6, prefix.unwrap_or(128))), + } +} + +fn extract_client_ip(req: &Request) -> Option { + let peer_ip = req + .extensions() + .get::>() + .map(|x| x.0.ip())?; + + if !is_trusted_proxy(peer_ip) { + return Some(peer_ip); + } + + if let Some(ip) = header_ip(req, "cf-connecting-ip") { + return Some(ip); + } + + if let Some(ip) = forwarded_for_client_ip(req, peer_ip) { + return Some(ip); + } + + if let Some(ip) = header_ip(req, "x-real-ip") { + return Some(ip); + } + + Some(peer_ip) +} + +fn header_ip(req: &Request, name: &str) -> Option { + req + .headers() + .get(name) + .and_then(|v| v.to_str().ok()) + .and_then(parse_header_ip) +} + +fn parse_header_ip(raw: &str) -> Option { + let s = raw.trim(); + if s.is_empty() { + return None; + } + + if let Ok(ip) = s.parse::() { + return Some(ip); + } + + if s.starts_with('[') && s.ends_with(']') { + return s[1..s.len() - 1].parse::().ok(); + } + + None +} + +fn forwarded_for_client_ip(req: &Request, peer_ip: IpAddr) -> Option { + let header = req.headers().get("x-forwarded-for")?.to_str().ok()?; + let mut chain: Vec = header.split(',').filter_map(parse_header_ip).collect(); + + chain.push(peer_ip); + + chain.into_iter().rev().find(|&ip| !is_trusted_proxy(ip)) +} + +fn is_trusted_proxy(ip: IpAddr) -> bool { + 
trusted_proxies().iter().any(|net| ip_in_net(ip, net)) +} + +fn ip_in_net(ip: IpAddr, net: &TrustedNet) -> bool { + match (ip, net) { + (IpAddr::V4(ip), TrustedNet::V4(base, prefix)) => { + let p = (*prefix).min(32); + let ip_u = u32::from(ip); + let base_u = u32::from(*base); + let mask = if p == 0 { 0 } else { u32::MAX << (32 - p) }; + (ip_u & mask) == (base_u & mask) + } + (IpAddr::V6(ip), TrustedNet::V6(base, prefix)) => { + let p = (*prefix).min(128); + let ip_u = u128::from_be_bytes(ip.octets()); + let base_u = u128::from_be_bytes(base.octets()); + let mask = if p == 0 { 0 } else { u128::MAX << (128 - p) }; + (ip_u & mask) == (base_u & mask) + } + _ => false, + } +} + +fn request_id_from_header(value: Option<&HeaderValue>) -> String { + value + .and_then(|h| h.to_str().ok()) + .map(str::trim) + .filter(|v| !v.is_empty()) + .map(ToOwned::to_owned) + .unwrap_or_else(|| Uuid::now_v7().to_string()) +} + +async fn request_auth_from_headers( + state: &AppState, + headers: &HeaderMap, +) -> RequestAuth { + let token = match bearer_token_from_headers(headers) { + Ok(v) => v, + Err(_) => return RequestAuth::default(), + }; + + let raw_user_id = match sqlx::query_scalar::<_, String>( + r#"SELECT t.user_id + FROM chii_oauth_access_tokens t + WHERE t.access_token = BINARY ? AND t.expires > NOW() + LIMIT 1"#, + ) + .bind(token) + .fetch_optional(&state.pool) + .await + { + Ok(v) => v, + Err(_) => return RequestAuth::default(), + }; + + let Some(raw_user_id) = raw_user_id else { + return RequestAuth::default(); + }; + + let user_id = match parse_user_id(&raw_user_id) { + Some(v) => v, + None => return RequestAuth::default(), + }; + + let regdate = match sqlx::query_scalar::<_, i64>( + "SELECT regdate FROM chii_members WHERE uid = ? 
LIMIT 1", + ) + .bind(user_id) + .fetch_optional(&state.pool) + .await + { + Ok(v) => v, + Err(_) => return RequestAuth::default(), + }; + + let Some(regdate) = regdate else { + return RequestAuth::default(); + }; + + let now = match SystemTime::now().duration_since(UNIX_EPOCH) { + Ok(v) => v.as_secs() as i64, + Err(_) => return RequestAuth::default(), + }; + + RequestAuth { + user_id: Some(user_id), + allow_nsfw: now.saturating_sub(regdate) >= NSFW_THRESHOLD_SECONDS, + } +} + +fn parse_user_id(raw: &str) -> Option { + let value = raw.trim(); + if value.is_empty() || !value.chars().all(|c| c.is_ascii_digit()) { + return None; + } + + value.parse::().ok() +} + +fn bearer_token_from_headers(headers: &HeaderMap) -> Result<&str, ()> { + let Some(auth_header) = headers.get(AUTHORIZATION) else { + return Err(()); + }; + + let Ok(auth_header) = auth_header.to_str() else { + return Err(()); + }; + + let Some((scheme, token)) = auth_header.split_once(' ') else { + return Err(()); + }; + + if scheme != "Bearer" || token.trim().is_empty() { + return Err(()); + } + + Ok(token.trim()) +} + +pub(super) fn user_id_from_auth(auth: RequestAuth) -> Result { + auth + .user_id + .ok_or_else(|| ApiError::unauthorized("missing or invalid bearer token")) +} + +#[derive(Debug, Clone, Deserialize)] +struct PlatformInfo { + #[serde(default)] + r#type: String, + #[serde(default)] + type_cn: String, +} + +static PLATFORM_MAP: OnceLock>> = + OnceLock::new(); + +fn platform_map() -> &'static HashMap> { + PLATFORM_MAP.get_or_init(|| { + serde_json::from_str(include_str!("../../../../pkg/vars/platform.go.json")) + .unwrap_or_default() + }) +} + +#[derive(Debug, Clone, Deserialize)] +struct VarsMapRoot { + define: VarsMapDefine, +} + +#[derive(Debug, Clone, Deserialize)] +struct VarsMapDefine { + #[serde(rename = "type")] + type_map: HashMap, + types: HashMap>, +} + +#[derive(Debug, Clone, Deserialize)] +struct VarsMapItem { + #[serde(default)] + cn: String, +} + +fn decode_vars_map(raw: &str) -> 
HashMap> { + let parsed: VarsMapRoot = match serde_json::from_str(raw) { + Ok(v) => v, + Err(_) => return HashMap::new(), + }; + + let mut result: HashMap> = HashMap::new(); + + for (type_name, values) in parsed.define.types { + let Some(type_id) = parsed.define.type_map.get(&type_name).copied() else { + continue; + }; + + let mut items = HashMap::new(); + for (relation_id, relation) in values { + let Ok(id) = relation_id.parse::() else { + continue; + }; + if !relation.cn.is_empty() { + items.insert(id, relation.cn); + } + } + + if !items.is_empty() { + result.insert(type_id, items); + } + } + + result +} + +static RELATION_MAP: OnceLock>> = OnceLock::new(); + +fn relation_map() -> &'static HashMap> { + RELATION_MAP.get_or_init(|| { + decode_vars_map(include_str!("../../../../pkg/vars/relations.go.json")) + }) +} + +static STAFF_MAP: OnceLock>> = OnceLock::new(); + +fn staff_map() -> &'static HashMap> { + STAFF_MAP.get_or_init(|| { + decode_vars_map(include_str!("../../../../pkg/vars/staffs.go.json")) + }) +} + +fn subject_type_string(subject_type: u8) -> &'static str { + match subject_type { + 1 => "书籍", + 2 => "动画", + 3 => "音乐", + 4 => "游戏", + 6 => "三次元", + _ => "unknown subject type", + } +} + +pub(super) fn relation_string( + destination_subject_type: u8, + relation_type: u16, +) -> String { + if relation_type == 1 { + return subject_type_string(destination_subject_type).to_string(); + } + + relation_map() + .get(&destination_subject_type) + .and_then(|m| m.get(&relation_type)) + .cloned() + .unwrap_or_else(|| subject_type_string(destination_subject_type).to_string()) +} + +pub(super) fn staff_string(subject_type: u8, staff_type: u16) -> String { + staff_map() + .get(&subject_type) + .and_then(|m| m.get(&staff_type)) + .cloned() + .unwrap_or_default() +} + +pub(super) fn character_staff_string(staff_type: u8) -> String { + match staff_type { + 1 => "主角".to_string(), + 2 => "配角".to_string(), + 3 => "客串".to_string(), + 4 => "闲角".to_string(), + 5 => 
"旁白".to_string(), + 6 => "声库".to_string(), + _ => String::new(), + } +} + +pub(super) fn platform_string(subject_type: u8, platform_id: u16) -> Option { + let platform = platform_map() + .get(&subject_type) + .and_then(|by_type| by_type.get(&platform_id)); + + match platform { + Some(v) => { + if !v.type_cn.is_empty() { + Some(v.type_cn.clone()) + } else if !v.r#type.is_empty() { + Some(v.r#type.clone()) + } else { + None + } + } + None => { + if subject_type != 0 { + tracing::warn!(subject_type, platform_id, "unknown platform mapping"); + } + None + } + } +} + +pub(super) async fn execute_search( + state: &AppState, + index: &str, + keyword: &str, + limit: usize, + offset: usize, + filter: Option, + sort: Option<&[&str]>, +) -> Result, ApiError> +where + T: serde::de::DeserializeOwned + Send + Sync + 'static, +{ + let index = state.meili.index(index); + let mut query = index.search(); + query + .with_query(keyword) + .with_limit(limit) + .with_offset(offset); + + if let Some(filter) = filter.as_deref() { + query.with_filter(filter); + } + + if let Some(sort) = sort { + query.with_sort(sort); + } + + query.execute().await.map_err(|e| { + tracing::error!(error = %e, "search request failed"); + ApiError::internal("search failed") + }) +} + +pub(super) fn search_total(docs: &SearchResults) -> usize { + docs + .estimated_total_hits + .or(docs.total_hits) + .unwrap_or(docs.hits.len()) +} + +pub(super) fn parse_page(page: Query) -> (usize, usize) { + let page = page.0; + let limit = page.limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT); + let offset = page.offset.unwrap_or(0); + (limit, offset) +} + +pub(super) fn join_filter(items: &[String]) -> Option { + if items.is_empty() { + None + } else { + Some(items.join(" AND ")) + } +} + +pub(super) fn quote_str(value: &str) -> String { + match serde_json::to_string(value) { + Ok(v) => v, + Err(_) => format!("\"{}\"", value.replace('"', "\\\"")), + } +} + +fn parse_op_and_value(input: &str) -> (&str, &str) { + let trimmed = 
input.trim(); + if let Some(rest) = trimmed.strip_prefix(">=") { + return (">=", rest.trim()); + } + if let Some(rest) = trimmed.strip_prefix("<=") { + return ("<=", rest.trim()); + } + if let Some(rest) = trimmed.strip_prefix('>') { + return (">", rest.trim()); + } + if let Some(rest) = trimmed.strip_prefix('<') { + return ("<", rest.trim()); + } + if let Some(rest) = trimmed.strip_prefix('=') { + return ("=", rest.trim()); + } + ("=", trimmed) +} + +pub(super) fn parse_integer_filter( + input: &str, + key: &str, +) -> Result<(String, i64), ApiError> { + let (op, value) = parse_op_and_value(input); + let number = value.parse::().map_err(|_| { + ApiError::bad_request(format!( + "invalid {key} filter: {input:?}, should be in the format of \"^(>|<|>=|<=|=) *\\d+$\"" + )) + })?; + Ok((op.to_string(), number)) +} + +pub(super) fn parse_float_filter( + input: &str, + key: &str, +) -> Result<(String, f64), ApiError> { + let (op, value) = parse_op_and_value(input); + let number = value.parse::().map_err(|_| { + ApiError::bad_request(format!( + "invalid {key} filter: {input:?}, should be in the format of \"^(>|<|>=|<=|=) *\\d+(\\.\\d+)?$\"" + )) + })?; + Ok((op.to_string(), number)) +} + +pub(super) fn parse_date_filter(input: &str) -> Result<(String, i32), ApiError> { + let (op, value) = parse_op_and_value(input); + let value = parse_ymd_to_int(value).ok_or_else(|| { + ApiError::bad_request(format!( + "invalid date filter: {input:?}, date should be in the format of \"YYYY-MM-DD\"" + )) + })?; + Ok((op.to_string(), value)) +} + +fn parse_ymd_to_int(date: &str) -> Option { + if date.len() < 10 { + return None; + } + + let bytes = date.as_bytes(); + if bytes.get(4).copied() != Some(b'-') || bytes.get(7).copied() != Some(b'-') { + return None; + } + + if !date[0..4].chars().all(|c| c.is_ascii_digit()) + || !date[5..7].chars().all(|c| c.is_ascii_digit()) + || !date[8..10].chars().all(|c| c.is_ascii_digit()) + { + return None; + } + + let year = date[0..4].parse::().ok()?; + 
let month = date[5..7].parse::().ok()?; + let day = date[8..10].parse::().ok()?; + + Some(year * 10000 + month * 100 + day) +} diff --git a/crates/app/src/server/persons.rs b/crates/app/src/server/persons.rs new file mode 100644 index 000000000..1204eb46e --- /dev/null +++ b/crates/app/src/server/persons.rs @@ -0,0 +1,637 @@ +use async_trait::async_trait; +use axum::{ + extract::{Extension, Path, Query, State}, + http::StatusCode, + response::Redirect, + Json, +}; +use serde::{Deserialize, Serialize}; +use sqlx::QueryBuilder; +use std::collections::HashMap; +use utoipa::ToSchema; + +#[cfg(test)] +use mockall::automock; + +use super::media::{ + person_image, select_person_image_url, select_subject_image_url, PersonImages, + DEFAULT_IMAGE_URL, +}; +use super::{ + character_staff_string, execute_search, join_filter, parse_page, quote_str, + search_total, staff_string, user_id_from_auth, ApiResult, AppState, MySqlExecutor, + PageInfo, PageQuery, RequestAuth, +}; + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct PersonReq { + keyword: String, + #[serde(default)] + filter: PersonFilter, +} + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct PersonFilter { + #[serde(default)] + career: Vec, +} + +#[derive(Debug, Deserialize, Serialize)] +struct SearchHit { + id: u32, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct PersonDoc { + id: u32, + name: String, + #[serde(rename = "type")] + person_type: u8, + career: Vec, + short_summary: String, + locked: bool, + #[serde(default)] + images: PersonImages, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct PersonSearchResponse { + #[serde(flatten)] + page: PageInfo, + data: Vec, +} + +#[derive(sqlx::FromRow)] +struct PersonRow { + prsn_id: u32, + prsn_name: String, + prsn_type: u8, + prsn_producer: bool, + prsn_mangaka: bool, + prsn_artist: bool, + prsn_seiyu: bool, + prsn_writer: bool, + prsn_illustrator: bool, + prsn_actor: bool, + prsn_summary: 
String, + prsn_img: String, + prsn_lock: i8, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct PersonRelatedSubject { + id: u32, + #[serde(rename = "type")] + subject_type: u8, + staff: String, + eps: String, + name: String, + name_cn: String, + image: String, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct PersonRelatedCharacter { + id: u32, + name: String, + #[serde(rename = "type")] + character_type: u8, + images: PersonImages, + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + staff: String, +} + +#[derive(sqlx::FromRow)] +struct RelatedSubjectRow { + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + subject_image: String, + staff_type: u16, + eps: String, +} + +#[derive(sqlx::FromRow)] +struct RelatedCharacterRow { + character_id: u32, + character_name: String, + character_type: u8, + character_img: String, + subject_id: u32, + subject_type: u8, + subject_name: String, + subject_name_cn: String, + relation_type: u8, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub(super) struct ImageQuery { + #[serde(rename = "type")] + image_type: String, +} + +#[cfg_attr(test, automock)] +#[async_trait] +pub(super) trait PersonImageRepo: Send + Sync { + async fn find_person_image_path( + &self, + person_id: u32, + ) -> Result, super::ApiError>; +} + +struct DbPersonImageRepo<'a> { + pool: &'a sqlx::MySqlPool, +} + +#[async_trait] +impl PersonImageRepo for DbPersonImageRepo<'_> { + async fn find_person_image_path( + &self, + person_id: u32, + ) -> Result, super::ApiError> { + sqlx::query_scalar::<_, String>( + "SELECT prsn_img FROM chii_persons WHERE prsn_redirect = 0 AND prsn_id = ? 
LIMIT 1", + ) + .bind(person_id) + .fetch_optional(self.pool) + .await + .map_err(|_| super::ApiError::internal("load person image failed")) + } +} + +#[utoipa::path( + post, + path = "/v0/search/persons", + request_body = PersonReq, + params(PageQuery), + responses( + (status = 200, description = "返回搜索结果", body = PersonSearchResponse), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn search_persons( + State(state): State, + page: Query, + Json(body): Json, +) -> ApiResult { + let (limit, offset) = parse_page(page); + + let mut filters = Vec::new(); + for career in &body.filter.career { + filters.push(format!("career = {}", quote_str(career))); + } + + let docs = execute_search::( + &state, + "persons", + &body.keyword, + limit, + offset, + join_filter(&filters), + None, + ) + .await?; + + let ids: Vec = docs.hits.iter().map(|x| x.result.id).collect(); + let data = load_persons(&state, &ids).await?; + + let total = search_total(&docs); + + Ok(Json(PersonSearchResponse { + page: PageInfo::new(total, limit, offset), + data, + })) +} + +#[utoipa::path( + get, + path = "/v0/persons/{person_id}", + params(("person_id" = u32, Path, description = "人物 ID")), + responses( + (status = 200, description = "人物详情", body = PersonDoc), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_person( + State(state): State, + Path(person_id): Path, +) -> ApiResult { + let row = sqlx::query_as::<_, PersonRow>( + "SELECT prsn_id, prsn_name, prsn_type, \ + prsn_producer, prsn_mangaka, prsn_artist, prsn_seiyu, prsn_writer, prsn_illustrator, prsn_actor, \ + prsn_summary, prsn_img, prsn_lock \ + FROM chii_persons WHERE prsn_redirect = 0 AND prsn_id = ? 
LIMIT 1", + ) + .bind(person_id) + .fetch_optional(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load person failed"))?; + + let row = row.ok_or_else(|| super::ApiError::not_found("person not found"))?; + Ok(Json(person_from_row(&row))) +} + +#[utoipa::path( + get, + path = "/v0/persons/{person_id}/image", + params( + ("person_id" = u32, Path, description = "人物 ID"), + ("type" = String, Query, description = "图片尺寸,可选值:small, grid, large, medium") + ), + responses( + (status = 302, description = "重定向到图片地址"), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_person_image( + State(state): State, + Path(person_id): Path, + Query(query): Query, +) -> Result { + let repo = DbPersonImageRepo { pool: &state.pool }; + let image_url = resolve_person_image_url(&repo, person_id, &query.image_type).await?; + Ok(Redirect::temporary(&image_url)) +} + +#[utoipa::path( + post, + path = "/v0/persons/{person_id}/collect", + params(("person_id" = u32, Path, description = "人物 ID")), + responses( + (status = 204, description = "收藏成功"), + (status = 401, description = "未授权", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn collect_person( + State(state): State, + Extension(auth): Extension, + Path(person_id): Path, +) -> Result { + let user_id = user_id_from_auth(auth)?; + collect_person_with_pool(&state.pool, user_id, person_id).await?; + Ok(StatusCode::NO_CONTENT) +} + +#[utoipa::path( + delete, + path = "/v0/persons/{person_id}/collect", + params(("person_id" = u32, Path, description = "人物 ID")), + responses( + (status = 204, description = "取消收藏成功"), + (status = 401, description = "未授权", body = super::ErrorBody), + (status = 404, description = "未找到", body = 
super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn uncollect_person( + State(state): State, + Extension(auth): Extension, + Path(person_id): Path, +) -> Result { + let user_id = user_id_from_auth(auth)?; + uncollect_person_with_pool(&state.pool, user_id, person_id).await?; + Ok(StatusCode::NO_CONTENT) +} + +#[utoipa::path( + get, + path = "/v0/persons/{person_id}/subjects", + params(("person_id" = u32, Path, description = "人物 ID")), + responses( + (status = 200, description = "人物关联条目", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_person_related_subjects( + State(state): State, + Path(person_id): Path, +) -> ApiResult> { + ensure_person_exists(&state.pool, person_id).await?; + + let rows: Vec = sqlx::query_as( + "SELECT s.subject_id, s.subject_type_id AS subject_type, s.subject_name, s.subject_name_cn, s.subject_image, \ + i.prsn_position AS staff_type, i.prsn_appear_eps AS eps \ + FROM chii_person_cs_index i \ + JOIN chii_subjects s ON s.subject_id = i.subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE i.prsn_type = 'prsn' AND i.prsn_id = ? 
AND s.subject_ban = 0 AND f.field_redirect = 0 \ + ORDER BY i.prsn_position, s.subject_id", + ) + .bind(person_id) + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load related subjects failed"))?; + + let data = rows + .into_iter() + .map(|row| PersonRelatedSubject { + id: row.subject_id, + subject_type: row.subject_type, + staff: staff_string(row.subject_type, row.staff_type), + eps: row.eps, + name: row.subject_name, + name_cn: row.subject_name_cn, + image: select_subject_image_url(&row.subject_image, "large").unwrap_or_default(), + }) + .collect(); + + Ok(Json(data)) +} + +#[utoipa::path( + get, + path = "/v0/persons/{person_id}/characters", + params(("person_id" = u32, Path, description = "人物 ID")), + responses( + (status = 200, description = "人物关联角色", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_person_related_characters( + State(state): State, + Path(person_id): Path, +) -> ApiResult> { + ensure_person_exists(&state.pool, person_id).await?; + + let rows: Vec = sqlx::query_as( + "SELECT c.crt_id AS character_id, c.crt_name AS character_name, c.crt_role AS character_type, c.crt_img AS character_img, \ + s.subject_id, s.subject_type_id AS subject_type, s.subject_name, s.subject_name_cn, \ + COALESCE(si.crt_type, 0) AS relation_type \ + FROM chii_crt_cast_index ci \ + JOIN chii_characters c ON c.crt_id = ci.crt_id \ + JOIN chii_subjects s ON s.subject_id = ci.subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + LEFT JOIN chii_crt_subject_index si ON si.crt_id = ci.crt_id AND si.subject_id = ci.subject_id \ + WHERE ci.prsn_id = ? 
AND c.crt_redirect = 0 AND s.subject_ban = 0 AND f.field_redirect = 0 \ + ORDER BY s.subject_id, c.crt_id", + ) + .bind(person_id) + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load related characters failed"))?; + + let data = rows + .into_iter() + .map(|row| PersonRelatedCharacter { + id: row.character_id, + name: row.character_name, + character_type: row.character_type, + images: person_image(&row.character_img), + subject_id: row.subject_id, + subject_type: row.subject_type, + subject_name: row.subject_name, + subject_name_cn: row.subject_name_cn, + staff: character_staff_string(row.relation_type), + }) + .collect(); + + Ok(Json(data)) +} + +async fn ensure_person_exists<'e>( + executor: impl MySqlExecutor<'e>, + person_id: u32, +) -> Result<(), super::ApiError> { + let exists = sqlx::query_scalar::<_, u32>( + "SELECT prsn_id FROM chii_persons WHERE prsn_redirect = 0 AND prsn_id = ? LIMIT 1", + ) + .bind(person_id) + .fetch_optional(executor) + .await + .map_err(|_| super::ApiError::internal("load person failed"))? + .is_some(); + + if !exists { + return Err(super::ApiError::not_found("person not found")); + } + + Ok(()) +} + +pub(super) async fn collect_person_with_pool( + pool: &sqlx::MySqlPool, + user_id: u32, + person_id: u32, +) -> Result<(), super::ApiError> { + ensure_person_exists(pool, person_id).await?; + + let exists = sqlx::query_scalar::<_, u32>( + "SELECT prsn_clt_id FROM chii_person_collects WHERE prsn_clt_cat = 'prsn' AND prsn_clt_uid = ? AND prsn_clt_mid = ? LIMIT 1", + ) + .bind(user_id) + .bind(person_id) + .fetch_optional(pool) + .await + .map_err(|_| super::ApiError::internal("query person collect failed"))? 
+ .is_some(); + + if exists { + return Ok(()); + } + + sqlx::query( + "INSERT INTO chii_person_collects (prsn_clt_cat, prsn_clt_mid, prsn_clt_uid, prsn_clt_dateline) VALUES ('prsn', ?, ?, UNIX_TIMESTAMP())", + ) + .bind(person_id) + .bind(user_id) + .execute(pool) + .await + .map_err(|_| super::ApiError::internal("add person collect failed"))?; + + Ok(()) +} + +pub(super) async fn uncollect_person_with_pool( + pool: &sqlx::MySqlPool, + user_id: u32, + person_id: u32, +) -> Result<(), super::ApiError> { + ensure_person_exists(pool, person_id).await?; + + let result = sqlx::query( + "DELETE FROM chii_person_collects WHERE prsn_clt_cat = 'prsn' AND prsn_clt_uid = ? AND prsn_clt_mid = ?", + ) + .bind(user_id) + .bind(person_id) + .execute(pool) + .await + .map_err(|_| super::ApiError::internal("remove person collect failed"))?; + + if result.rows_affected() == 0 { + return Err(super::ApiError::not_found("person not collected")); + } + + Ok(()) +} + +async fn resolve_person_image_url( + repo: &impl PersonImageRepo, + person_id: u32, + image_type: &str, +) -> Result { + let path = repo.find_person_image_path(person_id).await?; + let path = path.ok_or_else(|| super::ApiError::not_found("person not found"))?; + + let image_url = select_person_image_url(&path, image_type).ok_or_else(|| { + super::ApiError::bad_request(format!("bad image type: {image_type}")) + })?; + + if image_url.is_empty() { + return Ok(DEFAULT_IMAGE_URL.to_string()); + } + + Ok(image_url) +} + +async fn load_persons( + state: &AppState, + ids: &[u32], +) -> Result, super::ApiError> { + if ids.is_empty() { + return Ok(Vec::new()); + } + + let mut qb = QueryBuilder::new( + "SELECT prsn_id, prsn_name, prsn_type, \ + prsn_producer, prsn_mangaka, prsn_artist, prsn_seiyu, prsn_writer, prsn_illustrator, prsn_actor, \ + prsn_summary, prsn_img, prsn_lock \ + FROM chii_persons WHERE prsn_redirect = 0 AND prsn_id IN (", + ); + + { + let mut separated = qb.separated(", "); + for id in ids { + 
separated.push_bind(*id); + } + } + qb.push(")"); + + let rows: Vec = qb + .build_query_as() + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load persons failed"))?; + + let by_id: HashMap = + rows.into_iter().map(|x| (x.prsn_id, x)).collect(); + + let mut result = Vec::with_capacity(ids.len()); + for id in ids { + if let Some(row) = by_id.get(id) { + result.push(person_from_row(row)); + } + } + + Ok(result) +} + +fn person_from_row(row: &PersonRow) -> PersonDoc { + PersonDoc { + id: row.prsn_id, + name: row.prsn_name.clone(), + person_type: row.prsn_type, + career: careers(row), + short_summary: row.prsn_summary.clone(), + locked: row.prsn_lock != 0, + images: person_image(&row.prsn_img), + } +} + +fn careers(row: &PersonRow) -> Vec { + let mut items = Vec::with_capacity(7); + + if row.prsn_writer { + items.push("writer".to_string()); + } + if row.prsn_producer { + items.push("producer".to_string()); + } + if row.prsn_mangaka { + items.push("mangaka".to_string()); + } + if row.prsn_artist { + items.push("artist".to_string()); + } + if row.prsn_seiyu { + items.push("seiyu".to_string()); + } + if row.prsn_illustrator { + items.push("illustrator".to_string()); + } + if row.prsn_actor { + items.push("actor".to_string()); + } + + items +} + +#[cfg(test)] +mod tests { + use super::resolve_person_image_url; + use crate::server::test_mocks::MockPool; + use axum::http::StatusCode; + + #[tokio::test] + async fn resolve_person_image_url_returns_default_for_empty_path() { + let mut pool = MockPool::new(); + pool + .person_image_repo + .expect_find_person_image_path() + .withf(|person_id| *person_id == 9) + .times(1) + .returning(|_| Ok(Some(String::new()))); + + let got = resolve_person_image_url(&pool.person_image_repo, 9, "small") + .await + .expect("resolve image"); + + assert_eq!(got, "https://lain.bgm.tv/img/no_icon_subject.png"); + } + + #[tokio::test] + async fn resolve_person_image_url_returns_not_found_when_missing() { + let mut pool = 
MockPool::new(); + pool + .person_image_repo + .expect_find_person_image_path() + .withf(|person_id| *person_id == 404) + .times(1) + .returning(|_| Ok(None)); + + let err = resolve_person_image_url(&pool.person_image_repo, 404, "small") + .await + .expect_err("expect not found"); + + assert_eq!(err.status, StatusCode::NOT_FOUND); + assert_eq!(err.message, "person not found"); + } + + #[tokio::test] + async fn resolve_person_image_url_returns_bad_request_for_invalid_type() { + let mut pool = MockPool::new(); + pool + .person_image_repo + .expect_find_person_image_path() + .times(1) + .returning(|_| Ok(Some("ab/cd.jpg".to_string()))); + + let err = resolve_person_image_url(&pool.person_image_repo, 1, "invalid") + .await + .expect_err("expect bad request"); + + assert_eq!(err.status, StatusCode::BAD_REQUEST); + assert_eq!(err.message, "bad image type: invalid"); + } +} diff --git a/crates/app/src/server/subjects.rs b/crates/app/src/server/subjects.rs new file mode 100644 index 000000000..3ec072b8d --- /dev/null +++ b/crates/app/src/server/subjects.rs @@ -0,0 +1,1029 @@ +use async_trait::async_trait; +use axum::{ + extract::{Extension, Path, Query, State}, + response::Redirect, + Json, +}; +use php_serialize::from_str as parse_php_serialize; +use serde::{Deserialize, Serialize}; +use sqlx::QueryBuilder; +use std::collections::HashMap; +use utoipa::ToSchema; + +#[cfg(test)] +use mockall::automock; + +use super::media::{ + person_image, select_subject_image_url, subject_image, PersonImages, SubjectImages, + DEFAULT_IMAGE_URL, +}; +use super::{ + character_staff_string, execute_search, join_filter, parse_date_filter, + parse_float_filter, parse_integer_filter, parse_page, platform_string, quote_str, + relation_string, search_total, staff_string, ApiError, ApiResult, AppState, + MySqlExecutor, PageInfo, PageQuery, RequestAuth, +}; + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct SubjectReq { + keyword: String, + #[serde(default)] + sort: String, + 
#[serde(default)] + filter: SubjectFilter, +} + +#[derive(Debug, Deserialize, Default, ToSchema)] +pub(super) struct SubjectFilter { + #[serde(default)] + r#type: Vec, + #[serde(default)] + tag: Vec, + #[serde(default)] + air_date: Vec, + #[serde(default)] + rating: Vec, + #[serde(default)] + rating_count: Vec, + #[serde(default)] + rank: Vec, + #[serde(default)] + meta_tags: Vec, + nsfw: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +struct SearchHit { + id: u32, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct SubjectDoc { + id: u32, + #[serde(rename = "type")] + type_id: u8, + name: String, + name_cn: String, + summary: String, + nsfw: bool, + locked: bool, + platform: Option, + #[serde(default)] + meta_tags: Vec, + volumes: u32, + eps: u32, + series: bool, + total_episodes: i64, + rating: Rating, + collection: Collection, + #[serde(default)] + tags: Vec, + images: SubjectImages, + date: Option, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct SubjectTag { + name: String, + count: u32, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct Collection { + wish: u32, + collect: u32, + doing: u32, + on_hold: u32, + dropped: u32, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct Rating { + rank: u32, + total: u32, + score: f64, + count: RatingCount, +} + +#[derive(Debug, Deserialize, Serialize, ToSchema)] +pub(super) struct RatingCount { + #[serde(rename = "1")] + field1: u32, + #[serde(rename = "2")] + field2: u32, + #[serde(rename = "3")] + field3: u32, + #[serde(rename = "4")] + field4: u32, + #[serde(rename = "5")] + field5: u32, + #[serde(rename = "6")] + field6: u32, + #[serde(rename = "7")] + field7: u32, + #[serde(rename = "8")] + field8: u32, + #[serde(rename = "9")] + field9: u32, + #[serde(rename = "10")] + field10: u32, +} + +#[derive(sqlx::FromRow)] +struct SubjectRow { + subject_id: u32, + subject_type_id: u8, + subject_name: String, + 
subject_name_cn: String, + field_summary: String, + subject_nsfw: bool, + subject_ban: u8, + subject_platform: u16, + field_meta_tags: String, + field_volumes: u32, + field_eps: u32, + subject_series: bool, + subject_image: String, + subject_wish: u32, + subject_collect: u32, + subject_doing: u32, + subject_on_hold: u32, + subject_dropped: u32, + field_rank: u32, + field_rate_1: u32, + field_rate_2: u32, + field_rate_3: u32, + field_rate_4: u32, + field_rate_5: u32, + field_rate_6: u32, + field_rate_7: u32, + field_rate_8: u32, + field_rate_9: u32, + field_rate_10: u32, + field_date: Option, + field_tags: Vec, +} + +#[derive(Deserialize)] +struct SubjectTagItem { + tag_name: Option, + tag_count: Option, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub(super) struct ImageQuery { + #[serde(rename = "type")] + image_type: String, +} + +#[cfg_attr(test, automock)] +#[async_trait] +pub(super) trait SubjectImageRepo: Send + Sync { + async fn find_subject_image_path( + &self, + subject_id: u32, + allow_nsfw: bool, + ) -> Result, ApiError>; +} + +struct DbSubjectImageRepo<'a> { + pool: &'a sqlx::MySqlPool, +} + +#[async_trait] +impl SubjectImageRepo for DbSubjectImageRepo<'_> { + async fn find_subject_image_path( + &self, + subject_id: u32, + allow_nsfw: bool, + ) -> Result, ApiError> { + let mut qb = QueryBuilder::new( + "SELECT s.subject_image \ + FROM chii_subjects s \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE s.subject_id = ", + ); + qb.push_bind(subject_id); + qb.push(" AND s.subject_ban = 0 AND f.field_redirect = 0"); + if !allow_nsfw { + qb.push(" AND s.subject_nsfw = 0"); + } + qb.push(" LIMIT 1"); + + qb.build_query_scalar() + .fetch_optional(self.pool) + .await + .map_err(|_| ApiError::internal("load subject image failed")) + } +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct SubjectSearchResponse { + #[serde(flatten)] + page: PageInfo, + data: Vec, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct 
SubjectRelatedSubject { + id: u32, + #[serde(rename = "type")] + subject_type: u8, + name: String, + name_cn: String, + images: SubjectImages, + relation: String, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct SubjectRelatedPerson { + id: u32, + name: String, + #[serde(rename = "type")] + person_type: u8, + career: Vec, + images: PersonImages, + relation: String, + eps: String, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct SubjectRelatedCharacter { + id: u32, + name: String, + summary: String, + #[serde(rename = "type")] + role: u8, + images: PersonImages, + relation: String, + #[serde(default)] + actors: Vec, +} + +#[derive(Debug, Clone, Serialize, ToSchema)] +pub(super) struct SubjectActor { + id: u32, + name: String, + short_summary: String, + #[serde(rename = "type")] + person_type: u8, + career: Vec, + images: PersonImages, + locked: bool, +} + +#[derive(sqlx::FromRow)] +struct RelatedSubjectRow { + relation_type: u16, + related_subject_id: u32, + related_subject_type_id: u8, + related_subject_name: String, + related_subject_name_cn: String, + related_subject_image: String, +} + +#[derive(sqlx::FromRow)] +struct RelatedPersonRow { + person_id: u32, + person_name: String, + person_type: u8, + person_img: String, + prsn_position: u16, + prsn_appear_eps: String, + prsn_producer: bool, + prsn_mangaka: bool, + prsn_artist: bool, + prsn_seiyu: bool, + prsn_writer: bool, + prsn_illustrator: bool, + prsn_actor: bool, +} + +#[derive(sqlx::FromRow)] +struct RelatedCharacterRow { + character_id: u32, + character_name: String, + character_type: u8, + character_summary: String, + character_img: String, + relation_type: u8, +} + +#[derive(sqlx::FromRow)] +struct RelatedActorRow { + character_id: u32, + actor_id: u32, + actor_name: String, + actor_summary: String, + actor_type: u8, + actor_img: String, + actor_lock: i8, + prsn_producer: bool, + prsn_mangaka: bool, + prsn_artist: bool, + prsn_seiyu: bool, + prsn_writer: bool, + 
prsn_illustrator: bool, + prsn_actor: bool, +} + +#[utoipa::path( + post, + path = "/v0/search/subjects", + request_body = SubjectReq, + params(PageQuery), + responses( + (status = 200, description = "返回搜索结果", body = SubjectSearchResponse), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] + +pub(super) async fn search_subjects( + State(state): State, + Extension(auth): Extension, + page: Query, + Json(body): Json, +) -> ApiResult { + let (limit, offset) = parse_page(page); + let allow_nsfw = auth.allow_nsfw; + + let mut filters = Vec::new(); + + if !body.filter.air_date.is_empty() { + let mut or_items = Vec::new(); + for raw in &body.filter.air_date { + let (op, value) = parse_date_filter(raw)?; + or_items.push(format!("date {op} {value}")); + } + filters.push(format!("({})", or_items.join(" OR "))); + } + + if !body.filter.r#type.is_empty() { + let mut or_items = Vec::new(); + for t in &body.filter.r#type { + or_items.push(format!("type = {t}")); + } + filters.push(format!("({})", or_items.join(" OR "))); + } + + if !allow_nsfw || body.filter.nsfw == Some(false) { + filters.push("(nsfw = false)".to_string()); + } + + for v in &body.filter.meta_tags { + filters.push(format!("meta_tag = {}", quote_str(v))); + } + + for v in &body.filter.tag { + filters.push(format!("tag = {}", quote_str(v))); + } + + for v in &body.filter.rank { + let (op, num) = parse_integer_filter(v, "rank")?; + filters.push(format!("rank {op} {num}")); + } + + for v in &body.filter.rating { + let (op, num) = parse_float_filter(v, "rating")?; + filters.push(format!("score {op} {num}")); + } + + for v in &body.filter.rating_count { + let (op, num) = parse_integer_filter(v, "rating_count")?; + filters.push(format!("rating_count {op} {num}")); + } + + let sort = match body.sort.as_str() { + "" | "match" => None, + "score" => Some(["score:desc"].as_slice()), + "heat" => Some(["heat:desc"].as_slice()), + "rank" 
=> Some(["rank:asc"].as_slice()), + _ => return Err(ApiError::bad_request("sort not supported")), + }; + + let docs = execute_search::( + &state, + "subjects", + &body.keyword, + limit, + offset, + join_filter(&filters), + sort, + ) + .await?; + + let ids: Vec = docs.hits.iter().map(|x| x.result.id).collect(); + let data = load_subjects(&state, &ids).await?; + + let total = search_total(&docs); + + Ok(Json(SubjectSearchResponse { + page: PageInfo::new(total, limit, offset), + data, + })) +} + +#[utoipa::path( + get, + path = "/v0/subjects/{subject_id}", + params(("subject_id" = u32, Path, description = "条目 ID")), + responses( + (status = 200, description = "条目详情", body = SubjectDoc), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_subject( + State(state): State, + Extension(auth): Extension, + Path(subject_id): Path, +) -> ApiResult { + let allow_nsfw = auth.allow_nsfw; + + let mut qb = QueryBuilder::new( + "SELECT s.subject_id, s.subject_type_id, s.subject_name, s.subject_name_cn, s.field_summary, \ + s.subject_nsfw, s.subject_ban, s.subject_platform, s.field_meta_tags, s.field_volumes, s.field_eps, \ + s.subject_series, s.subject_image, s.subject_wish, s.subject_collect, s.subject_doing, s.subject_on_hold, s.subject_dropped, \ + f.field_rank, f.field_rate_1, f.field_rate_2, f.field_rate_3, f.field_rate_4, f.field_rate_5, \ + f.field_rate_6, f.field_rate_7, f.field_rate_8, f.field_rate_9, f.field_rate_10, \ + DATE_FORMAT(f.field_date, '%Y-%m-%d') AS field_date, f.field_tags \ + FROM chii_subjects s \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE s.subject_id = ", + ); + qb.push_bind(subject_id); + qb.push(" AND s.subject_ban = 0 AND f.field_redirect = 0"); + if !allow_nsfw { + qb.push(" AND s.subject_nsfw = 0"); + } + qb.push(" LIMIT 1"); + + let row: Option = + qb.build_query_as() + .fetch_optional(&state.pool) + .await + 
.map_err(|_| ApiError::internal("load subject failed"))?; + + let row = row.ok_or_else(|| ApiError::not_found("subject not found"))?; + Ok(Json(subject_from_row(&row))) +} + +#[utoipa::path( + get, + path = "/v0/subjects/{subject_id}/image", + params( + ("subject_id" = u32, Path, description = "条目 ID"), + ("type" = String, Query, description = "图片尺寸,可选值:small, grid, large, medium, common") + ), + responses( + (status = 302, description = "重定向到图片地址"), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_subject_image( + State(state): State, + Extension(auth): Extension, + Path(subject_id): Path, + Query(query): Query, +) -> Result { + let allow_nsfw = auth.allow_nsfw; + let repo = DbSubjectImageRepo { pool: &state.pool }; + let image_url = + resolve_subject_image_url(&repo, subject_id, allow_nsfw, &query.image_type).await?; + + Ok(Redirect::temporary(&image_url)) +} + +#[utoipa::path( + get, + path = "/v0/subjects/{subject_id}/subjects", + params(("subject_id" = u32, Path, description = "条目 ID")), + responses( + (status = 200, description = "关联条目", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_subject_related_subjects( + State(state): State, + Extension(auth): Extension, + Path(subject_id): Path, +) -> ApiResult> { + let allow_nsfw = auth.allow_nsfw; + ensure_subject_exists(&state.pool, subject_id, allow_nsfw).await?; + + let mut qb = QueryBuilder::new( + "SELECT r.rlt_relation_type AS relation_type, \ + s.subject_id AS related_subject_id, s.subject_type_id AS related_subject_type_id, \ + s.subject_name AS related_subject_name, s.subject_name_cn AS related_subject_name_cn, s.subject_image AS related_subject_image \ + FROM chii_subject_relations r \ + JOIN chii_subjects 
s ON s.subject_id = r.rlt_related_subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE r.rlt_subject_id = ", + ); + qb.push_bind(subject_id); + qb.push(" AND s.subject_ban = 0 AND f.field_redirect = 0"); + if !allow_nsfw { + qb.push(" AND s.subject_nsfw = 0"); + } + qb.push(" ORDER BY r.rlt_order"); + + let rows: Vec = + qb.build_query_as() + .fetch_all(&state.pool) + .await + .map_err(|_| ApiError::internal("load related subjects failed"))?; + + let data = rows + .into_iter() + .map(|row| SubjectRelatedSubject { + id: row.related_subject_id, + subject_type: row.related_subject_type_id, + name: row.related_subject_name, + name_cn: row.related_subject_name_cn, + images: subject_image(&row.related_subject_image), + relation: relation_string(row.related_subject_type_id, row.relation_type), + }) + .collect(); + + Ok(Json(data)) +} + +#[utoipa::path( + get, + path = "/v0/subjects/{subject_id}/persons", + params(("subject_id" = u32, Path, description = "条目 ID")), + responses( + (status = 200, description = "关联人物", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_subject_related_persons( + State(state): State, + Extension(auth): Extension, + Path(subject_id): Path, +) -> ApiResult> { + let allow_nsfw = auth.allow_nsfw; + let subject_type = ensure_subject_exists(&state.pool, subject_id, allow_nsfw).await?; + + let rows: Vec = sqlx::query_as( + "SELECT p.prsn_id AS person_id, p.prsn_name AS person_name, p.prsn_type AS person_type, p.prsn_img AS person_img, \ + i.prsn_position, i.prsn_appear_eps, \ + p.prsn_producer, p.prsn_mangaka, p.prsn_artist, p.prsn_seiyu, p.prsn_writer, p.prsn_illustrator, p.prsn_actor \ + FROM chii_person_cs_index i \ + JOIN chii_persons p ON p.prsn_id = i.prsn_id \ + WHERE i.subject_id = ? 
AND i.prsn_type = 'prsn' AND p.prsn_redirect = 0 \ + ORDER BY i.prsn_position, p.prsn_id", + ) + .bind(subject_id) + .fetch_all(&state.pool) + .await + .map_err(|_| ApiError::internal("load related persons failed"))?; + + let data = rows + .into_iter() + .map(|row| SubjectRelatedPerson { + id: row.person_id, + name: row.person_name, + person_type: row.person_type, + career: careers_from_flags( + row.prsn_writer, + row.prsn_producer, + row.prsn_mangaka, + row.prsn_artist, + row.prsn_seiyu, + row.prsn_illustrator, + row.prsn_actor, + ), + images: person_image(&row.person_img), + relation: staff_string(subject_type, row.prsn_position), + eps: row.prsn_appear_eps, + }) + .collect(); + + Ok(Json(data)) +} + +#[utoipa::path( + get, + path = "/v0/subjects/{subject_id}/characters", + params(("subject_id" = u32, Path, description = "条目 ID")), + responses( + (status = 200, description = "关联角色", body = Vec), + (status = 404, description = "未找到", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_subject_related_characters( + State(state): State, + Extension(auth): Extension, + Path(subject_id): Path, +) -> ApiResult> { + let allow_nsfw = auth.allow_nsfw; + ensure_subject_exists(&state.pool, subject_id, allow_nsfw).await?; + + let rows: Vec = sqlx::query_as( + "SELECT c.crt_id AS character_id, c.crt_name AS character_name, c.crt_role AS character_type, \ + c.crt_summary AS character_summary, c.crt_img AS character_img, i.crt_type AS relation_type \ + FROM chii_crt_subject_index i \ + JOIN chii_characters c ON c.crt_id = i.crt_id \ + WHERE i.subject_id = ? 
AND c.crt_redirect = 0 \ + ORDER BY i.crt_order, c.crt_id", + ) + .bind(subject_id) + .fetch_all(&state.pool) + .await + .map_err(|_| ApiError::internal("load related characters failed"))?; + + let character_ids: Vec = rows.iter().map(|x| x.character_id).collect(); + let actors = + load_subject_character_actors(&state, subject_id, &character_ids).await?; + + let data = rows + .into_iter() + .map(|row| SubjectRelatedCharacter { + id: row.character_id, + name: row.character_name, + summary: row.character_summary, + role: row.character_type, + images: person_image(&row.character_img), + relation: character_staff_string(row.relation_type), + actors: actors.get(&row.character_id).cloned().unwrap_or_default(), + }) + .collect(); + + Ok(Json(data)) +} + +async fn resolve_subject_image_url( + repo: &impl SubjectImageRepo, + subject_id: u32, + allow_nsfw: bool, + image_type: &str, +) -> Result { + let path = repo.find_subject_image_path(subject_id, allow_nsfw).await?; + let path = path.ok_or_else(|| ApiError::not_found("subject not found"))?; + let image_url = select_subject_image_url(&path, image_type) + .ok_or_else(|| ApiError::bad_request(format!("bad image type: {image_type}")))?; + + if image_url.is_empty() { + return Ok(DEFAULT_IMAGE_URL.to_string()); + } + + Ok(image_url) +} + +async fn ensure_subject_exists<'e>( + executor: impl MySqlExecutor<'e>, + subject_id: u32, + allow_nsfw: bool, +) -> Result { + let mut qb = QueryBuilder::new( + "SELECT s.subject_type_id \ + FROM chii_subjects s \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE s.subject_id = ", + ); + qb.push_bind(subject_id); + qb.push(" AND s.subject_ban = 0 AND f.field_redirect = 0"); + if !allow_nsfw { + qb.push(" AND s.subject_nsfw = 0"); + } + qb.push(" LIMIT 1"); + + qb.build_query_scalar() + .fetch_optional(executor) + .await + .map_err(|_| ApiError::internal("load subject failed"))? 
+ .ok_or_else(|| ApiError::not_found("subject not found")) +} + +async fn load_subject_character_actors( + state: &AppState, + subject_id: u32, + character_ids: &[u32], +) -> Result>, ApiError> { + if character_ids.is_empty() { + return Ok(HashMap::new()); + } + + let mut qb = QueryBuilder::new( + "SELECT ci.crt_id AS character_id, \ + p.prsn_id AS actor_id, p.prsn_name AS actor_name, p.prsn_summary AS actor_summary, p.prsn_type AS actor_type, \ + p.prsn_img AS actor_img, p.prsn_lock AS actor_lock, \ + p.prsn_producer, p.prsn_mangaka, p.prsn_artist, p.prsn_seiyu, p.prsn_writer, p.prsn_illustrator, p.prsn_actor \ + FROM chii_crt_cast_index ci \ + JOIN chii_persons p ON p.prsn_id = ci.prsn_id \ + WHERE ci.subject_id = ", + ); + qb.push_bind(subject_id); + qb.push(" AND ci.crt_id IN ("); + { + let mut separated = qb.separated(", "); + for id in character_ids { + separated.push_bind(*id); + } + } + qb.push(") AND p.prsn_redirect = 0 ORDER BY ci.crt_id, p.prsn_id"); + + let rows: Vec = qb + .build_query_as() + .fetch_all(&state.pool) + .await + .map_err(|_| ApiError::internal("load subject actors failed"))?; + + let mut by_character: HashMap> = HashMap::new(); + for row in rows { + by_character + .entry(row.character_id) + .or_default() + .push(SubjectActor { + id: row.actor_id, + name: row.actor_name, + short_summary: row.actor_summary, + person_type: row.actor_type, + career: careers_from_flags( + row.prsn_writer, + row.prsn_producer, + row.prsn_mangaka, + row.prsn_artist, + row.prsn_seiyu, + row.prsn_illustrator, + row.prsn_actor, + ), + images: person_image(&row.actor_img), + locked: row.actor_lock != 0, + }); + } + + Ok(by_character) +} + +fn careers_from_flags( + writer: bool, + producer: bool, + mangaka: bool, + artist: bool, + seiyu: bool, + illustrator: bool, + actor: bool, +) -> Vec { + let mut items = Vec::with_capacity(7); + + if writer { + items.push("writer".to_string()); + } + if producer { + items.push("producer".to_string()); + } + if mangaka { + 
items.push("mangaka".to_string()); + } + if artist { + items.push("artist".to_string()); + } + if seiyu { + items.push("seiyu".to_string()); + } + if illustrator { + items.push("illustrator".to_string()); + } + if actor { + items.push("actor".to_string()); + } + + items +} + +async fn load_subjects( + state: &AppState, + ids: &[u32], +) -> Result, super::ApiError> { + if ids.is_empty() { + return Ok(Vec::new()); + } + + let mut qb = QueryBuilder::new( + "SELECT s.subject_id, s.subject_type_id, s.subject_name, s.subject_name_cn, s.field_summary, \ + s.subject_nsfw, s.subject_ban, s.subject_platform, s.field_meta_tags, s.field_volumes, s.field_eps, \ + s.subject_series, s.subject_image, s.subject_wish, s.subject_collect, s.subject_doing, s.subject_on_hold, s.subject_dropped, \ + f.field_rank, f.field_rate_1, f.field_rate_2, f.field_rate_3, f.field_rate_4, f.field_rate_5, \ + f.field_rate_6, f.field_rate_7, f.field_rate_8, f.field_rate_9, f.field_rate_10, \ + DATE_FORMAT(f.field_date, '%Y-%m-%d') AS field_date, f.field_tags \ + FROM chii_subjects s \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE s.subject_id IN (", + ); + + { + let mut separated = qb.separated(", "); + for id in ids { + separated.push_bind(*id); + } + } + qb.push(") AND s.subject_ban = 0 AND f.field_redirect = 0"); + + let rows: Vec = qb + .build_query_as() + .fetch_all(&state.pool) + .await + .map_err(|_| super::ApiError::internal("load subjects failed"))?; + + let by_id: HashMap = + rows.into_iter().map(|x| (x.subject_id, x)).collect(); + + let mut out = Vec::with_capacity(ids.len()); + for id in ids { + if let Some(row) = by_id.get(id) { + out.push(subject_from_row(row)); + } + } + + Ok(out) +} + +fn subject_from_row(row: &SubjectRow) -> SubjectDoc { + let rating = rating(row); + SubjectDoc { + id: row.subject_id, + type_id: row.subject_type_id, + name: row.subject_name.clone(), + name_cn: row.subject_name_cn.clone(), + summary: row.field_summary.clone(), + nsfw: 
row.subject_nsfw, + locked: row.subject_ban == 2, + platform: platform_string(row.subject_type_id, row.subject_platform), + meta_tags: split_meta_tags(&row.field_meta_tags), + volumes: row.field_volumes, + eps: row.field_eps, + series: row.subject_series, + total_episodes: i64::from(row.field_eps), + rating, + collection: Collection { + wish: row.subject_wish, + collect: row.subject_collect, + doing: row.subject_doing, + on_hold: row.subject_on_hold, + dropped: row.subject_dropped, + }, + tags: parse_subject_tags(&row.field_tags), + images: subject_image(&row.subject_image), + date: row.field_date.clone(), + } +} + +fn split_meta_tags(raw: &str) -> Vec { + raw + .split(' ') + .map(str::trim) + .filter(|x| !x.is_empty()) + .map(ToOwned::to_owned) + .collect() +} + +fn parse_subject_tags(raw: &[u8]) -> Vec { + let s = String::from_utf8_lossy(raw); + let parsed: Vec = match parse_php_serialize(&s) { + Ok(v) => v, + Err(_) => return Vec::new(), + }; + + parsed + .into_iter() + .filter_map(|x| { + x.tag_name.map(|name| SubjectTag { + name, + count: x.tag_count.unwrap_or(0), + }) + }) + .collect() +} + +fn rating(row: &SubjectRow) -> Rating { + let total = row + .field_rate_1 + .saturating_add(row.field_rate_2) + .saturating_add(row.field_rate_3) + .saturating_add(row.field_rate_4) + .saturating_add(row.field_rate_5) + .saturating_add(row.field_rate_6) + .saturating_add(row.field_rate_7) + .saturating_add(row.field_rate_8) + .saturating_add(row.field_rate_9) + .saturating_add(row.field_rate_10); + + let weighted = (row.field_rate_1 as f64) * 1.0 + + (row.field_rate_2 as f64) * 2.0 + + (row.field_rate_3 as f64) * 3.0 + + (row.field_rate_4 as f64) * 4.0 + + (row.field_rate_5 as f64) * 5.0 + + (row.field_rate_6 as f64) * 6.0 + + (row.field_rate_7 as f64) * 7.0 + + (row.field_rate_8 as f64) * 8.0 + + (row.field_rate_9 as f64) * 9.0 + + (row.field_rate_10 as f64) * 10.0; + + let score = if total == 0 { + 0.0 + } else { + ((weighted / (total as f64)) * 10.0).round() / 10.0 + 
}; + + Rating { + rank: row.field_rank, + total, + score, + count: RatingCount { + field1: row.field_rate_1, + field2: row.field_rate_2, + field3: row.field_rate_3, + field4: row.field_rate_4, + field5: row.field_rate_5, + field6: row.field_rate_6, + field7: row.field_rate_7, + field8: row.field_rate_8, + field9: row.field_rate_9, + field10: row.field_rate_10, + }, + } +} + +#[cfg(test)] +mod tests { + use super::{resolve_subject_image_url, ApiError}; + use crate::server::test_mocks::MockPool; + use axum::http::StatusCode; + + #[tokio::test] + async fn resolve_subject_image_url_returns_default_for_empty_path() { + let mut pool = MockPool::new(); + pool + .subject_image_repo + .expect_find_subject_image_path() + .withf(|subject_id, allow_nsfw| *subject_id == 7 && *allow_nsfw) + .times(1) + .returning(|_, _| Ok(Some(String::new()))); + + let got = resolve_subject_image_url(&pool.subject_image_repo, 7, true, "small") + .await + .expect("resolve image"); + + assert_eq!(got, "https://lain.bgm.tv/img/no_icon_subject.png"); + } + + #[tokio::test] + async fn resolve_subject_image_url_returns_not_found_when_missing() { + let mut pool = MockPool::new(); + pool + .subject_image_repo + .expect_find_subject_image_path() + .withf(|subject_id, allow_nsfw| *subject_id == 404 && !*allow_nsfw) + .times(1) + .returning(|_, _| Ok(None)); + + let err = resolve_subject_image_url(&pool.subject_image_repo, 404, false, "small") + .await + .expect_err("expect not found"); + + assert_eq!(err.status, StatusCode::NOT_FOUND); + assert_eq!(err.message, "subject not found"); + } + + #[tokio::test] + async fn resolve_subject_image_url_returns_bad_request_for_invalid_type() { + let mut pool = MockPool::new(); + pool + .subject_image_repo + .expect_find_subject_image_path() + .times(1) + .returning(|_, _| Ok(Some("ab/cd.jpg".to_string()))); + + let err = resolve_subject_image_url(&pool.subject_image_repo, 1, true, "invalid") + .await + .expect_err("expect bad request"); + + assert_eq!(err.status, 
StatusCode::BAD_REQUEST); + assert_eq!(err.message, "bad image type: invalid"); + } + + #[tokio::test] + async fn resolve_subject_image_url_passes_through_repo_errors() { + let mut pool = MockPool::new(); + pool + .subject_image_repo + .expect_find_subject_image_path() + .times(1) + .returning(|_, _| Err(ApiError::internal("load subject image failed"))); + + let err = resolve_subject_image_url(&pool.subject_image_repo, 1, true, "small") + .await + .expect_err("expect internal error"); + + assert_eq!(err.status, StatusCode::INTERNAL_SERVER_ERROR); + assert_eq!(err.message, "load subject image failed"); + } +} diff --git a/crates/app/src/server/test_mocks.rs b/crates/app/src/server/test_mocks.rs new file mode 100644 index 000000000..102feec66 --- /dev/null +++ b/crates/app/src/server/test_mocks.rs @@ -0,0 +1,15 @@ +pub(super) struct MockPool { + pub subject_image_repo: super::subjects::MockSubjectImageRepo, + pub character_image_repo: super::characters::MockCharacterImageRepo, + pub person_image_repo: super::persons::MockPersonImageRepo, +} + +impl MockPool { + pub(super) fn new() -> Self { + Self { + subject_image_repo: super::subjects::MockSubjectImageRepo::new(), + character_image_repo: super::characters::MockCharacterImageRepo::new(), + person_image_repo: super::persons::MockPersonImageRepo::new(), + } + } +} diff --git a/crates/app/src/server/users.rs b/crates/app/src/server/users.rs new file mode 100644 index 000000000..55f5bc36f --- /dev/null +++ b/crates/app/src/server/users.rs @@ -0,0 +1,447 @@ +use axum::{ + extract::{Extension, Path, Query, State}, + Json, +}; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use utoipa::{IntoParams, ToSchema}; + +use super::media::{subject_image, SubjectImages}; +use super::{ + parse_page, ApiError, ApiResult, AppState, PageInfo, PageQuery, RequestAuth, +}; + +#[derive(Debug, Deserialize, IntoParams)] +#[into_params(parameter_in = Query)] +pub(super) struct UserCollectionsQuery { + subject_type: Option, + 
#[serde(rename = "type")] + collection_type: Option, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct UserSubjectCollection { + subject_id: u32, + subject_type: u8, + rate: u8, + #[serde(rename = "type")] + collection_type: u8, + comment: Option, + tags: Vec, + ep_status: u32, + vol_status: u32, + updated_at: String, + private: bool, + subject: SlimSubject, +} + +#[derive(Debug, Serialize, ToSchema)] +pub(super) struct UserCollectionsResponse { + #[serde(flatten)] + page: PageInfo, + data: Vec, +} + +#[derive(Debug, Serialize, ToSchema)] +struct SlimSubject { + id: u32, + #[serde(rename = "type")] + subject_type: u8, + name: String, + name_cn: String, + short_summary: String, + date: Option, + images: SubjectImages, + volumes: u32, + eps: u32, + collection_total: u32, + score: f64, + rank: u32, + tags: Vec, +} + +#[derive(Debug, Serialize, ToSchema)] +struct SubjectTag { + name: String, + count: u32, +} + +#[derive(Debug, FromRow)] +struct SubjectCollectionRow { + subject_id: u32, + subject_type: u8, + rate: u8, + collection_type: u8, + comment: String, + tags: String, + ep_status: u32, + vol_status: u32, + updated_at: String, + private: u8, + subject_name: String, + subject_name_cn: String, + short_summary: String, + date: Option, + subject_image: String, + volumes: u32, + eps: u32, + collection_total: u32, + rank: u32, + rate_1: u32, + rate_2: u32, + rate_3: u32, + rate_4: u32, + rate_5: u32, + rate_6: u32, + rate_7: u32, + rate_8: u32, + rate_9: u32, + rate_10: u32, + field_tags: Vec, +} + +#[utoipa::path( + get, + path = "/v0/users/{username}/collections", + params( + ("username" = String, Path, description = "用户名"), + UserCollectionsQuery, + PageQuery, + ), + responses( + (status = 200, description = "用户收藏列表", body = UserCollectionsResponse), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 404, description = "用户不存在", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] 
+pub(super) async fn list_user_collections( + State(state): State, + Extension(auth): Extension, + Path(username): Path, + query: Query, + page: Query, +) -> ApiResult { + let user_id = find_user_id_by_username(&state, &username).await?; + + let (subject_type, collection_type) = parse_collection_filters(&query.0)?; + let (limit, offset) = parse_page(page); + let show_private = auth.user_id == Some(user_id); + + let total = count_subject_collections( + &state, + user_id, + subject_type, + collection_type, + show_private, + ) + .await?; + + let rows = list_subject_collections( + &state, + user_id, + subject_type, + collection_type, + show_private, + limit, + offset, + ) + .await?; + + Ok(Json(UserCollectionsResponse { + page: PageInfo::new(total as usize, limit, offset), + data: rows.into_iter().map(map_row_to_collection).collect(), + })) +} + +#[utoipa::path( + get, + path = "/v0/users/{username}/collections/{subject_id}", + params( + ("username" = String, Path, description = "用户名"), + ("subject_id" = u32, Path, description = "条目 ID") + ), + responses( + (status = 200, description = "用户收藏", body = UserSubjectCollection), + (status = 400, description = "请求参数错误", body = super::ErrorBody), + (status = 404, description = "用户不存在或未收藏", body = super::ErrorBody), + (status = 500, description = "服务错误", body = super::ErrorBody) + ) +)] +pub(super) async fn get_user_collection( + State(state): State, + Extension(auth): Extension, + Path((username, subject_id)): Path<(String, u32)>, +) -> ApiResult { + let user_id = find_user_id_by_username(&state, &username).await?; + let show_private = auth.user_id == Some(user_id); + + let row = + get_subject_collection(&state, user_id, subject_id, show_private, auth.allow_nsfw) + .await?; + Ok(Json(map_row_to_collection(row))) +} + +async fn find_user_id_by_username( + state: &AppState, + username: &str, +) -> Result { + sqlx::query_scalar::<_, u32>( + "SELECT uid FROM chii_members WHERE username = ? 
LIMIT 1", + ) + .bind(username) + .fetch_optional(state.pool()) + .await + .map_err(|_| ApiError::internal("load user failed"))? + .ok_or_else(|| ApiError::not_found("user doesn't exist or has been removed")) +} + +fn parse_collection_filters( + query: &UserCollectionsQuery, +) -> Result<(Option, Option), ApiError> { + if let Some(subject_type) = query.subject_type { + if !matches!(subject_type, 1 | 2 | 3 | 4 | 6) { + return Err(ApiError::bad_request("invalid query param `subject_type`")); + } + } + + if let Some(collection_type) = query.collection_type { + if !(1..=5).contains(&collection_type) { + return Err(ApiError::bad_request("invalid query param `type`")); + } + } + + Ok((query.subject_type, query.collection_type)) +} + +async fn count_subject_collections( + state: &AppState, + user_id: u32, + subject_type: Option, + collection_type: Option, + show_private: bool, +) -> Result { + let mut sql = String::from( + "SELECT COUNT(*) AS cnt FROM chii_subject_interests i WHERE i.interest_uid = ? 
AND i.interest_type != 0", + ); + + if subject_type.is_some() { + sql.push_str(" AND i.interest_subject_type = ?"); + } + if collection_type.is_some() { + sql.push_str(" AND i.interest_type = ?"); + } + if !show_private { + sql.push_str(" AND i.interest_private = 0"); + } + + let mut q = sqlx::query_scalar::<_, i64>(&sql).bind(user_id); + if let Some(v) = subject_type { + q = q.bind(v); + } + if let Some(v) = collection_type { + q = q.bind(v); + } + + q.fetch_one(state.pool()) + .await + .map_err(|_| ApiError::internal("count user collections failed")) +} + +async fn list_subject_collections( + state: &AppState, + user_id: u32, + subject_type: Option, + collection_type: Option, + show_private: bool, + limit: usize, + offset: usize, +) -> Result, ApiError> { + let mut sql = String::from( + "SELECT i.interest_subject_id AS subject_id, i.interest_subject_type AS subject_type, i.interest_rate AS rate, \ + i.interest_type AS collection_type, i.interest_comment AS comment, i.interest_tag AS tags, \ + i.interest_ep_status AS ep_status, i.interest_vol_status AS vol_status, \ + DATE_FORMAT(FROM_UNIXTIME(i.interest_lasttouch), '%Y-%m-%dT%H:%i:%s+08:00') AS updated_at, \ + i.interest_private AS private, \ + s.subject_name, s.subject_name_cn, s.field_summary AS short_summary, DATE_FORMAT(f.field_date, '%Y-%m-%d') AS date, \ + s.subject_image, s.field_volumes AS volumes, s.field_eps AS eps, s.subject_collect AS collection_total, \ + f.field_rank AS rank, \ + f.field_rate_1 AS rate_1, f.field_rate_2 AS rate_2, f.field_rate_3 AS rate_3, f.field_rate_4 AS rate_4, f.field_rate_5 AS rate_5, \ + f.field_rate_6 AS rate_6, f.field_rate_7 AS rate_7, f.field_rate_8 AS rate_8, f.field_rate_9 AS rate_9, f.field_rate_10 AS rate_10, \ + f.field_tags \ + FROM chii_subject_interests i \ + JOIN chii_subjects s ON s.subject_id = i.interest_subject_id \ + JOIN chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE i.interest_uid = ? 
AND i.interest_type != 0 AND s.subject_ban = 0 AND f.field_redirect = 0", + ); + + if subject_type.is_some() { + sql.push_str(" AND i.interest_subject_type = ?"); + } + if collection_type.is_some() { + sql.push_str(" AND i.interest_type = ?"); + } + if !show_private { + sql.push_str(" AND i.interest_private = 0"); + } + + sql.push_str(" ORDER BY i.interest_lasttouch DESC LIMIT ? OFFSET ?"); + + let mut q = sqlx::query_as::<_, SubjectCollectionRow>(&sql).bind(user_id); + if let Some(v) = subject_type { + q = q.bind(v); + } + if let Some(v) = collection_type { + q = q.bind(v); + } + + q.bind(limit as i64) + .bind(offset as i64) + .fetch_all(state.pool()) + .await + .map_err(|_| ApiError::internal("list user collections failed")) +} + +async fn get_subject_collection( + state: &AppState, + user_id: u32, + subject_id: u32, + show_private: bool, + allow_nsfw: bool, +) -> Result { + let mut sql = String::from( + "SELECT i.interest_subject_id AS subject_id, i.interest_subject_type AS subject_type, i.interest_rate AS rate, \ + i.interest_type AS collection_type, i.interest_comment AS comment, i.interest_tag AS tags, \ + i.interest_ep_status AS ep_status, i.interest_vol_status AS vol_status, \ + DATE_FORMAT(FROM_UNIXTIME(i.interest_lasttouch), '%Y-%m-%dT%H:%i:%s+08:00') AS updated_at, \ + i.interest_private AS private, \ + s.subject_name, s.subject_name_cn, s.field_summary AS short_summary, DATE_FORMAT(f.field_date, '%Y-%m-%d') AS date, \ + s.subject_image, s.field_volumes AS volumes, s.field_eps AS eps, s.subject_collect AS collection_total, \ + f.field_rank AS rank, \ + f.field_rate_1 AS rate_1, f.field_rate_2 AS rate_2, f.field_rate_3 AS rate_3, f.field_rate_4 AS rate_4, f.field_rate_5 AS rate_5, \ + f.field_rate_6 AS rate_6, f.field_rate_7 AS rate_7, f.field_rate_8 AS rate_8, f.field_rate_9 AS rate_9, f.field_rate_10 AS rate_10, \ + f.field_tags \ + FROM chii_subject_interests i \ + JOIN chii_subjects s ON s.subject_id = i.interest_subject_id \ + JOIN 
chii_subject_fields f ON f.field_sid = s.subject_id \ + WHERE i.interest_uid = ? AND i.interest_subject_id = ? AND i.interest_type != 0 \ + AND s.subject_ban = 0 AND f.field_redirect = 0", + ); + + if !allow_nsfw { + sql.push_str(" AND s.subject_nsfw = 0"); + } + + let row = sqlx::query_as::<_, SubjectCollectionRow>(&sql) + .bind(user_id) + .bind(subject_id) + .fetch_optional(state.pool()) + .await + .map_err(|_| ApiError::internal("load user collection failed"))? + .ok_or_else(|| ApiError::not_found("subject is not collected by user"))?; + + if row.private != 0 && !show_private { + return Err(ApiError::not_found("subject is not collected by user")); + } + + Ok(row) +} + +fn map_row_to_collection(row: SubjectCollectionRow) -> UserSubjectCollection { + let score = rating_score(&row); + + UserSubjectCollection { + subject_id: row.subject_id, + subject_type: row.subject_type, + rate: row.rate, + collection_type: row.collection_type, + comment: if row.comment.is_empty() { + None + } else { + Some(row.comment) + }, + tags: split_tags(&row.tags), + ep_status: row.ep_status, + vol_status: row.vol_status, + updated_at: row.updated_at, + private: row.private != 0, + subject: SlimSubject { + id: row.subject_id, + subject_type: row.subject_type, + name: row.subject_name, + name_cn: row.subject_name_cn, + short_summary: row.short_summary, + date: row.date, + images: subject_image(&row.subject_image), + volumes: row.volumes, + eps: row.eps, + collection_total: row.collection_total, + score, + rank: row.rank, + tags: parse_subject_tags(&row.field_tags), + }, + } +} + +fn split_tags(raw: &str) -> Vec { + raw + .split(' ') + .map(str::trim) + .filter(|x| !x.is_empty()) + .map(ToOwned::to_owned) + .collect() +} + +fn parse_subject_tags(raw: &[u8]) -> Vec { + let s = String::from_utf8_lossy(raw); + let parsed: Vec = match php_serialize::from_str(&s) { + Ok(v) => v, + Err(_) => return Vec::new(), + }; + + parsed + .into_iter() + .filter_map(|x| { + x.tag_name.map(|name| SubjectTag { 
+ name, + count: x.tag_count.unwrap_or(0), + }) + }) + .collect() +} + +fn rating_score(row: &SubjectCollectionRow) -> f64 { + let total = row.rate_1 + + row.rate_2 + + row.rate_3 + + row.rate_4 + + row.rate_5 + + row.rate_6 + + row.rate_7 + + row.rate_8 + + row.rate_9 + + row.rate_10; + + if total == 0 { + return 0.0; + } + + let weighted = (row.rate_1 as f64) * 1.0 + + (row.rate_2 as f64) * 2.0 + + (row.rate_3 as f64) * 3.0 + + (row.rate_4 as f64) * 4.0 + + (row.rate_5 as f64) * 5.0 + + (row.rate_6 as f64) * 6.0 + + (row.rate_7 as f64) * 7.0 + + (row.rate_8 as f64) * 8.0 + + (row.rate_9 as f64) * 9.0 + + (row.rate_10 as f64) * 10.0; + + ((weighted / (total as f64)) * 10.0).round() / 10.0 +} + +#[derive(Debug, Deserialize)] +struct SubjectTagItem { + tag_name: Option, + tag_count: Option, +} diff --git a/crates/app/tests/common/mod.rs b/crates/app/tests/common/mod.rs new file mode 100644 index 000000000..fda4cd239 --- /dev/null +++ b/crates/app/tests/common/mod.rs @@ -0,0 +1,66 @@ +use app::server::{build_router, state_from_env, AppState}; +use axum::Router; +use meilisearch_sdk::client::Client as MeiliClient; +use sqlx::{mysql::MySqlPoolOptions, MySql, Transaction}; + +pub async fn test_router() -> anyhow::Result { + let state = if use_real_dependencies() { + state_from_env().await? + } else { + offline_state()? 
+ }; + + Ok(build_router(state)) +} + +#[allow(dead_code)] +pub async fn test_state() -> anyhow::Result { + if use_real_dependencies() { + state_from_env().await + } else { + offline_state() + } +} + +pub fn use_real_dependencies() -> bool { + matches!( + std::env::var("RUST_TEST_REAL_DEPS").as_deref(), + Ok("1") | Ok("true") | Ok("TRUE") | Ok("yes") | Ok("YES") + ) +} + +#[allow(dead_code)] +pub fn allow_write_tests() -> bool { + matches!( + std::env::var("RUST_TEST_ALLOW_WRITE").as_deref(), + Ok("1") | Ok("true") | Ok("TRUE") | Ok("yes") | Ok("YES") + ) +} + +#[allow(dead_code)] +pub async fn begin_write_transaction( + state: &AppState, +) -> anyhow::Result> { + if !allow_write_tests() { + anyhow::bail!("write tests are disabled, set RUST_TEST_ALLOW_WRITE=1 to enable"); + } + + state.pool().begin().await.map_err(|e| anyhow::anyhow!(e)) +} + +#[allow(dead_code)] +pub fn env_var(name: &str) -> Option { + std::env::var(name).ok().filter(|v| !v.trim().is_empty()) +} + +fn offline_state() -> anyhow::Result { + let meili = MeiliClient::new("http://127.0.0.1:17700", Some("test-key".to_string())) + .map_err(|e| anyhow::anyhow!(e))?; + + let pool = MySqlPoolOptions::new() + .max_connections(1) + .connect_lazy("mysql://root:root@127.0.0.1:13306/test") + .map_err(|e| anyhow::anyhow!(e))?; + + Ok(AppState::new(meili, pool)) +} diff --git a/crates/app/tests/server_real_deps.rs b/crates/app/tests/server_real_deps.rs new file mode 100644 index 000000000..a6a6bdb0d --- /dev/null +++ b/crates/app/tests/server_real_deps.rs @@ -0,0 +1,260 @@ +mod common; + +use axum::body::to_bytes; +use axum::body::Body; +use axum::http::{Request, StatusCode}; +use tower::util::ServiceExt; + +const DEFAULT_IMAGE_URL: &str = "https://lain.bgm.tv/img/no_icon_subject.png"; + +#[tokio::test] +async fn image_routes_return_404_for_nonexistent_id() { + if !common::use_real_dependencies() { + eprintln!("skip: RUST_TEST_REAL_DEPS is not enabled"); + return; + } + + let app = 
common::test_router().await.expect("build test router"); + let missing_id = u32::MAX; + + for prefix in ["subjects", "characters", "persons"] { + let uri = format!("/v0/{prefix}/{missing_id}/image?type=small"); + let response = app + .clone() + .oneshot( + Request::builder() + .uri(&uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::NOT_FOUND, "uri: {uri}"); + } +} + +#[tokio::test] +async fn configured_image_fixtures_redirect() { + if !common::use_real_dependencies() { + eprintln!("skip: RUST_TEST_REAL_DEPS is not enabled"); + return; + } + + let app = common::test_router().await.expect("build test router"); + + assert_redirect_for_configured_fixture( + &app, + "subjects", + common::env_var("RUST_TEST_SUBJECT_IMAGE_ID"), + common::env_var("RUST_TEST_SUBJECT_IMAGE_EXPECT_DEFAULT").as_deref(), + ) + .await; + + assert_redirect_for_configured_fixture( + &app, + "characters", + common::env_var("RUST_TEST_CHARACTER_IMAGE_ID"), + common::env_var("RUST_TEST_CHARACTER_IMAGE_EXPECT_DEFAULT").as_deref(), + ) + .await; + + assert_redirect_for_configured_fixture( + &app, + "persons", + common::env_var("RUST_TEST_PERSON_IMAGE_ID"), + common::env_var("RUST_TEST_PERSON_IMAGE_EXPECT_DEFAULT").as_deref(), + ) + .await; +} + +#[tokio::test] +async fn related_routes_return_404_for_nonexistent_id() { + if !common::use_real_dependencies() { + eprintln!("skip: RUST_TEST_REAL_DEPS is not enabled"); + return; + } + + let app = common::test_router().await.expect("build test router"); + let missing_id = u32::MAX; + + let uris = [ + format!("/v0/subjects/{missing_id}/persons"), + format!("/v0/subjects/{missing_id}/characters"), + format!("/v0/subjects/{missing_id}/subjects"), + format!("/v0/characters/{missing_id}/subjects"), + format!("/v0/characters/{missing_id}/persons"), + format!("/v0/persons/{missing_id}/subjects"), + format!("/v0/persons/{missing_id}/characters"), + ]; + + 
for uri in uris { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(&uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::NOT_FOUND, "uri: {uri}"); + } +} + +#[tokio::test] +async fn configured_related_fixtures_return_json_array() { + if !common::use_real_dependencies() { + eprintln!("skip: RUST_TEST_REAL_DEPS is not enabled"); + return; + } + + let app = common::test_router().await.expect("build test router"); + + assert_related_array_for_configured_fixture( + &app, + "subject", + common::env_var("RUST_TEST_SUBJECT_RELATED_ID"), + &["persons", "characters", "subjects"], + ) + .await; + + assert_related_array_for_configured_fixture( + &app, + "character", + common::env_var("RUST_TEST_CHARACTER_RELATED_ID"), + &["subjects", "persons"], + ) + .await; + + assert_related_array_for_configured_fixture( + &app, + "person", + common::env_var("RUST_TEST_PERSON_RELATED_ID"), + &["subjects", "characters"], + ) + .await; +} + +#[tokio::test] +async fn write_transaction_template_can_rollback() { + if !common::use_real_dependencies() || !common::allow_write_tests() { + eprintln!("skip: RUST_TEST_REAL_DEPS=1 and RUST_TEST_ALLOW_WRITE=1 are required"); + return; + } + + let state = common::test_state().await.expect("build test state"); + let mut tx = common::begin_write_transaction(&state) + .await + .expect("begin write transaction"); + + let ping: Option = sqlx::query_scalar("SELECT 1") + .fetch_optional(&mut *tx) + .await + .expect("execute in transaction"); + + assert_eq!(ping, Some(1)); + + tx.rollback().await.expect("rollback transaction"); +} + +async fn assert_redirect_for_configured_fixture( + app: &axum::Router, + prefix: &str, + configured_id: Option, + expect_default: Option<&str>, +) { + let Some(configured_id) = configured_id else { + eprintln!("skip fixture for {prefix}: ID env var is not configured"); + return; + }; + + let id = 
configured_id + .parse::() + .expect("fixture env var must be a positive integer"); + let uri = format!("/v0/{prefix}/{id}/image?type=small"); + + let response = app + .clone() + .oneshot( + Request::builder() + .uri(&uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::FOUND, "uri: {uri}"); + + let location = response + .headers() + .get("location") + .and_then(|h| h.to_str().ok()) + .unwrap_or_default(); + + assert!(!location.is_empty(), "uri: {uri}, location header missing"); + + let expect_default = matches!( + expect_default, + Some("1") | Some("true") | Some("TRUE") | Some("yes") | Some("YES") + ); + if expect_default { + assert_eq!(location, DEFAULT_IMAGE_URL, "uri: {uri}"); + } +} + +async fn assert_related_array_for_configured_fixture( + app: &axum::Router, + entity: &str, + configured_id: Option, + suffixes: &[&str], +) { + let Some(configured_id) = configured_id else { + eprintln!("skip fixture for {entity}: related ID env var is not configured"); + return; + }; + + let id = configured_id + .parse::() + .expect("fixture env var must be a positive integer"); + + let base = match entity { + "subject" => "subjects", + "character" => "characters", + "person" => "persons", + _ => panic!("unsupported entity"), + }; + + for suffix in suffixes { + let uri = format!("/v0/{base}/{id}/{suffix}"); + let response = app + .clone() + .oneshot( + Request::builder() + .uri(&uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::OK, "uri: {uri}"); + + let body = to_bytes(response.into_body(), 1024 * 1024) + .await + .expect("read response body"); + let text = String::from_utf8(body.to_vec()).expect("utf8 json body"); + + assert!( + text.starts_with('[') && text.ends_with(']'), + "uri: {uri}, body: {text}" + ); + } +} diff --git 
a/crates/app/tests/server_smoke.rs b/crates/app/tests/server_smoke.rs new file mode 100644 index 000000000..b9bba1fc0 --- /dev/null +++ b/crates/app/tests/server_smoke.rs @@ -0,0 +1,235 @@ +mod common; + +use axum::body::Body; +use axum::http::{Request, StatusCode}; +use tower::util::ServiceExt; + +#[tokio::test] +async fn openapi_json_is_available() { + let app = common::test_router().await.expect("build test router"); + + let response = app + .oneshot( + Request::builder() + .uri("/openapi.json") + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::OK); +} + +#[tokio::test] +async fn image_routes_require_type_query_like_go() { + let app = common::test_router().await.expect("build test router"); + + let cases = [ + "/v0/subjects/1/image", + "/v0/characters/1/image", + "/v0/persons/1/image", + ]; + + for uri in cases { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST, "uri: {uri}"); + } +} + +#[tokio::test] +async fn image_routes_reject_non_numeric_id_like_go() { + let app = common::test_router().await.expect("build test router"); + + let cases = [ + "/v0/subjects/abc/image?type=small", + "/v0/characters/abc/image?type=small", + "/v0/persons/abc/image?type=small", + ]; + + for uri in cases { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST, "uri: {uri}"); + } +} + +#[tokio::test] +async fn subject_related_routes_reject_non_numeric_id_like_go() { + let app = common::test_router().await.expect("build test router"); + + let cases = [ + "/v0/subjects/abc/persons", + 
"/v0/subjects/abc/characters", + "/v0/subjects/abc/subjects", + ]; + + for uri in cases { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST, "uri: {uri}"); + } +} + +#[tokio::test] +async fn character_person_related_routes_reject_non_numeric_id_like_go() { + let app = common::test_router().await.expect("build test router"); + + let cases = [ + "/v0/characters/abc/subjects", + "/v0/characters/abc/persons", + "/v0/persons/abc/subjects", + "/v0/persons/abc/characters", + ]; + + for uri in cases { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST, "uri: {uri}"); + } +} + +#[tokio::test] +async fn character_collect_requires_auth_like_go() { + let app = common::test_router().await.expect("build test router"); + + let response = app + .clone() + .oneshot( + Request::builder() + .uri("/v0/characters/1/collect") + .method("POST") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn character_collect_rejects_non_numeric_id_like_go() { + let app = common::test_router().await.expect("build test router"); + + let response = app + .clone() + .oneshot( + Request::builder() + .uri("/v0/characters/abc/collect") + .method("POST") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn person_collect_requires_auth_like_go() { + let app = common::test_router().await.expect("build test router"); + + let response = app + .clone() + .oneshot( + 
Request::builder() + .uri("/v0/persons/1/collect") + .method("POST") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn person_collect_rejects_non_numeric_id_like_go() { + let app = common::test_router().await.expect("build test router"); + + let response = app + .clone() + .oneshot( + Request::builder() + .uri("/v0/persons/abc/collect") + .method("POST") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn user_collection_routes_reject_invalid_params_like_go() { + let app = common::test_router().await.expect("build test router"); + + let cases = [ + "/v0/users/test/collections?type=99", + "/v0/users/test/collections?subject_type=99", + "/v0/users/test/collections/abc", + ]; + + for uri in cases { + let response = app + .clone() + .oneshot( + Request::builder() + .uri(uri) + .method("GET") + .body(Body::empty()) + .expect("build request"), + ) + .await + .expect("perform request"); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST, "uri: {uri}"); + } +} diff --git a/docs/rust-migration-plan.md b/docs/rust-migration-plan.md index 71b15b1c9..c189f8322 100644 --- a/docs/rust-migration-plan.md +++ b/docs/rust-migration-plan.md @@ -34,6 +34,15 @@ Initial implementation goals: - Event and payload schemas are defined in Rust types for timeline/canal. - Golden fixtures can be added for parity tests. +### M3: Server `/v0` read-path baseline +- Search APIs for `subjects` / `characters` / `persons` are implemented in Rust. +- Detail/image/related read APIs for `subjects` / `characters` / `persons` are implemented in Rust. +- OpenAPI paths are generated from Rust handlers and exposed via `/openapi.json`. 
+ +### M4: Server write-path baseline (in progress) +- `POST/DELETE /v0/characters/{character_id}/collect` implemented. +- Write-path executor pattern supports pool/transaction entry points for test rollback scenarios. + ## Out of scope (for this commit) - Production traffic switching @@ -42,7 +51,7 @@ Initial implementation goals: ## Next implementation tasks -1. Implement real search/session/redis side effects in canal handlers (consume loop and commit semantics are already in place). -2. Implement timeline publish API and payload parity tests. -3. Add sqlx pool and first read-only repository for subject read-path. +1. Continue migrating remaining `/v0` endpoints with same-prefix priority and keep behavior parity with Go. +2. Expand write-path integration tests for collect/uncollect with transaction rollback assertions. +3. Implement timeline publish API and payload parity tests. 4. Add CI jobs to build Rust workspace and run tests. diff --git a/docs/rust-workspace.md b/docs/rust-workspace.md index 40d782f6e..e213f6638 100644 --- a/docs/rust-workspace.md +++ b/docs/rust-workspace.md @@ -28,3 +28,36 @@ cargo run -p app -- server - `worker canal`: real Kafka consume loop with Debezium payload parsing, table-based dispatch, and commit-after-success behavior. - `worker timeline`: producer bootstrap and reusable timeline producer module are ready. 
+ +## Server API migration status (`/v0`) + +Implemented in Rust (`crates/app/src/server`): + +- Search: + - `POST /v0/search/subjects` + - `POST /v0/search/characters` + - `POST /v0/search/persons` +- Subject read APIs: + - `GET /v0/subjects/{subject_id}` + - `GET /v0/subjects/{subject_id}/image` + - `GET /v0/subjects/{subject_id}/subjects` + - `GET /v0/subjects/{subject_id}/persons` + - `GET /v0/subjects/{subject_id}/characters` +- Character read/write APIs: + - `GET /v0/characters/{character_id}` + - `GET /v0/characters/{character_id}/image` + - `GET /v0/characters/{character_id}/subjects` + - `GET /v0/characters/{character_id}/persons` + - `POST /v0/characters/{character_id}/collect` + - `DELETE /v0/characters/{character_id}/collect` +- Person read APIs: + - `GET /v0/persons/{person_id}` + - `GET /v0/persons/{person_id}/image` + - `GET /v0/persons/{person_id}/subjects` + - `GET /v0/persons/{person_id}/characters` + +## Behavior and test parity notes + +- Request-scoped auth is resolved once in middleware and injected through request extensions (`RequestAuth`). +- OAuth token lookup no longer relies on SQL `CAST`; `user_id` is read as string and validated/parsed in Rust before member lookup. +- Route behavior parity tests are in place (`server_smoke`, `server_real_deps`) and currently passing with `cargo test -p app`. 
From 09b186826679d318bbfd37f5be2a79a56f8bbafb Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 20 Feb 2026 00:07:16 +0800 Subject: [PATCH 4/7] fmt --- .github/workflows/rust-ci.yaml | 130 ++++++++++++++++----------------- docs/rust-migration-plan.md | 119 +++++++++++++++--------------- docs/rust-workspace.md | 126 ++++++++++++++++---------------- 3 files changed, 190 insertions(+), 185 deletions(-) diff --git a/.github/workflows/rust-ci.yaml b/.github/workflows/rust-ci.yaml index 1579d5eee..9489b23df 100644 --- a/.github/workflows/rust-ci.yaml +++ b/.github/workflows/rust-ci.yaml @@ -1,65 +1,65 @@ -name: Rust CI - -on: - push: - branches: - - master - merge_group: - pull_request: - branches: - - master - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - rust: - runs-on: ubuntu-24.04 - env: - TARGET: x86_64-unknown-linux-gnu - steps: - - uses: actions/checkout@v6 - with: - submodules: recursive - - - name: Set up Rust toolchain - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - target: ${{ env.TARGET }} - - - name: Check format - run: cargo fmt --all -- --check - - - name: Run clippy - run: cargo clippy --workspace --all-targets --locked - - - name: Run tests - run: cargo test --workspace --locked - - - name: Build release binary - run: cargo build --release --locked --target $TARGET -p app - - docker: - runs-on: ubuntu-24.04 - needs: rust - env: - TARGET: x86_64-unknown-linux-gnu - steps: - - uses: actions/checkout@v6 - with: - submodules: recursive - - - name: Set up Rust toolchain - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - target: ${{ env.TARGET }} - - - name: Build release binary - run: cargo build --release --locked --target $TARGET -p app - - - name: Build rust docker image - run: docker build --build-arg TARGET=$TARGET -t app-rust -f etc/Dockerfile.rust . 
- - - name: Smoke run - run: docker run --rm app-rust --help +name: Rust CI + +on: + push: + branches: + - master + merge_group: + pull_request: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust: + runs-on: ubuntu-24.04 + env: + TARGET: x86_64-unknown-linux-gnu + steps: + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Set up Rust toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ env.TARGET }} + + - name: Check format + run: cargo fmt --all -- --check + + - name: Run clippy + run: cargo clippy --workspace --all-targets --locked + + - name: Run tests + run: cargo test --workspace --locked + + - name: Build release binary + run: cargo build --release --locked --target $TARGET -p app + + docker: + runs-on: ubuntu-24.04 + needs: rust + env: + TARGET: x86_64-unknown-linux-gnu + steps: + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Set up Rust toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ env.TARGET }} + + - name: Build release binary + run: cargo build --release --locked --target $TARGET -p app + + - name: Build rust docker image + run: docker build --build-arg TARGET=$TARGET -t app-rust -f etc/Dockerfile.rust . + + - name: Smoke run + run: docker run --rm app-rust --help diff --git a/docs/rust-migration-plan.md b/docs/rust-migration-plan.md index c189f8322..b45b3ef57 100644 --- a/docs/rust-migration-plan.md +++ b/docs/rust-migration-plan.md @@ -1,57 +1,62 @@ -# Rust Migration Implementation Plan - -Date: 2026-02-19 -Status: in-progress -Target stack: `tokio` + `rdkafka` + `sqlx` - -## Scope (Phase 0 -> Phase 1) - -This repository keeps Go as the primary runtime while introducing a Rust workspace for gradual migration. - -Initial implementation goals: - -1. Add a Rust workspace integrated at repository root. -2. 
Add a single app executable with top-level subcommands: - - `worker` (contains `canal` / `timeline` placeholder runtime loops) - - `server` (placeholder runtime loop) -3. Add shared config loading and connection bootstrap for MySQL/Kafka. -4. Keep all changes non-invasive to existing Go startup and deployment. - -## Delivery milestones - -### M0: Bootstrap -- Rust workspace compiles. -- `cargo run -p app -- worker canal` starts and exits gracefully. -- `cargo run -p app -- worker timeline` starts and exits gracefully. -- `cargo run -p app -- server` starts and exits gracefully. - -### M1: Infra baseline -- Shared config supports environment variables. -- Kafka/MySQL clients can be initialized from config. -- Structured logging and basic shutdown signal handling are in place. - -### M2: Contract baseline -- Event and payload schemas are defined in Rust types for timeline/canal. -- Golden fixtures can be added for parity tests. - -### M3: Server `/v0` read-path baseline -- Search APIs for `subjects` / `characters` / `persons` are implemented in Rust. -- Detail/image/related read APIs for `subjects` / `characters` / `persons` are implemented in Rust. -- OpenAPI paths are generated from Rust handlers and exposed via `/openapi.json`. - -### M4: Server write-path baseline (in progress) -- `POST/DELETE /v0/characters/{character_id}/collect` implemented. -- Write-path executor pattern supports pool/transaction entry points for test rollback scenarios. - -## Out of scope (for this commit) - -- Production traffic switching -- Full endpoint migration -- Replacing Go DAL with sqlx queries - -## Next implementation tasks - -1. Continue migrating remaining `/v0` endpoints with same-prefix priority and keep behavior parity with Go. -2. Expand write-path integration tests for collect/uncollect with transaction rollback assertions. -3. Implement timeline publish API and payload parity tests. -4. Add CI jobs to build Rust workspace and run tests. 
+# Rust Migration Implementation Plan + +Date: 2026-02-19 +Status: in-progress +Target stack: `tokio` + `rdkafka` + `sqlx` + +## Scope (Phase 0 -> Phase 1) + +This repository keeps Go as the primary runtime while introducing a Rust workspace for gradual migration. + +Initial implementation goals: + +1. Add a Rust workspace integrated at repository root. +2. Add a single app executable with top-level subcommands: + - `worker` (contains `canal` / `timeline` placeholder runtime loops) + - `server` (placeholder runtime loop) +3. Add shared config loading and connection bootstrap for MySQL/Kafka. +4. Keep all changes non-invasive to existing Go startup and deployment. + +## Delivery milestones + +### M0: Bootstrap + +- Rust workspace compiles. +- `cargo run -p app -- worker canal` starts and exits gracefully. +- `cargo run -p app -- worker timeline` starts and exits gracefully. +- `cargo run -p app -- server` starts and exits gracefully. + +### M1: Infra baseline + +- Shared config supports environment variables. +- Kafka/MySQL clients can be initialized from config. +- Structured logging and basic shutdown signal handling are in place. + +### M2: Contract baseline + +- Event and payload schemas are defined in Rust types for timeline/canal. +- Golden fixtures can be added for parity tests. + +### M3: Server `/v0` read-path baseline + +- Search APIs for `subjects` / `characters` / `persons` are implemented in Rust. +- Detail/image/related read APIs for `subjects` / `characters` / `persons` are implemented in Rust. +- OpenAPI paths are generated from Rust handlers and exposed via `/openapi.json`. + +### M4: Server write-path baseline (in progress) + +- `POST/DELETE /v0/characters/{character_id}/collect` implemented. +- Write-path executor pattern supports pool/transaction entry points for test rollback scenarios. 
+ +## Out of scope (for this commit) + +- Production traffic switching +- Full endpoint migration +- Replacing Go DAL with sqlx queries + +## Next implementation tasks + +1. Continue migrating remaining `/v0` endpoints with same-prefix priority and keep behavior parity with Go. +2. Expand write-path integration tests for collect/uncollect with transaction rollback assertions. +3. Implement timeline publish API and payload parity tests. +4. Add CI jobs to build Rust workspace and run tests. diff --git a/docs/rust-workspace.md b/docs/rust-workspace.md index e213f6638..81ff544cd 100644 --- a/docs/rust-workspace.md +++ b/docs/rust-workspace.md @@ -1,63 +1,63 @@ -# Rust Migration Workspace - -Rust migration code is integrated into repository root. - -## Crates - -- `common`: shared config/bootstrap/helpers -- `app`: single executable with top-level subcommands (`worker`, `server`) - -## Environment variables - -- `RUST_MYSQL_DSN` (required) -- `RUST_KAFKA_BROKERS` (required, fallback: `KAFKA_BROKER`) -- `RUST_KAFKA_TOPICS` (required for `worker canal`, comma-separated) -- `RUST_KAFKA_GROUP_ID` (optional) -- `RUST_KAFKA_TOPIC` (optional, default: `timeline`) -- `RUST_LOG` (optional, default: `info`) - -## Run - -```bash -cargo run -p app -- worker canal -cargo run -p app -- worker timeline -cargo run -p app -- server -``` - -## Current migration status - -- `worker canal`: real Kafka consume loop with Debezium payload parsing, table-based dispatch, and commit-after-success behavior. -- `worker timeline`: producer bootstrap and reusable timeline producer module are ready. 
- -## Server API migration status (`/v0`) - -Implemented in Rust (`crates/app/src/server`): - -- Search: - - `POST /v0/search/subjects` - - `POST /v0/search/characters` - - `POST /v0/search/persons` -- Subject read APIs: - - `GET /v0/subjects/{subject_id}` - - `GET /v0/subjects/{subject_id}/image` - - `GET /v0/subjects/{subject_id}/subjects` - - `GET /v0/subjects/{subject_id}/persons` - - `GET /v0/subjects/{subject_id}/characters` -- Character read/write APIs: - - `GET /v0/characters/{character_id}` - - `GET /v0/characters/{character_id}/image` - - `GET /v0/characters/{character_id}/subjects` - - `GET /v0/characters/{character_id}/persons` - - `POST /v0/characters/{character_id}/collect` - - `DELETE /v0/characters/{character_id}/collect` -- Person read APIs: - - `GET /v0/persons/{person_id}` - - `GET /v0/persons/{person_id}/image` - - `GET /v0/persons/{person_id}/subjects` - - `GET /v0/persons/{person_id}/characters` - -## Behavior and test parity notes - -- Request-scoped auth is resolved once in middleware and injected through request extensions (`RequestAuth`). -- OAuth token lookup no longer relies on SQL `CAST`; `user_id` is read as string and validated/parsed in Rust before member lookup. -- Route behavior parity tests are in place (`server_smoke`, `server_real_deps`) and currently passing with `cargo test -p app`. +# Rust Migration Workspace + +Rust migration code is integrated into repository root. 
+ +## Crates + +- `common`: shared config/bootstrap/helpers +- `app`: single executable with top-level subcommands (`worker`, `server`) + +## Environment variables + +- `RUST_MYSQL_DSN` (required) +- `RUST_KAFKA_BROKERS` (required, fallback: `KAFKA_BROKER`) +- `RUST_KAFKA_TOPICS` (required for `worker canal`, comma-separated) +- `RUST_KAFKA_GROUP_ID` (optional) +- `RUST_KAFKA_TOPIC` (optional, default: `timeline`) +- `RUST_LOG` (optional, default: `info`) + +## Run + +```bash +cargo run -p app -- worker canal +cargo run -p app -- worker timeline +cargo run -p app -- server +``` + +## Current migration status + +- `worker canal`: real Kafka consume loop with Debezium payload parsing, table-based dispatch, and commit-after-success behavior. +- `worker timeline`: producer bootstrap and reusable timeline producer module are ready. + +## Server API migration status (`/v0`) + +Implemented in Rust (`crates/app/src/server`): + +- Search: + - `POST /v0/search/subjects` + - `POST /v0/search/characters` + - `POST /v0/search/persons` +- Subject read APIs: + - `GET /v0/subjects/{subject_id}` + - `GET /v0/subjects/{subject_id}/image` + - `GET /v0/subjects/{subject_id}/subjects` + - `GET /v0/subjects/{subject_id}/persons` + - `GET /v0/subjects/{subject_id}/characters` +- Character read/write APIs: + - `GET /v0/characters/{character_id}` + - `GET /v0/characters/{character_id}/image` + - `GET /v0/characters/{character_id}/subjects` + - `GET /v0/characters/{character_id}/persons` + - `POST /v0/characters/{character_id}/collect` + - `DELETE /v0/characters/{character_id}/collect` +- Person read APIs: + - `GET /v0/persons/{person_id}` + - `GET /v0/persons/{person_id}/image` + - `GET /v0/persons/{person_id}/subjects` + - `GET /v0/persons/{person_id}/characters` + +## Behavior and test parity notes + +- Request-scoped auth is resolved once in middleware and injected through request extensions (`RequestAuth`). 
+- OAuth token lookup no longer relies on SQL `CAST`; `user_id` is read as string and validated/parsed in Rust before member lookup. +- Route behavior parity tests are in place (`server_smoke`, `server_real_deps`) and currently passing with `cargo test -p app`. From 1b0c4913c292b731b00be14abfd7121908487b6b Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 20 Feb 2026 00:15:45 +0800 Subject: [PATCH 5/7] ci: update CI workflow to use cargo-nextest for testing --- .github/workflows/rust-ci.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rust-ci.yaml b/.github/workflows/rust-ci.yaml index 9489b23df..588f4114f 100644 --- a/.github/workflows/rust-ci.yaml +++ b/.github/workflows/rust-ci.yaml @@ -28,6 +28,9 @@ jobs: with: target: ${{ env.TARGET }} + - name: Install cargo-nextest + uses: taiki-e/install-action@nextest + - name: Check format run: cargo fmt --all -- --check @@ -35,7 +38,7 @@ jobs: run: cargo clippy --workspace --all-targets --locked - name: Run tests - run: cargo test --workspace --locked + run: cargo nextest run --workspace --locked - name: Build release binary run: cargo build --release --locked --target $TARGET -p app From 64af2d858a8ff61e2cd4cd476a55f9b334aff502 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 20 Feb 2026 00:24:16 +0800 Subject: [PATCH 6/7] ci: reorganize CI jobs for linting and testing, add cargo-nextest installation --- .github/workflows/rust-ci.yaml | 35 ++++++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/.github/workflows/rust-ci.yaml b/.github/workflows/rust-ci.yaml index 588f4114f..a74df1ccf 100644 --- a/.github/workflows/rust-ci.yaml +++ b/.github/workflows/rust-ci.yaml @@ -14,7 +14,7 @@ concurrency: cancel-in-progress: true jobs: - rust: + rust-lint: runs-on: ubuntu-24.04 env: TARGET: x86_64-unknown-linux-gnu @@ -28,24 +28,43 @@ jobs: with: target: ${{ env.TARGET }} - - name: Install cargo-nextest - uses: taiki-e/install-action@nextest - - 
name: Check format run: cargo fmt --all -- --check - name: Run clippy run: cargo clippy --workspace --all-targets --locked + rust-test: + runs-on: ubuntu-24.04 + env: + TARGET: x86_64-unknown-linux-gnu + steps: + - run: git clone https://github.com/bangumi/dev-env $HOME/dev-env --branch=gh-pages + - run: cd ~/dev-env && docker compose up -d + + - uses: actions/checkout@v6 + with: + submodules: recursive + + - name: Set up Rust toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + target: ${{ env.TARGET }} + + - name: Install cargo-nextest + uses: taiki-e/install-action@nextest + + - run: bash $HOME/dev-env/wait_mysql_ready.sh + - name: Run tests run: cargo nextest run --workspace --locked - - - name: Build release binary - run: cargo build --release --locked --target $TARGET -p app + env: + RUST_MYSQL_DSN: mysql://user:password@127.0.0.1:3306/bangumi + MYSQL_DSN: mysql://user:password@127.0.0.1:3306/bangumi docker: runs-on: ubuntu-24.04 - needs: rust + needs: [rust-lint, rust-test] env: TARGET: x86_64-unknown-linux-gnu steps: From 85e8a420672df225b82197908618d46773ec66d4 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 20 Feb 2026 00:39:49 +0800 Subject: [PATCH 7/7] feat: enhance type safety by introducing SubjectType and SubjectCollectionType enums, update deserialization for image types --- crates/app/src/server/characters.rs | 15 ++++- crates/app/src/server/mod.rs | 89 ++++++++++++++++++++++++++--- crates/app/src/server/persons.rs | 15 ++++- crates/app/src/server/subjects.rs | 21 +++++-- crates/app/src/server/users.rs | 27 ++------- 5 files changed, 127 insertions(+), 40 deletions(-) diff --git a/crates/app/src/server/characters.rs b/crates/app/src/server/characters.rs index d30b1ff9e..3e3e0ac40 100644 --- a/crates/app/src/server/characters.rs +++ b/crates/app/src/server/characters.rs @@ -5,7 +5,7 @@ use axum::{ response::Redirect, Json, }; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use 
sqlx::QueryBuilder; use std::collections::HashMap; use utoipa::ToSchema; @@ -140,10 +140,21 @@ struct RelatedPersonRow { #[derive(Debug, Deserialize, ToSchema)] pub(super) struct ImageQuery { - #[serde(rename = "type")] + #[serde(rename = "type", deserialize_with = "deserialize_image_type")] image_type: String, } +fn deserialize_image_type<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let value = String::deserialize(deserializer)?; + match value.as_str() { + "small" | "grid" | "large" | "medium" => Ok(value), + _ => Err(serde::de::Error::custom("invalid query param `type`")), + } +} + #[cfg_attr(test, automock)] #[async_trait] pub(super) trait CharacterImageRepo: Send + Sync { diff --git a/crates/app/src/server/mod.rs b/crates/app/src/server/mod.rs index b63a2c495..265239d77 100644 --- a/crates/app/src/server/mod.rs +++ b/crates/app/src/server/mod.rs @@ -73,6 +73,70 @@ pub(super) struct PageQuery { offset: Option, } +#[derive(Debug, Clone, Copy, Serialize, Deserialize, utoipa::ToSchema)] +#[serde(try_from = "u8", into = "u8")] +#[repr(u8)] +pub(super) enum SubjectType { + Book = 1, + Anime = 2, + Music = 3, + Game = 4, + Real = 6, +} + +impl TryFrom for SubjectType { + type Error = &'static str; + + fn try_from(value: u8) -> Result { + match value { + 1 => Ok(Self::Book), + 2 => Ok(Self::Anime), + 3 => Ok(Self::Music), + 4 => Ok(Self::Game), + 6 => Ok(Self::Real), + _ => Err("invalid subject type"), + } + } +} + +impl From for u8 { + fn from(value: SubjectType) -> Self { + value as u8 + } +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, utoipa::ToSchema)] +#[serde(try_from = "u8", into = "u8")] +#[repr(u8)] +pub(super) enum SubjectCollectionType { + Wish = 1, + Done = 2, + Doing = 3, + OnHold = 4, + Dropped = 5, +} + +impl TryFrom for SubjectCollectionType { + type Error = &'static str; + + fn try_from(value: u8) -> Result { + match value { + 1 => Ok(Self::Wish), + 2 => Ok(Self::Done), + 3 => Ok(Self::Doing), + 4 => 
Ok(Self::OnHold), + 5 => Ok(Self::Dropped), + _ => Err("invalid collection type"), + } + } +} + +impl From for u8 { + fn from(value: SubjectCollectionType) -> Self { + value as u8 + } +} + #[derive(Debug, Serialize, utoipa::ToSchema)] pub(super) struct ErrorBody { error: String, @@ -657,30 +721,37 @@ fn staff_map() -> &'static HashMap> { }) } -fn subject_type_string(subject_type: u8) -> &'static str { +fn subject_type_string(subject_type: SubjectType) -> &'static str { match subject_type { - 1 => "书籍", - 2 => "动画", - 3 => "音乐", - 4 => "游戏", - 6 => "三次元", - _ => "unknown subject type", + SubjectType::Book => "书籍", + SubjectType::Anime => "动画", + SubjectType::Music => "音乐", + SubjectType::Game => "游戏", + SubjectType::Real => "三次元", } } +fn subject_type_string_or_unknown(subject_type: u8) -> &'static str { + SubjectType::try_from(subject_type) + .map(subject_type_string) + .unwrap_or("unknown subject type") +} + pub(super) fn relation_string( destination_subject_type: u8, relation_type: u16, ) -> String { if relation_type == 1 { - return subject_type_string(destination_subject_type).to_string(); + return subject_type_string_or_unknown(destination_subject_type).to_string(); } relation_map() .get(&destination_subject_type) .and_then(|m| m.get(&relation_type)) .cloned() - .unwrap_or_else(|| subject_type_string(destination_subject_type).to_string()) + .unwrap_or_else(|| { + subject_type_string_or_unknown(destination_subject_type).to_string() + }) } pub(super) fn staff_string(subject_type: u8, staff_type: u16) -> String { diff --git a/crates/app/src/server/persons.rs b/crates/app/src/server/persons.rs index 1204eb46e..9249fe298 100644 --- a/crates/app/src/server/persons.rs +++ b/crates/app/src/server/persons.rs @@ -5,7 +5,7 @@ use axum::{ response::Redirect, Json, }; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use sqlx::QueryBuilder; use std::collections::HashMap; use utoipa::ToSchema; @@ -130,10 +130,21 @@ struct 
RelatedCharacterRow { #[derive(Debug, Deserialize, ToSchema)] pub(super) struct ImageQuery { - #[serde(rename = "type")] + #[serde(rename = "type", deserialize_with = "deserialize_image_type")] image_type: String, } +fn deserialize_image_type<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let value = String::deserialize(deserializer)?; + match value.as_str() { + "small" | "grid" | "large" | "medium" => Ok(value), + _ => Err(serde::de::Error::custom("invalid query param `type`")), + } +} + #[cfg_attr(test, automock)] #[async_trait] pub(super) trait PersonImageRepo: Send + Sync { diff --git a/crates/app/src/server/subjects.rs b/crates/app/src/server/subjects.rs index 3ec072b8d..a9d64b4bb 100644 --- a/crates/app/src/server/subjects.rs +++ b/crates/app/src/server/subjects.rs @@ -5,7 +5,7 @@ use axum::{ Json, }; use php_serialize::from_str as parse_php_serialize; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use sqlx::QueryBuilder; use std::collections::HashMap; use utoipa::ToSchema; @@ -21,7 +21,7 @@ use super::{ character_staff_string, execute_search, join_filter, parse_date_filter, parse_float_filter, parse_integer_filter, parse_page, platform_string, quote_str, relation_string, search_total, staff_string, ApiError, ApiResult, AppState, - MySqlExecutor, PageInfo, PageQuery, RequestAuth, + MySqlExecutor, PageInfo, PageQuery, RequestAuth, SubjectType, }; #[derive(Debug, Deserialize, Default, ToSchema)] @@ -36,7 +36,7 @@ pub(super) struct SubjectReq { #[derive(Debug, Deserialize, Default, ToSchema)] pub(super) struct SubjectFilter { #[serde(default)] - r#type: Vec, + r#type: Vec, #[serde(default)] tag: Vec, #[serde(default)] @@ -172,10 +172,21 @@ struct SubjectTagItem { #[derive(Debug, Deserialize, ToSchema)] pub(super) struct ImageQuery { - #[serde(rename = "type")] + #[serde(rename = "type", deserialize_with = "deserialize_image_type")] image_type: String, } +fn deserialize_image_type<'de, 
D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let value = String::deserialize(deserializer)?; + match value.as_str() { + "small" | "grid" | "large" | "medium" | "common" => Ok(value), + _ => Err(serde::de::Error::custom("invalid query param `type`")), + } +} + #[cfg_attr(test, automock)] #[async_trait] pub(super) trait SubjectImageRepo: Send + Sync { @@ -362,7 +373,7 @@ pub(super) async fn search_subjects( if !body.filter.r#type.is_empty() { let mut or_items = Vec::new(); for t in &body.filter.r#type { - or_items.push(format!("type = {t}")); + or_items.push(format!("type = {}", u8::from(*t))); } filters.push(format!("({})", or_items.join(" OR "))); } diff --git a/crates/app/src/server/users.rs b/crates/app/src/server/users.rs index 55f5bc36f..8ddd2d458 100644 --- a/crates/app/src/server/users.rs +++ b/crates/app/src/server/users.rs @@ -9,14 +9,15 @@ use utoipa::{IntoParams, ToSchema}; use super::media::{subject_image, SubjectImages}; use super::{ parse_page, ApiError, ApiResult, AppState, PageInfo, PageQuery, RequestAuth, + SubjectCollectionType, SubjectType, }; #[derive(Debug, Deserialize, IntoParams)] #[into_params(parameter_in = Query)] pub(super) struct UserCollectionsQuery { - subject_type: Option, + subject_type: Option, #[serde(rename = "type")] - collection_type: Option, + collection_type: Option, } #[derive(Debug, Serialize, ToSchema)] @@ -122,9 +123,9 @@ pub(super) async fn list_user_collections( query: Query, page: Query, ) -> ApiResult { + let subject_type = query.subject_type.map(u8::from); + let collection_type = query.collection_type.map(u8::from); let user_id = find_user_id_by_username(&state, &username).await?; - - let (subject_type, collection_type) = parse_collection_filters(&query.0)?; let (limit, offset) = parse_page(page); let show_private = auth.user_id == Some(user_id); @@ -196,24 +197,6 @@ async fn find_user_id_by_username( .ok_or_else(|| ApiError::not_found("user doesn't exist or has been removed")) } -fn 
parse_collection_filters( - query: &UserCollectionsQuery, -) -> Result<(Option, Option), ApiError> { - if let Some(subject_type) = query.subject_type { - if !matches!(subject_type, 1 | 2 | 3 | 4 | 6) { - return Err(ApiError::bad_request("invalid query param `subject_type`")); - } - } - - if let Some(collection_type) = query.collection_type { - if !(1..=5).contains(&collection_type) { - return Err(ApiError::bad_request("invalid query param `type`")); - } - } - - Ok((query.subject_type, query.collection_type)) -} - async fn count_subject_collections( state: &AppState, user_id: u32,