commit 9b608f1a5410a705710ed2f1d19cecb875ace5c0
Author: Hare
Date:   Sat Aug 30 02:58:52 2025 +0900

    add: 0.1.0 code from other project

diff --git a/.envrc b/.envrc
new file mode 100644
index 0000000..8392d15
--- /dev/null
+++ b/.envrc
@@ -0,0 +1 @@
+use flake
\ No newline at end of file
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..8f1def3
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,2354 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "addr2line"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "async-trait"
+version = "0.1.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "backtrace"
+version = "0.3.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
+dependencies = [
+ "addr2line",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bitflags"
+version = "2.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
+
+[[package]]
+name = "bytes"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
+
+[[package]]
+name = "cc"
+version = "1.2.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
+dependencies = [
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
+
+[[package]]
+name = "chrono"
+version = "0.4.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
+dependencies = [
+ "android-tzdata",
+ "iana-time-zone",
+ "js-sys",
+ "num-traits",
+ "serde",
+ "wasm-bindgen",
+ "windows-link",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "dirs"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
+dependencies = [
+ "libc",
+ "option-ext",
+ "redox_users",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dyn-clone"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "errno"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
+dependencies = [
+ "libc",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "eventsource-stream"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab"
+dependencies = [
+ "futures-core",
+ "nom",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+
+[[package]]
+name = "futures-task"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+
+[[package]]
+name = "futures-util"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.11.1+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "r-efi",
+ "wasi 0.14.3+wasi-0.2.4",
+]
+
+[[package]]
+name = "gimli"
+version = "0.31.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
+
+[[package]]
+name = "h2"
+version = "0.3.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d"
+dependencies = [
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "http",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "handlebars"
+version = "5.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b"
+dependencies = [
+ "log",
+ "pest",
+ "pest_derive",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+[[package]]
+name = "http"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
+dependencies = [
+ "bytes",
+ "http",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "httpdate"
+version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", 
+ "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "io-uring" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" +dependencies = [ + "bitflags 2.9.3", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "libredox" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +dependencies = [ + "bitflags 2.9.3", + "libc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pest" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" +dependencies = [ + "memchr", + "thiserror 2.0.16", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5" +dependencies = [ + "pest", + "sha2", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "potential_utf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +dependencies = [ + "zerovec", +] + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags 2.9.3", +] + +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.16", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "regex" +version = "1.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + 
+[[package]] +name = "regex-syntax" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustix" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +dependencies = [ + "bitflags 2.9.3", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.60.2", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn", +] + +[[package]] +name = "scopeguard" 
+version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.143" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.60.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +dependencies = [ + "thiserror-impl 2.0.16", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "slab", + "socket2 0.6.0", + "tokio-macros", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + 
"pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" +dependencies = [ + "getrandom 0.3.3", + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] 
+name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.3+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a51ae83037bdd272a9e28ce236db8c07016dd0d50c27038b3f407533c030c95" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = 
"0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.3", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "052283831dbae3d879dc7f51f3d92703a316ca49f91540417d38591826127814" + +[[package]] +name = "worker" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-stream", + "async-trait", + "bytes", + "chrono", + "dirs", + "eventsource-stream", + "futures", + "futures-util", + "handlebars", + "log", + "regex", + "reqwest", + "schemars", + "serde", + "serde_json", + "serde_yaml", + "strum", + "strum_macros", + "tempfile", + "thiserror 2.0.16", + "tokio", + "tokio-util", + "toml", + "tracing", + "tracing-subscriber", + "uuid", + "worker-macros", + "worker-types", + "xdg", +] + +[[package]] +name = "worker-macros" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "schemars", + "syn", + "tokio", + "worker-types", +] + +[[package]] +name = 
"worker-types" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "chrono", + "schemars", + "serde", + "serde_json", + "thiserror 2.0.16", + "tracing", + "uuid", +] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "xdg" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fb433233f2df9344722454bc7e96465c9d03bff9d77c248f9e7523fe79585b5" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..cc8b8cd --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,7 @@ +[workspace] +resolver = "2" +members = [ + "worker", + "worker-types", + "worker-macros", +] diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3cebd2b --- /dev/null +++ b/flake.lock @@ -0,0 +1,77 @@ +{ + "nodes": { + "flake-compat": { + "locked": { + "lastModified": 1747046372, + "narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + 
"nixpkgs": { + "locked": { + "lastModified": 1751011381, + "narHash": "sha256-krGXKxvkBhnrSC/kGBmg5MyupUUT5R6IBCLEzx9jhMM=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "30e2e2857ba47844aa71991daa6ed1fc678bcbb7", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..a2c6439 --- /dev/null +++ b/flake.nix @@ -0,0 +1,31 @@ +{ + inputs = { + nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + flake-compat.url = "github:edolstra/flake-compat"; + }; + + outputs = + { nixpkgs, flake-utils, ... }: + flake-utils.lib.eachDefaultSystem ( + system: + let + pkgs = import nixpkgs { inherit system; }; + in + { + devShells.default = pkgs.mkShell { + packages = with pkgs; [ + nixfmt + deno + git + rustc + cargo + ]; + buildInputs = with pkgs; [ + pkg-config + openssl + ]; + }; + } + ); +} diff --git a/worker-macros/Cargo.toml b/worker-macros/Cargo.toml new file mode 100644 index 0000000..668210c --- /dev/null +++ b/worker-macros/Cargo.toml @@ -0,0 +1,18 @@ + +[package] +name = "worker-macros" +version = "0.1.0" +edition = "2024" + +[lib] +proc-macro = true + +[dependencies] +syn = { version = "2.0", features = ["full"] } +quote = "1.0" +proc-macro2 = "1.0" +worker-types = { path = "../worker-types" } + +[dev-dependencies] +tokio = { version = "1.0", features = ["full"] } +schemars = "1.0.3" diff --git a/worker-macros/src/lib.rs b/worker-macros/src/lib.rs new file mode 100644 index 0000000..57b3aa5 --- /dev/null +++ b/worker-macros/src/lib.rs @@ -0,0 +1,327 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{ + Attribute, ItemFn, LitStr, + parse::{Parse, ParseStream}, + parse_macro_input, +}; + +struct ToolAttributeArgs { + name: Option, +} + +impl Parse for ToolAttributeArgs { + fn parse(input: ParseStream) -> syn::Result { + let mut name = None; + + if !input.is_empty() { + let name_ident: syn::Ident = input.parse()?; + if name_ident != "name" { + return Err(syn::Error::new_spanned( + name_ident, + "Only 'name' attribute is supported", + )); + } + input.parse::()?; + let name_str: LitStr = input.parse()?; + name = Some(name_str.value()); + } + + Ok(ToolAttributeArgs { name }) + } +} + +#[proc_macro_attribute] +pub fn tool(attr: TokenStream, item: TokenStream) -> TokenStream { + let args = parse_macro_input!(attr as ToolAttributeArgs); + let func = parse_macro_input!(item as ItemFn); + + let description = { + let doc_comments = extract_doc_comments(&func.attrs); + if doc_comments.is_empty() { + format!("Tool function: {}", func.sig.ident) + } else { + doc_comments + } + }; + + // Validate function signature + if let Err(e) = validate_function_signature(&func) { + return e.to_compile_error().into(); + } + + let fn_name = &func.sig.ident; + let fn_name_str = fn_name.to_string(); + + // Use provided name or fall back to CamelCase function name + let tool_name_str = 
args.name.unwrap_or_else(|| to_camel_case(&fn_name_str)); + + // Extract arg_type and output_type safely after validation + let arg_type = if let syn::FnArg::Typed(pat_type) = &func.sig.inputs[0] { + &pat_type.ty + } else { + // This case should be caught by validate_function_signature + return syn::Error::new_spanned(&func.sig.inputs[0], "Expected typed argument") + .to_compile_error() + .into(); + }; + + if let syn::ReturnType::Type(_, _) = &func.sig.output { + } else { + // This case should be caught by validate_function_signature + return syn::Error::new_spanned(&func.sig.output, "Expected return type") + .to_compile_error() + .into(); + }; + + // Generate struct name from function name (e.g., read_file -> ReadFileTool) + let tool_struct_name = { + let fn_name_str = fn_name.to_string(); + let camel_case = to_camel_case(&fn_name_str); + syn::Ident::new(&format!("{}Tool", camel_case), fn_name.span()) + }; + + let expanded = quote! { + // Keep the original function + #func + + // Generate Tool struct + pub struct #tool_struct_name; + + impl #tool_struct_name { + pub fn new() -> Self { + Self + } + } + + // Implement Tool trait + #[::worker_types::async_trait::async_trait] + impl ::worker_types::Tool for #tool_struct_name { + fn name(&self) -> &str { + #tool_name_str + } + + fn description(&self) -> &str { + #description + } + + fn parameters_schema(&self) -> ::worker_types::serde_json::Value { + ::worker_types::serde_json::to_value(::worker_types::schemars::schema_for!(#arg_type)).unwrap() + } + + async fn execute(&self, args: ::worker_types::serde_json::Value) -> ::worker_types::ToolResult<::worker_types::serde_json::Value> { + let typed_args: #arg_type = ::worker_types::serde_json::from_value(args)?; + let result = #fn_name(typed_args).await?; + // Use Display formatting instead of JSON serialization + let formatted_result = format!("{}", result); + Ok(::worker_types::serde_json::Value::String(formatted_result)) + } + } + + }; + + TokenStream::from(expanded) +} + +fn validate_function_signature(func: &ItemFn) -> syn::Result<()> { + if func.sig.asyncness.is_none() { + return Err(syn::Error::new_spanned( + &func.sig, + "Tool function must be async", + )); + } + + if func.sig.inputs.len() != 1 { + return Err(syn::Error::new_spanned( + &func.sig.inputs, + "Tool function must have exactly one argument", + )); + } + + let arg = &func.sig.inputs[0]; + if !matches!(arg, syn::FnArg::Typed(_)) { + return Err(syn::Error::new_spanned( + arg, + "Argument must be a typed pattern (e.g., `args: MyArgs`)", + )); + } + + if let syn::ReturnType::Default = func.sig.output { + return Err(syn::Error::new_spanned( + &func.sig, + "Tool function must have a return type, typically Result", + )); + } + + Ok(()) +} + +fn extract_doc_comments(attrs: &[Attribute]) -> String { + let mut doc_lines = Vec::new(); + + for attr in attrs { + if attr.path().is_ident("doc") { + if let syn::Meta::NameValue(meta) = &attr.meta { + if let syn::Expr::Lit(syn::ExprLit { + lit: syn::Lit::Str(lit_str), + .. 
+ }) = &meta.value + { + let content = lit_str.value(); + let trimmed = content.trim_start(); + doc_lines.push(trimmed.to_string()); + } + } + } + } + + if doc_lines.is_empty() { + return String::new(); + } + + doc_lines.join("\n").trim().to_string() +} + +fn to_camel_case(snake_case: &str) -> String { + snake_case + .split('_') + .map(|word| { + let mut chars = word.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(), + } + }) + .collect() +} + +// Hook attribute arguments parser +struct HookAttributeArgs { + hook_type: String, + matcher: Option<String>, +} + +impl Parse for HookAttributeArgs { + fn parse(input: ParseStream) -> syn::Result<Self> { + let mut hook_type = None; + let mut matcher = None; + + while !input.is_empty() { + let name: syn::Ident = input.parse()?; + input.parse::<syn::Token![=]>()?; + let value: LitStr = input.parse()?; + + match name.to_string().as_str() { + "hook_type" => hook_type = Some(value.value()), + "matcher" => matcher = Some(value.value()), + _ => return Err(syn::Error::new_spanned(name, "Unknown hook attribute")), + } + + if input.peek(syn::Token![,]) { + input.parse::<syn::Token![,]>()?; + } + } + + let hook_type = hook_type.ok_or_else(|| input.error("Hook type is required"))?; + + Ok(HookAttributeArgs { hook_type, matcher }) + } +} + +#[proc_macro_attribute] +pub fn hook(attr: TokenStream, item: TokenStream) -> TokenStream { + let args = parse_macro_input!(attr as HookAttributeArgs); + let func = parse_macro_input!(item as ItemFn); + + // Validate function signature for hooks + if let Err(e) = validate_hook_function_signature(&func) { + return e.to_compile_error().into(); + } + + let fn_name = &func.sig.ident; + let fn_name_str = fn_name.to_string(); + let hook_type = &args.hook_type; + let matcher = args.matcher.as_deref().unwrap_or(""); + + // Generate struct name from function name + let hook_struct_name = { + let fn_name_str = fn_name.to_string(); + let camel_case = to_camel_case(&fn_name_str); + // If the CamelCased name already ends with "Hook", strip it before appending "Hook" + let cleaned_name = if camel_case.ends_with("Hook") { + camel_case.strip_suffix("Hook").unwrap_or(&camel_case) + } else { + &camel_case + }; + syn::Ident::new(&format!("{}Hook", cleaned_name), fn_name.span()) + }; + + let expanded = quote! { + // Keep the original function + #func + + // Generate Hook struct + pub struct #hook_struct_name; + + impl #hook_struct_name { + pub fn new() -> Self { + Self + } + } + + // Implement WorkerHook trait + #[::worker_types::async_trait::async_trait] + impl ::worker_types::WorkerHook for #hook_struct_name { + fn name(&self) -> &str { + #fn_name_str + } + + fn hook_type(&self) -> &str { + #hook_type + } + + fn matcher(&self) -> &str { + #matcher + } + + async fn execute(&self, context: ::worker_types::HookContext) -> (::worker_types::HookContext, ::worker_types::HookResult) { + #fn_name(context).await + } + } + }; + + TokenStream::from(expanded) +} + +fn validate_hook_function_signature(func: &ItemFn) -> syn::Result<()> { + if func.sig.asyncness.is_none() { + return Err(syn::Error::new_spanned( + &func.sig, + "Hook function must be async", + )); + } + + if func.sig.inputs.len() != 1 { + return Err(syn::Error::new_spanned( + &func.sig.inputs, + "Hook function must have exactly one argument of type HookContext", + )); + } + + let arg = &func.sig.inputs[0]; + if !matches!(arg, syn::FnArg::Typed(_)) { + return Err(syn::Error::new_spanned( + arg, + "Argument must be a typed pattern (e.g., `context: HookContext`)", + )); + } + + if let syn::ReturnType::Default = func.sig.output { + return Err(syn::Error::new_spanned( + &func.sig, + "Hook function must return (HookContext, HookResult)", + )); + } + + Ok(()) +}
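For orientation, a minimal sketch of how this attribute is meant to be used, inferred from the validation and expansion logic above (the hook name, matcher, and body are hypothetical):

```rust
use worker_macros::hook;
use worker_types::{HookContext, HookResult};

/// Hypothetical hook that runs before file-related tools are invoked.
#[hook(hook_type = "PreToolUse", matcher = "read_file|write_file")]
async fn audit_hook(context: HookContext) -> (HookContext, HookResult) {
    // Must satisfy validate_hook_function_signature: async, exactly one
    // HookContext argument, and a non-unit return type.
    (context, HookResult::Continue)
}

// Expansion note: to_camel_case("audit_hook") is "AuditHook"; the macro strips
// the trailing "Hook" before re-appending it, so this generates a struct
// `AuditHook` that implements worker_types::WorkerHook.
```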
diff --git a/worker-types/Cargo.toml b/worker-types/Cargo.toml new file mode 100644 index 0000000..e525472 --- /dev/null +++ b/worker-types/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "worker-types" +version = "0.1.0" +edition = "2024" + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +schemars = "1.0.3" +async-trait = "0.1.88" +thiserror = "2.0.12" +anyhow = "1.0" +chrono = { version = "0.4", features = ["serde"] } +uuid = { version = "1.10", features = ["v4", "serde"] } +tracing = "0.1.40" diff --git a/worker-types/src/lib.rs b/worker-types/src/lib.rs new file mode 100644 index 0000000..5aef49a --- /dev/null +++ b/worker-types/src/lib.rs @@ -0,0 +1,1120 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use uuid; + +// Re-export for tool macros +pub use async_trait; +pub use schemars; +pub use serde_json; + +// Tool system types +pub type ToolResult<T> = Result<T, Box<dyn std::error::Error + Send + Sync>>; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Task { + pub messages: Vec<Message>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct Message { + pub role: Role, + pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option<Vec<ToolCall>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_call_id: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub metadata: Option<MessageMetadata>, + #[serde(skip_serializing_if = "Option::is_none")] + pub timestamp: Option<DateTime<Utc>>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct MessageMetadata { + #[serde(skip_serializing_if = "Option::is_none")] + pub model: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub provider: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub tokens: Option<TokenUsage>, + #[serde(skip_serializing_if = "Option::is_none")] + pub duration_ms: Option<u64>, + #[serde(skip_serializing_if = "Option::is_none")] + pub completion_tokens: Option<u32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt_tokens: Option<u32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_tokens: Option<u32>, +}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct TokenUsage { + #[serde(skip_serializing_if = "Option::is_none")] + pub input: Option<u32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub output: Option<u32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub total: Option<u32>, +} + +impl Default for MessageMetadata { + fn default() -> Self { + Self { + model: None, + provider: None, + tokens: None, + duration_ms: None, + completion_tokens: None, + prompt_tokens: None, + total_tokens: None, + } + } +} + +impl MessageMetadata { + pub fn new() -> Self { + Self::default() + } + + pub fn with_model(mut self, model: String, provider: String) -> Self { + self.model = Some(model); + self.provider = Some(provider); + self + } + + pub fn with_tokens(mut self, input: u32, output: u32) -> Self { + self.tokens = Some(TokenUsage { + input: Some(input), + output: Some(output), + total: Some(input + output), + }); + self.prompt_tokens = Some(input); + self.completion_tokens = Some(output); + self.total_tokens = Some(input + output); + self + } + + pub fn with_duration(mut self, duration_ms: u64) -> Self { + self.duration_ms = Some(duration_ms); + self + } +} + +impl Message { + /// Create a simple message without tool information + pub fn new(role: Role, content: String) -> Self { + Self { + role, + content, + tool_calls: None, + tool_call_id: None, + metadata: None, + timestamp: None, + } + } + + /// Create a message with tool calls + pub fn with_tool_calls(role: Role, content: String, tool_calls: Vec<ToolCall>) -> Self { + Self { + role, + content, + tool_calls: Some(tool_calls), + tool_call_id: None, + metadata: None, + timestamp: None, + } + } + + /// Create a message with metadata and timestamp + pub fn with_metadata(role: Role, content: String, metadata: MessageMetadata) -> Self { + Self { + role, + content, + tool_calls: None, + tool_call_id: None, + metadata: Some(metadata), + timestamp: Some(Utc::now()), + } + } + + /// Update metadata for existing message + pub fn set_metadata(&mut self, metadata: MessageMetadata) { + self.metadata = Some(metadata); + if self.timestamp.is_none() { + self.timestamp = Some(Utc::now()); + } + } + + /// Set timestamp for message + pub fn set_timestamp(&mut self, timestamp: DateTime<Utc>) { + self.timestamp = Some(timestamp); + } +}
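A quick usage sketch of the constructors and builder methods above (the model name and token counts are illustrative):

```rust
use worker_types::{Message, MessageMetadata, Role};

fn example() {
    // Builder-style metadata: with_tokens also fills the flattened
    // prompt/completion/total token fields.
    let meta = MessageMetadata::new()
        .with_model("gpt-4o".to_string(), "openai".to_string())
        .with_tokens(120, 48)
        .with_duration(950);

    // with_metadata stamps the message with the current time.
    let msg = Message::with_metadata(Role::Model, "Hello!".to_string(), meta);
    assert!(msg.timestamp.is_some());
}
```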
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum Role { + User, + Model, + Tool, + System, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)] +pub enum LlmProvider { + Gemini, + Claude, + OpenAI, + Ollama, + XAI, +} + +impl LlmProvider { + pub fn as_str(&self) -> &'static str { + match self { + LlmProvider::Gemini => "gemini", + LlmProvider::Claude => "claude", + LlmProvider::OpenAI => "openai", + LlmProvider::Ollama => "ollama", + LlmProvider::XAI => "xai", + } + } + + pub fn from_str(s: &str) -> Option<Self> { + match s.to_lowercase().as_str() { + "gemini" | "google" => Some(LlmProvider::Gemini), + "claude" | "anthropic" => Some(LlmProvider::Claude), + "openai" => Some(LlmProvider::OpenAI), + "ollama" => Some(LlmProvider::Ollama), + "xai" => Some(LlmProvider::XAI), + _ => None, + } + } + + pub fn requires_api_key(&self) -> bool { + match self { + LlmProvider::Gemini => true, + LlmProvider::Claude => true, + LlmProvider::OpenAI => true, + LlmProvider::Ollama => false, // Ollama runs locally and does not need an API key + LlmProvider::XAI => true, + } + } +} + +impl std::fmt::Display for LlmProvider { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LlmResponse { + pub content: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ToolCall { + pub name: String, + pub arguments: String, +} + +#[async_trait::async_trait] +pub trait Tool: Send + Sync { + /// Returns the tool name + fn name(&self) -> &str; + + /// Returns the tool description + fn description(&self) -> &str; + + /// Returns the JSON schema of the parameters + fn parameters_schema(&self) -> serde_json::Value; + + /// Executes the tool + async fn execute(&self, args: serde_json::Value) -> ToolResult<serde_json::Value>; +} + +#[derive(Debug, Clone)] +pub struct DynamicToolDefinition { + pub name: String, + pub description: String, + pub parameters_schema: serde_json::Value, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelInfo { + pub id: String, + pub name: String, + pub provider: LlmProvider, + pub supports_tools: bool, + pub supports_function_calling: bool, + pub supports_vision: bool, + pub supports_multimodal: bool, + pub context_length: Option<u32>, + pub training_cutoff: Option<String>, + pub capabilities: Vec<String>, + pub description: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LlmDebug { + pub enabled: bool, + pub show_request_body: bool, + pub show_response_body: bool, +} + +impl LlmDebug { + /// If debugging is enabled, emit the request JSON data + pub fn debug_request( + &self, + _model: &str, + title: &str, + data: &serde_json::Value, + ) -> Option<StreamEvent> { + if self.enabled && self.show_request_body { + Some(StreamEvent::DebugJson { + title: format!("{} Request", title), + data: data.clone(), + }) + } else { + None + } + } + + /// If debugging is enabled, emit the response JSON data + pub fn debug_response( + &self, + _model: &str, + title: &str, + data: &serde_json::Value, + ) -> Option<StreamEvent> { + if self.enabled && self.show_response_body { + Some(StreamEvent::DebugJson { + title: format!("{} Response", title), + data: data.clone(), + }) + } else { + None + } + } + + /// If debugging is enabled, emit arbitrary JSON data + pub fn debug_json(&self, title: &str, data: &serde_json::Value) -> Option<StreamEvent> { + if self.enabled { + Some(StreamEvent::DebugJson { + title: title.to_string(), + data: data.clone(), + }) + } else { + None + } + } +} + +#[derive(Debug, Clone)] +pub enum StreamEvent { + Chunk(String), + ToolCall(ToolCall), + ToolResult { + tool_name: String, + result: Result<String, String>, + }, + Completion(Message), + Error(String), + DebugRequest { + model: String, + body: String, + }, + DebugResponse { + model: String, + body: String, + }, + DebugJson { + title: String, + data: serde_json::Value, + }, + HookMessage { + hook_name: String, + content: String, + role: Role, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RoleConfig { + pub name: String, + pub description: String, + #[serde(default)] + pub version: Option<String>, + // New template-based configuration + pub template: Option<String>, + pub partials: Option<HashMap<String, PartialConfig>>, + // The legacy prompt field is also supported (for backward compatibility) + pub prompt: Option<PromptConfig>, + #[serde(skip)] + pub path: Option<std::path::PathBuf>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PartialConfig { + pub path: String, + #[serde(default)] + pub description: Option<String>, +} + +impl Default for RoleConfig { + fn default() -> Self { + Self { + name: String::new(), + description: String::new(), + version: None, + template: None, + partials: None, + prompt: None, + path: None, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(untagged)] +pub enum PromptComponent { + #[default] + None, + Single(PromptComponentDetail), + Multiple(Vec<PromptComponentDetail>), +}
#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct PromptComponentDetail { + pub path: String, + #[serde(flatten)] + pub inner: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct PromptConfig { + #[serde(rename = "ROLE_DEFINE")] + pub role_define: Option<PromptComponent>, + #[serde(rename = "BASIS")] + pub basis: Option<PromptComponent>, + #[serde(rename = "TOOL_USE")] + pub tool_use: Option<PromptComponent>, + #[serde(rename = "SECTIONS")] + pub sections: Option<PromptComponent>, +} + +// Session management types +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SessionData { + pub id: String, + pub model_provider: String, + pub model_name: String, + + // History data + pub display_history: Vec<DisplayEntry>, // Full history for UI display + pub context: Vec<Message>, // Abstracted messages for the Worker + + // Session information + pub workspace_path: String, + pub git_branch: Option<String>, + pub created: DateTime<Utc>, + pub modified: DateTime<Utc>, + pub title: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum DisplayEntry { + UserMessage { + id: String, + content: String, + timestamp: DateTime<Utc>, + user_annotations: Option<Vec<UserAnnotation>>, + }, + + AiResponse { + id: String, + content: String, + timestamp: DateTime<Utc>, + metadata: Option<MessageMetadata>, + status: ResponseStatus, + }, + + ToolUse { + id: String, + tool_name: String, + arguments: serde_json::Value, + timestamp: DateTime<Utc>, + status: ToolStatus, + }, + + ToolResult { + id: String, + tool_name: String, + tool_use_id: String, // ID of the corresponding ToolUse + result: serde_json::Value, + success: bool, + timestamp: DateTime<Utc>, + }, + + HookLog { + id: String, + hook_name: String, + hook_type: String, + message: String, + data: Option<serde_json::Value>, + timestamp: DateTime<Utc>, + log_level: LogLevel, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum ResponseStatus { + Streaming, + Complete, + Error(String), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum ToolStatus { + Calling, + Success, + Error(String), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum LogLevel { + Info, + Warning, + Error, + Debug, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserAnnotation { + pub id: String, + pub text: String, + pub created: DateTime<Utc>, + pub annotation_type: AnnotationType, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum AnnotationType { + Important, + Bookmark, + Note, + Question, +}
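Because `DisplayEntry` uses `#[serde(tag = "type")]`, each entry serializes as a JSON object whose `type` field names the variant. A small sketch (the id and content values are illustrative):

```rust
use chrono::Utc;
use worker_types::{serde_json, DisplayEntry, ResponseStatus};

fn example() {
    let entry = DisplayEntry::AiResponse {
        id: "ai_123".to_string(),
        content: "done".to_string(),
        timestamp: Utc::now(),
        metadata: None,
        status: ResponseStatus::Complete,
    };
    // Internally tagged enum: serializes roughly as
    // {"type":"AiResponse","id":"ai_123","content":"done",...}
    println!("{}", serde_json::to_string(&entry).unwrap());
}
```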
impl SessionData { + /// Create a new session + pub fn new( + id: String, + model_provider: String, + model_name: String, + workspace_path: String, + ) -> Self { + let now = Utc::now(); + Self { + id, + model_provider, + model_name, + display_history: Vec::new(), + context: Vec::new(), + workspace_path, + git_branch: None, + created: now, + modified: now, + title: None, + } + } + + /// Add a user message + pub fn add_user_message(&mut self, content: String) -> String { + let id = format!("user_{}", uuid::Uuid::new_v4()); + let timestamp = Utc::now(); + + // Add to the display history + let display_entry = DisplayEntry::UserMessage { + id: id.clone(), + content: content.clone(), + timestamp, + user_annotations: None, + }; + self.display_history.push(display_entry); + + // Add to the context (standard format for the Worker) + let context_message = Message { + role: Role::User, + content, + tool_calls: None, + tool_call_id: None, + metadata: None, + timestamp: Some(timestamp), + }; + self.context.push(context_message); + + self.modified = timestamp; + id + } + + /// Add an AI response (display_history only; the context is managed separately) + pub fn add_ai_response_display( + &mut self, + content: String, + metadata: Option<MessageMetadata>, + ) -> String { + let id = format!("ai_{}", uuid::Uuid::new_v4()); + let timestamp = Utc::now(); + + let display_entry = DisplayEntry::AiResponse { + id: id.clone(), + content, + timestamp, + metadata, + status: ResponseStatus::Complete, + }; + self.display_history.push(display_entry); + + self.modified = timestamp; + id + } + + /// Add an AI message to the context (including tool_calls) + pub fn add_ai_message_to_context( + &mut self, + content: String, + tool_calls: Option<Vec<ToolCall>>, + metadata: Option<MessageMetadata>, + ) { + let timestamp = Utc::now(); + + let context_message = Message { + role: Role::Model, + content, + tool_calls, + tool_call_id: None, + metadata, + timestamp: Some(timestamp), + }; + self.context.push(context_message); + + self.modified = timestamp; + } + + /// Record a tool use (display_history only) + pub fn add_tool_use(&mut self, tool_name: String, arguments: serde_json::Value) -> String { + let id = format!("tool_use_{}", uuid::Uuid::new_v4()); + let timestamp = Utc::now(); + + let display_entry = DisplayEntry::ToolUse { + id: id.clone(), + tool_name, + arguments, + timestamp, + status: ToolStatus::Calling, + }; + self.display_history.push(display_entry); + + self.modified = timestamp; + id + } + + /// Record a tool result + pub fn add_tool_result( + &mut self, + tool_name: String, + tool_use_id: String, + result: serde_json::Value, + success: bool, + ) -> String { + let id = format!("tool_result_{}", uuid::Uuid::new_v4()); + let timestamp = Utc::now(); + + // Add to the display history + let display_entry = DisplayEntry::ToolResult { + id: id.clone(), + tool_name: tool_name.clone(), + tool_use_id, + result: result.clone(), + success, + timestamp, + }; + self.display_history.push(display_entry); + + self.modified = timestamp; + id + } + + /// Add a tool result to the context + pub fn add_tool_result_to_context(&mut self, tool_call_id: String, content: String) { + let timestamp = Utc::now(); + + let context_message = Message { + role: Role::Tool, + content, + tool_calls: None, + tool_call_id: Some(tool_call_id), + metadata: None, + timestamp: Some(timestamp), + }; + self.context.push(context_message); + + self.modified = timestamp; + } + + /// Add a hook log (display_history only) + pub fn add_hook_log( + &mut self, + hook_name: String, + hook_type: String, + message: String, + data: Option<serde_json::Value>, + log_level: LogLevel, + ) -> String { + let id = format!("hook_{}", uuid::Uuid::new_v4()); + let timestamp = Utc::now(); + + let display_entry = DisplayEntry::HookLog { + id: id.clone(), + hook_name, + hook_type, + message, + data, + timestamp, + log_level, + }; + self.display_history.push(display_entry); + + self.modified = timestamp; + id + } + + /// Get the display history (for the UI) + pub fn get_display_history(&self) -> &Vec<DisplayEntry> { + &self.display_history + } + + /// Get the context for the Worker + pub fn get_context_for_worker(&self) -> &Vec<Message> { + &self.context + } + + /// Update an entry's status + pub fn update_response_status( + &mut self, + entry_id: &str, + status: ResponseStatus, + ) -> Result<(), String> { + if let Some(entry) = self.display_history.iter_mut().find(|e| match e { + DisplayEntry::AiResponse { id, .. } => id == entry_id, + _ => false, + }) { + if let DisplayEntry::AiResponse { status: s, .. } = entry { + *s = status; + self.modified = Utc::now(); + Ok(()) + } else { + Err("Entry is not an AI response".to_string()) + } + } else { + Err(format!("Entry with id {} not found", entry_id)) + } + }
/// Update a tool entry's status + pub fn update_tool_status(&mut self, entry_id: &str, status: ToolStatus) -> Result<(), String> { + if let Some(entry) = self.display_history.iter_mut().find(|e| match e { + DisplayEntry::ToolUse { id, .. } => id == entry_id, + _ => false, + }) { + if let DisplayEntry::ToolUse { status: s, .. } = entry { + *s = status; + self.modified = Utc::now(); + Ok(()) + } else { + Err("Entry is not a tool use".to_string()) + } + } else { + Err(format!("Entry with id {} not found", entry_id)) + } + } + + /// Add an annotation + pub fn add_annotation( + &mut self, + entry_id: &str, + annotation: UserAnnotation, + ) -> Result<(), String> { + if let Some(entry) = self.display_history.iter_mut().find(|e| match e { + DisplayEntry::UserMessage { id, .. } => id == entry_id, + _ => false, + }) { + if let DisplayEntry::UserMessage { + user_annotations, .. + } = entry + { + if user_annotations.is_none() { + *user_annotations = Some(Vec::new()); + } + user_annotations.as_mut().unwrap().push(annotation); + self.modified = Utc::now(); + Ok(()) + } else { + Err("Entry is not a user message".to_string()) + } + } else { + Err(format!("Entry with id {} not found", entry_id)) + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct WorkspaceConfig { + pub workspaces: HashMap<String, WorkspaceData>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct WorkspaceData { + pub sessions: Vec, + pub trusted: bool, + pub message_history: Vec, +} + +impl SessionData { + pub fn update_modified(&mut self) { + self.modified = Utc::now(); + } + + pub fn set_title(&mut self, title: String) { + self.title = Some(title); + self.update_modified(); + } +} + +// Hook system types +#[derive(Debug, Clone)] +pub struct HookContext { + pub content: String, + pub workspace_path: String, + pub message_history: Vec<Message>, + pub tools: Vec, + pub variables: HashMap<String, String>, +} + +impl HookContext { + pub fn new( + content: String, + workspace_path: String, + message_history: Vec<Message>, + tools: Vec, + ) -> Self { + Self { + content, + workspace_path, + message_history, + tools, + variables: HashMap::new(), + } + } + + /// Set the Worker execution context (internal use) + pub fn with_worker_executor<F>(self, _executor: F) -> Self + where + F: Fn( + &str, + serde_json::Value, + ) -> std::pin::Pin< + Box< + dyn std::future::Future<Output = Result<serde_json::Value, String>> + Send + '_, + >, + > + Send + + Sync + + 'static, + { + // Placeholder for future integration of the Worker's tool execution + self + } + + /// Run a command in the workspace + pub async fn run_command(&self, command: &str) -> Result<String, String> { + use std::process::Command; + + let output = Command::new("sh") + .arg("-c") + .arg(command) + .current_dir(&self.workspace_path) + .output() + .map_err(|e| format!("Failed to execute command: {}", e))?; + + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(String::from_utf8_lossy(&output.stderr).to_string()) + } + } + + /// Use a tool on the spot and keep the result in the context + pub async fn run_tool( + &mut self, + _tool_name: &str, + _args: serde_json::Value, + ) -> Result<serde_json::Value, String> { + // Note: a real implementation needs to call the Worker's execute_tool method; + // this is defined here as a placeholder + Err("Tool execution not implemented in HookContext yet".to_string()) + } + + /// Add a message to the context purely as a user submission + pub fn add_message(&mut self, content: String, role: Role) { + self.message_history.push(Message::new(role, content)); + } + + /// Rewrite the content + pub fn set_content(&mut self, new_content: String) { + self.content = new_content; + } + + /// Set a variable + pub fn set_variable(&mut self, key: String, value: String) { + self.variables.insert(key, value); + } + + /// Get a variable + pub fn get_variable(&self, key: &str) -> Option<&String> { + self.variables.get(key) + } +}
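A small sketch of how a hook body might use `run_command` and `set_variable` above (the command and variable name are illustrative):

```rust
use worker_types::HookContext;

async fn example(mut context: HookContext) {
    // Commands run via `sh -c` with the workspace as the working directory.
    match context.run_command("git rev-parse --abbrev-ref HEAD").await {
        Ok(stdout) => {
            context.set_variable("git_branch".to_string(), stdout.trim().to_string());
        }
        Err(stderr) => eprintln!("command failed: {}", stderr),
    }
}
```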
#[derive(Debug, Clone)] +pub enum HookResult { + /// Continue processing + Continue, + /// Modify the content and continue + ModifyContent(String), + /// Add a message and continue + AddMessage(String, Role), + /// Insert a message before the user message + AddPreMessage(String, Role), + /// Insert a message after the user message + AddPostMessage(String, Role), + /// Complete the turn immediately + Complete, + /// Show an error and end the turn + Error(String), +} + +#[async_trait::async_trait] +pub trait WorkerHook: Send + Sync { + /// Returns the hook name + fn name(&self) -> &str; + + /// Returns the hook type (OnTurnCompleted, OnMessageSend, PreToolUse, PostToolUse, etc.) + fn hook_type(&self) -> &str; + + /// Returns the matcher pattern (e.g., a tool-name pattern) + fn matcher(&self) -> &str; + + /// Executes the hook + async fn execute(&self, context: HookContext) -> (HookContext, HookResult); +} + +/// Hook event types +#[derive(Debug, Clone, PartialEq)] +pub enum HookEvent { + OnMessageSend, + PreToolUse(String), // tool name + PostToolUse(String), // tool name + OnTurnCompleted, +} + +/// Hook manager +pub struct HookManager { + hooks: Vec<Box<dyn WorkerHook>>, +} + +impl HookManager { + pub fn new() -> Self { + Self { hooks: Vec::new() } + } + + /// Register a hook + pub fn register_hook(&mut self, hook: Box<dyn WorkerHook>) { + self.hooks.push(hook); + } + + /// Register multiple hooks at once + pub fn register_hooks(&mut self, hooks: Vec<Box<dyn WorkerHook>>) { + self.hooks.extend(hooks); + } + + /// Execute the hooks matching the given event type + pub async fn execute_hooks( + &self, + event: HookEvent, + mut context: HookContext, + ) -> (HookContext, Vec<HookResult>, Vec<(String, String, Role)>) { + tracing::info!( + "HookManager::execute_hooks called with event: {:?}, total hooks: {}", + event, + self.hooks.len() + ); + let mut results = Vec::new(); + let mut additional_messages = Vec::new(); + + // Set the tool name on the context + match &event { + HookEvent::PreToolUse(tool_name) | HookEvent::PostToolUse(tool_name) => { + context.set_variable("current_tool".to_string(), tool_name.clone()); + } + _ => {} + } + + for (i, hook) in self.hooks.iter().enumerate() { + // Check whether the hook type matches + let should_execute = match &event { + HookEvent::OnMessageSend => hook.hook_type() == "OnMessageSend", + HookEvent::PreToolUse(tool_name) => { + hook.hook_type() == "PreToolUse" + && self.matches_pattern(hook.matcher(), tool_name) + } + HookEvent::PostToolUse(tool_name) => { + hook.hook_type() == "PostToolUse" + && self.matches_pattern(hook.matcher(), tool_name) + } + HookEvent::OnTurnCompleted => { + hook.hook_type() == "OnTurnCompleted" + || (hook.hook_type() == "OnTurnCompleted" && hook.matcher().is_empty()) + } + }; + + tracing::debug!( + "Hook {}: name='{}', type='{}', matcher='{}', should_execute={}", + i, + hook.name(), + hook.hook_type(), + hook.matcher(), + should_execute + ); + + if should_execute { + tracing::info!( + "Executing hook '{}' (type: {})", + hook.name(), + hook.hook_type() + ); + + // Record the message count before running the hook + let before_message_count = context.message_history.len(); + tracing::debug!( + "Before hook execution: message_count={}", + before_message_count + ); + + // Run the hook (consumes the context; returns the modified context and the result) + let hook_context = context.clone(); + let (updated_context, result) = hook.execute(hook_context).await; + tracing::info!("Hook '{}' returned result: {:?}", hook.name(), result); + + // Check the message count after running the hook + let after_message_count = updated_context.message_history.len(); + tracing::debug!( + "After hook execution: message_count={}", + after_message_count + ); + + // Detect and record messages added by the hook + if after_message_count > before_message_count { + for new_message in updated_context + .message_history + .iter() + .skip(before_message_count) + { + tracing::info!( + "Hook '{}' added message: {} chars, role={:?}", + hook.name(), + new_message.content.len(), + new_message.role + ); + // Record for TUI stream events + 
additional_messages.push(( + hook.name().to_string(), + new_message.content.clone(), + new_message.role.clone(), + )); + } + } + + // Reflect the context modified by the hook back into the original context + context = updated_context; + + // Update the context according to the result + match &result { + HookResult::ModifyContent(new_content) => { + context.set_content(new_content.clone()); + } + HookResult::AddMessage(message, role) => { + context.add_message(message.clone(), role.clone()); + additional_messages.push(( + hook.name().to_string(), + message.clone(), + role.clone(), + )); + } + HookResult::AddPreMessage(message, role) + | HookResult::AddPostMessage(message, role) => { + // Pre and Post messages are processed later; just record them in additional_messages here + additional_messages.push(( + hook.name().to_string(), + message.clone(), + role.clone(), + )); + } + HookResult::Complete | HookResult::Error(_) => { + results.push(result); + break; // Do not run any further hooks + } + _ => {} + } + + results.push(result); + } + } + + tracing::info!( + "HookManager::execute_hooks completed. Total results: {}, final message count: {}, additional_messages: {}", + results.len(), + context.message_history.len(), + additional_messages.len() + ); + + (context, results, additional_messages) + } + + /// Perform pattern matching (naive implementation) + fn matches_pattern(&self, pattern: &str, target: &str) -> bool { + if pattern.is_empty() { + return true; + } + + // Check patterns separated by "|" + pattern + .split('|') + .any(|p| p.trim() == target || target.contains(p.trim())) + } + + /// Get the number of registered hooks + pub fn hook_count(&self) -> usize { + self.hooks.len() + } + + /// List the registered hooks + pub fn list_hooks(&self) -> Vec<(&str, &str, &str)> { + self.hooks + .iter() + .map(|hook| (hook.name(), hook.hook_type(), hook.matcher())) + .collect() + } +} diff --git a/worker/Cargo.toml b/worker/Cargo.toml new file mode 100644 index 0000000..dcc9ca5 --- /dev/null +++ b/worker/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "worker" +version = "0.1.0" +edition = "2024" + +[dependencies] +worker-types = { path = "../worker-types" } +worker-macros = { path = "../worker-macros" } +schemars = "1.0.3" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tokio = { version = "1", features = ["full"] } +anyhow = "1.0" +reqwest = { version = "0.11", default-features = false, features = [ + "json", + "rustls-tls", + "stream", +] } +toml = "0.8" +thiserror = "2.0.12" +futures-util = "0.3" +async-stream = "0.3" +bytes = "1" +async-trait = "0.1.88" +serde_yaml = "0.9.33" +log = "0.4" +dirs = "6.0.0" +strum = { version = "0.27.1", features = ["derive"] } +strum_macros = "0.27.1" +tracing = "0.1.40" +eventsource-stream = "0.2.3" +xdg = "3.0.0" +chrono = { version = "0.4", features = ["serde"] } +handlebars = "5.1.2" +regex = "1.10.2" +uuid = { version = "1.10", features = ["v4", "serde"] } +tokio-util = { version = "0.7", features = ["codec"] } +futures = "0.3" + +[dev-dependencies] +tempfile = "3.10.1" +tracing-subscriber = "0.3" diff --git a/worker/README.md b/worker/README.md new file mode 100644 index 0000000..94e5053 --- /dev/null +++ b/worker/README.md @@ -0,0 +1,150 @@ +# The `worker` crate + +The `worker` crate is the core component that provides backend functionality for applications built on large language models (LLMs). It encapsulates advanced features such as LLM provider abstraction, tool use, flexible prompt management, and a hook system, simplifying application development. + +## Key Features + +- **Multi-provider support**: Use multiple LLM providers such as Gemini, Claude, OpenAI, Ollama, and XAI through a unified interface. +- **Tool use (function calling)**: Supports letting the LLM call external tools. You can easily define your own tools and register them with the `Worker`. +- **Streaming**: Receive LLM responses and tool execution results asynchronously as `StreamEvent`s, enabling real-time UI updates. +- **Hook system**: Lets you inject custom logic at specific points in the `Worker`'s processing flow (e.g., before a message is sent, after a tool is used). +- **Session management**: Manages and persists conversation history and workspace state. +- **Flexible prompt management**: Dynamically builds system prompts for a given role and context using configuration files. + +## Core Concepts + +### `Worker` +The central struct of this crate. It is responsible for all major functionality: interacting with the LLM, registering and executing tools, session management, and more. + +### `LlmProvider` +An enum representing the supported LLM providers (`Gemini`, `Claude`, `OpenAI`, etc.). + +### The `Tool` trait +The interface for defining tools that the `Worker` can use. By implementing this trait, you can add arbitrary functionality to the `Worker` as a tool. + +```rust +pub trait Tool: Send + Sync { + fn name(&self) -> &str; + fn description(&self) -> &str; + fn parameters_schema(&self) -> serde_json::Value; + async fn execute(&self, args: serde_json::Value) -> ToolResult<serde_json::Value>; +} +```
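This commit's `worker-macros` crate can also generate such an implementation from a plain async function via the `#[tool]` attribute; a hedged sketch based on the macro code above (the function, argument type, and tool name are hypothetical):

```rust
use schemars::JsonSchema;
use worker_macros::tool;

#[derive(serde::Deserialize, JsonSchema)]
struct EchoArgs {
    text: String,
}

/// Echoes the given text back to the model.
#[tool(name = "Echo")]
async fn echo(args: EchoArgs) -> anyhow::Result<String> {
    Ok(args.text)
}

// Expands to a struct `EchoTool` implementing `Tool`: the description is taken
// from the doc comment, and the parameters schema is derived from `EchoArgs`.
```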
### The `WorkerHook` trait +The interface for defining hooks that intervene in the `Worker`'s lifecycle events. You can attach processing to specific events (e.g., `OnMessageSend`, `PostToolUse`). + +### `StreamEvent` +An enum for receiving the `Worker`'s processing results as an asynchronous stream. It represents various events such as LLM response chunks, tool calls, and errors. + +## Integrating into an Application + +### 1. Initializing the Worker + +First, create a `Worker` instance. This requires an `LlmProvider`, a model name, and API keys. + +```rust +use worker::{Worker, LlmProvider}; +use std::collections::HashMap; + +// Prepare API keys +let mut api_keys = HashMap::new(); +api_keys.insert("openai".to_string(), "your_openai_api_key".to_string()); +api_keys.insert("claude".to_string(), "your_claude_api_key".to_string()); + +// Create the Worker +let mut worker = Worker::new( + LlmProvider::OpenAI, + "gpt-4o", + &api_keys, + None // RoleConfig is optional +).expect("Failed to create Worker"); +``` + +### 2. Defining and Registering Tools + +Implement the `Tool` trait to create a custom tool and register it with the `Worker`. + +```rust +use worker::{Tool, ToolResult}; +use worker::schemars::{self, JsonSchema}; +use worker::serde_json::{self, json, Value}; +use async_trait::async_trait; + +// Define the tool's arguments +#[derive(Debug, serde::Deserialize, JsonSchema)] +struct FileSystemToolArgs { + path: String, +} + +// Define a custom tool +struct ListFilesTool; + +#[async_trait] +impl Tool for ListFilesTool { + fn name(&self) -> &str { "list_files" } + fn description(&self) -> &str { "Lists the files at the given path" } + + fn parameters_schema(&self) -> Value { + serde_json::to_value(schemars::schema_for!(FileSystemToolArgs)).unwrap() + } + + async fn execute(&self, args: Value) -> ToolResult<Value> { + let tool_args: FileSystemToolArgs = serde_json::from_value(args)?; + // Implement the actual file listing logic here + let files = vec!["file1.txt", "file2.txt"]; + Ok(json!({ "files": files })) + } +} + +// Register the tool with the Worker +worker.register_tool(Box::new(ListFilesTool)).unwrap(); +``` + +### 3. Running a Conversation + +Call the `process_task_with_history` method to process a user message. This method returns a stream of events. + +```rust +use futures_util::StreamExt; + +let user_message = "List the files in the current directory".to_string(); + +let mut stream = worker.process_task_with_history(user_message, None).await; + +while let Some(event_result) = stream.next().await { + match event_result { + Ok(event) => { + // Handle each StreamEvent + match event { + worker::StreamEvent::Chunk(chunk) => { + print!("{}", chunk); + } + worker::StreamEvent::ToolCall(tool_call) => { + println!("\n[Tool Call: {} with args {}]", tool_call.name, tool_call.arguments); + } + worker::StreamEvent::ToolResult { tool_name, result } => { + println!("\n[Tool Result: {} -> {:?}]", tool_name, result); + } + _ => {} + } + } + Err(e) => { + eprintln!("\n[Error: {}]", e); + break; + } + } +} +``` + +### 4. (Optional) Registering Hooks + +You can customize the processing flow by implementing the `WorkerHook` trait to create a custom hook and registering it with the `Worker`. + +```rust +// (WorkerHook implementation omitted; a sketch follows below) +// let my_hook = MyCustomHook::new(); +// worker.register_hook(Box::new(my_hook)); +```
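The README leaves the hook implementation out; a minimal sketch of what one could look like, based on the `WorkerHook` trait defined in `worker-types` (the struct name and behavior are hypothetical):

```rust
use worker_types::{async_trait::async_trait, HookContext, HookResult, WorkerHook};

struct MyCustomHook;

#[async_trait]
impl WorkerHook for MyCustomHook {
    fn name(&self) -> &str { "my_custom_hook" }
    fn hook_type(&self) -> &str { "OnMessageSend" }
    fn matcher(&self) -> &str { "" } // an empty matcher matches everything

    async fn execute(&self, context: HookContext) -> (HookContext, HookResult) {
        // Pass the message through unchanged.
        (context, HookResult::Continue)
    }
}
```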
+ +With this, you can build powerful, `Worker`-centered LLM integration features tailored to your application's requirements. diff --git a/worker/src/config_parser.rs b/worker/src/config_parser.rs new file mode 100644 index 0000000..7fd145d --- /dev/null +++ b/worker/src/config_parser.rs @@ -0,0 +1,110 @@ +use crate::prompt_types::*; +use std::fs; +use std::path::Path; + +/// Parser for configuration files +pub struct ConfigParser; + +impl ConfigParser { + /// Load and parse a YAML configuration file + pub fn parse_from_file<P: AsRef<Path>>(path: P) -> Result<PromptRoleConfig, PromptError> { + let content = fs::read_to_string(path.as_ref()).map_err(|e| { + PromptError::FileNotFound(format!("{}: {}", path.as_ref().display(), e)) + })?; + + Self::parse_from_string(&content) + } + + /// Parse a YAML string into a PromptRoleConfig + pub fn parse_from_string(content: &str) -> Result<PromptRoleConfig, PromptError> { + let config: PromptRoleConfig = serde_yaml::from_str(content)?; + + // Basic validation + Self::validate_config(&config)?; + + Ok(config) + } + + /// Basic validation of the configuration file + fn validate_config(config: &PromptRoleConfig) -> Result<(), PromptError> { + if config.name.is_empty() { + return Err(PromptError::VariableResolution( + "name field cannot be empty".to_string(), + )); + } + + if config.template.is_empty() { + return Err(PromptError::TemplateCompilation( + "template field cannot be empty".to_string(), + )); + } + + // Validate partial paths + if let Some(partials) = &config.partials { + for (name, partial) in partials { + if partial.path.is_empty() { + return Err(PromptError::PartialLoading(format!( + "partial '{}' has empty path", + name + ))); + } + } + } + + Ok(()) + } + + /// Resolve path prefixes + pub fn resolve_path(path_str: &str) -> Result<std::path::PathBuf, PromptError> { + if path_str.starts_with("#nia/") { + // Built-in resources + let relative_path = path_str.strip_prefix("#nia/").unwrap(); + let project_root = std::env::current_dir() + .map_err(|e| PromptError::WorkspaceDetection(e.to_string()))?; + + // Priority: ./resources > ./nia-cli/resources > ../nia-cli/resources + let possible_paths = [ + project_root.join("resources").join(relative_path), + project_root + .join("nia-cli") + .join("resources") + .join(relative_path), + project_root + .parent() + .unwrap_or(&project_root) + .join("nia-cli") + .join("resources") + .join(relative_path), + ]; + + for path in &possible_paths { + if path.exists() { + return Ok(path.clone()); + } + } + + // If nothing is found, return the default path + Ok(project_root + .join("nia-cli") + .join("resources") + .join(relative_path)) + } else if path_str.starts_with("#workspace/") { + // Workspace-specific + let relative_path = path_str.strip_prefix("#workspace/").unwrap(); + let project_root = std::env::current_dir() + .map_err(|e| PromptError::WorkspaceDetection(e.to_string()))?; + Ok(project_root.join(".nia").join(relative_path)) + } else if path_str.starts_with("#user/") { + // User configuration + let relative_path = path_str.strip_prefix("#user/").unwrap(); + let base_dirs = xdg::BaseDirectories::with_prefix("nia"); + let config_home = base_dirs.get_config_home().ok_or_else(|| { + PromptError::WorkspaceDetection("Could not determine XDG config home".to_string()) + })?; + Ok(config_home.join(relative_path)) + } else { + // Relative or absolute path + Ok(std::path::PathBuf::from(path_str)) + } + } +}
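To make the prefix scheme concrete, a small sketch of how the three prefixes are meant to resolve (the path and output are illustrative; actual results depend on the current directory and XDG environment):

```rust
use worker::config_parser::ConfigParser;

fn example() {
    // "#nia/..."       -> bundled resources (./resources, ./nia-cli/resources, ...)
    // "#workspace/..." -> <current dir>/.nia/...
    // "#user/..."      -> XDG config home, e.g. ~/.config/nia/...
    let p = ConfigParser::resolve_path("#workspace/roles/default.yaml")
        .expect("failed to resolve path");
    println!("{}", p.display()); // e.g. /path/to/project/.nia/roles/default.yaml
}
```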
diff --git a/worker/src/lib.rs b/worker/src/lib.rs new file mode 100644 index 0000000..b21c081 --- /dev/null +++ b/worker/src/lib.rs @@ -0,0 +1,2090 @@ +use crate::prompt_composer::PromptComposer; +use crate::prompt_types::*; +use crate::workspace_detector::WorkspaceDetector; +use async_stream::stream; +use futures_util::{Stream, StreamExt}; +use llm::{ + anthropic::AnthropicClient, gemini::GeminiClient, ollama::OllamaClient, openai::OpenAIClient, + xai::XAIClient, +}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fs; +use std::path::PathBuf; +use thiserror::Error; +use tracing; +use uuid; +pub use worker_types::{ + DynamicToolDefinition, HookContext, HookEvent, HookManager, HookResult, LlmDebug, LlmProvider, + LlmResponse, Message, ModelInfo, PartialConfig, PromptComponent, PromptComponentDetail, + PromptConfig, Role, RoleConfig, SessionData, StreamEvent, Task, Tool, ToolCall, ToolResult, + WorkerHook, WorkspaceConfig, WorkspaceData, +}; + +pub mod config_parser; +pub mod llm; +pub mod mcp_config; +pub mod mcp_protocol; +pub mod mcp_tool; +pub mod prompt_composer; +pub mod prompt_types; +pub mod types; +pub mod url_config; +pub mod workspace_detector; + +#[cfg(test)] +mod tests { + mod config_tests; + // mod integration_tests; // Temporarily disabled due to missing dependencies +} + +// Re-export for tool macros +pub use schemars; +pub use serde_json; + +// Re-export MCP functionality +pub use mcp_config::{IntegrationMode, McpConfig, McpServerDefinition}; +pub use mcp_tool::{ + McpDynamicTool, McpServerConfig, SingleMcpTool, create_single_mcp_tools, + get_mcp_tools_as_definitions, test_mcp_connection, +}; + +pub fn generate_tools_schema(provider: &LlmProvider, tools: &[Box<dyn Tool>]) -> serde_json::Value { + generate_tools_schema_from_definitions( + provider, + &tools + .iter() + .map(|tool| DynamicToolDefinition { + name: tool.name().to_string(), + description: tool.description().to_string(), + parameters_schema: tool.parameters_schema(), + }) + .collect::<Vec<_>>(), + ) +} + +/// Generate a tools schema from tool definitions +fn generate_tools_schema_from_definitions( + provider: &LlmProvider, + tool_definitions: &[DynamicToolDefinition], +) -> serde_json::Value { + let tool_definitions: Vec<serde_json::Value> = tool_definitions + .iter() + .map(|tool| { + let parameters_schema = tool.parameters_schema.clone(); + match provider { + LlmProvider::Gemini => { + serde_json::json!({ + "name": tool.name, + "description": tool.description, + "parameters": parameters_schema + }) + } + LlmProvider::Claude => { + serde_json::json!({ + "name": tool.name, + "description": tool.description, + "input_schema": parameters_schema + }) + } + LlmProvider::OpenAI | LlmProvider::Ollama | LlmProvider::XAI => { + serde_json::json!({ + "type": "function", + "function": { + "name": tool.name, + "description": tool.description, + "parameters": parameters_schema + } + }) + } + } + }) + .collect(); + + match provider { + LlmProvider::Gemini => { + serde_json::json!({ + "tools": [{ + "functionDeclarations": tool_definitions + }] + }) + } + _ => { + serde_json::json!(tool_definitions) + } + } +} + +pub use crate::types::WorkerError; + +impl WorkerError { + /// Check if this error is likely an authentication/API key error + pub fn is_authentication_error(&self) -> bool { + matches!(self, WorkerError::General(_)) + } + + /// Convert a generic error to a WorkerError, detecting authentication issues + pub fn from_api_error(error: String, provider: &LlmProvider) -> Self { + if Self::is_likely_auth_error(&error, provider) { + WorkerError::Config(error) + } else { + WorkerError::Network(error) + } + } + + /// Comprehensive authentication error detection + /// Many APIs return 400 Bad Request for invalid API keys instead of proper 401/403 + fn is_likely_auth_error(error_msg: 
&str, provider: &LlmProvider) -> bool { + let error_msg = error_msg.to_lowercase(); + tracing::debug!( + "is_likely_auth_error: Checking error message for provider {:?}: {}", + provider, + error_msg + ); + + // Standard auth error codes + let has_auth_status = error_msg.contains("unauthorized") + || error_msg.contains("forbidden") + || error_msg.contains("401") + || error_msg.contains("403"); + + // API key related error messages + let has_api_key_error = error_msg.contains("api key") + || error_msg.contains("invalid key") + || error_msg.contains("authentication") + || error_msg.contains("token"); + + // Bad request that might be auth related + let has_bad_request = error_msg.contains("400") || error_msg.contains("bad request"); + + // Common API key error patterns (case insensitive) + let has_key_patterns = error_msg.contains("incorrect api key") + || error_msg.contains("invalid api key") + || error_msg.contains("api key not found") + || error_msg.contains("missing api key") + || error_msg.contains("api key required") + || error_msg.contains("authentication failed") + || error_msg.contains("access denied") + || error_msg.contains("insufficient permissions") + || error_msg.contains("quota exceeded") + || error_msg.contains("rate limit") + || error_msg.contains("expired") + || error_msg.contains("revoked") + || error_msg.contains("suspended") + // Generic "invalid" but exclude credit balance specific messages + || (error_msg.contains("invalid") && !error_msg.contains("credit balance")); + + // Exclude credit balance errors - these are not authentication errors + let is_credit_balance_error = error_msg.contains("credit balance") + || error_msg.contains("billing") + || error_msg.contains("upgrade") + || error_msg.contains("purchase credits") + // Also exclude Anthropic's specific credit balance error pattern + || (error_msg.contains("invalid_request_error") && error_msg.contains("credit balance")); + + // Provider-specific patterns + let has_provider_patterns = match provider { + LlmProvider::OpenAI => { + error_msg.contains("invalid_api_key") + || (error_msg.contains("invalid_request_error") + && !error_msg.contains("credit balance")) + } + LlmProvider::Claude => { + // Anthropic specific auth error patterns + (error_msg.contains("invalid_x_api_key") || error_msg.contains("x-api-key")) + // But exclude credit balance issues which are not auth errors + && !error_msg.contains("credit balance") + && !error_msg.contains("billing") + && !error_msg.contains("upgrade") + && !error_msg.contains("purchase credits") + } + LlmProvider::Gemini => { + error_msg.contains("invalid_argument") || error_msg.contains("credentials") + } + LlmProvider::Ollama => false, // Ollama typically doesn't have API keys + LlmProvider::XAI => { + error_msg.contains("invalid_api_key") || error_msg.contains("unauthorized") + } + }; + + // Generic patterns + let has_generic_patterns = + error_msg.contains("credentials") || error_msg.contains("authorization"); + + // Provider-specific bad request handling + // Some providers return 400 for auth issues instead of proper status codes + let provider_specific_bad_request = match provider { + LlmProvider::OpenAI => { + has_bad_request + && (error_msg.contains("invalid") + || error_msg.contains("api") + || error_msg.contains("key")) + } + LlmProvider::Claude => { + has_bad_request + && (error_msg.contains("invalid") + || error_msg.contains("x-api-key") + || error_msg.contains("authentication")) + } + LlmProvider::Gemini => { + has_bad_request + && (error_msg.contains("invalid") + || 
error_msg.contains("credentials") + || error_msg.contains("key")) + } + LlmProvider::Ollama => false, + LlmProvider::XAI => { + has_bad_request + && (error_msg.contains("invalid") + || error_msg.contains("api") + || error_msg.contains("key") + || error_msg.contains("unauthorized")) + } + }; + + // If it's a bad request with auth-related keywords, treat as auth error + // This handles APIs that incorrectly return 400 for auth issues + // But exclude credit balance errors which are not authentication issues + let result = (has_auth_status + || has_api_key_error + || has_key_patterns + || has_provider_patterns + || has_generic_patterns + || provider_specific_bad_request) + && !is_credit_balance_error; + + tracing::debug!( + "is_likely_auth_error result: {} (credit_balance_error: {})", + result, + is_credit_balance_error + ); + result + } +} + +pub trait LlmProviderExt { + fn create_client(&self, model_name: &str, api_key: &str) -> Result; +} + +impl LlmProviderExt for LlmProvider { + fn create_client(&self, model_name: &str, api_key: &str) -> Result { + match self { + LlmProvider::Gemini => Ok(LlmClient::Gemini(GeminiClient::new(api_key, model_name))), + LlmProvider::Claude => Ok(LlmClient::Anthropic(AnthropicClient::new( + api_key, model_name, + ))), + LlmProvider::Ollama => Ok(LlmClient::Ollama(OllamaClient::new_with_key( + api_key, model_name, + ))), + LlmProvider::OpenAI => Ok(LlmClient::OpenAI(OpenAIClient::new(api_key, model_name))), + LlmProvider::XAI => Ok(LlmClient::XAI(XAIClient::new(api_key, model_name))), + } + } +} + +pub fn get_supported_providers() -> Vec { + vec![ + LlmProvider::Gemini.to_string(), + LlmProvider::Claude.to_string(), + LlmProvider::Ollama.to_string(), + LlmProvider::OpenAI.to_string(), + LlmProvider::XAI.to_string(), + ] +} + +/// Validate if a provider name is supported +pub fn is_provider_supported(provider_name: &str) -> bool { + LlmProvider::from_str(provider_name).is_some() +} + +/// Validate API key for a specific provider +/// Returns: +/// - Some(true): API key is valid +/// - Some(false): API key is invalid +/// - None: Unable to validate (e.g., no official validation endpoint) +pub async fn validate_api_key( + provider: LlmProvider, + api_key: &str, +) -> Result, WorkerError> { + if !provider.requires_api_key() { + return Ok(Some(true)); + } + + if api_key.is_empty() { + return Ok(Some(false)); + } + + // Only perform validation if provider has a simple, official validation method + match provider { + LlmProvider::Claude => { + // Anthropic doesn't have a dedicated validation endpoint + // Simple format check: should start with "sk-ant-" + if api_key.starts_with("sk-ant-") && api_key.len() > 20 { + tracing::debug!("validate_api_key: Anthropic API key format appears valid"); + Ok(None) // Cannot validate without making a request + } else { + tracing::debug!("validate_api_key: Anthropic API key format is invalid"); + Ok(Some(false)) + } + } + LlmProvider::OpenAI => { + // OpenAI: simple format check + if api_key.starts_with("sk-") && api_key.len() > 20 { + tracing::debug!("validate_api_key: OpenAI API key format appears valid"); + Ok(None) // Cannot validate without making a request + } else { + tracing::debug!("validate_api_key: OpenAI API key format is invalid"); + Ok(Some(false)) + } + } + LlmProvider::Gemini => { + // Gemini: simple format check + if api_key.len() > 20 { + tracing::debug!("validate_api_key: Gemini API key format appears valid"); + Ok(None) // Cannot validate without making a request + } else { + tracing::debug!("validate_api_key: 
Gemini API key format is invalid"); + Ok(Some(false)) + } + } + LlmProvider::Ollama => { + // Ollama typically doesn't require API keys + Ok(Some(true)) + } + LlmProvider::XAI => { + // xAI: simple format check + if api_key.starts_with("xai-") && api_key.len() > 20 { + tracing::debug!("validate_api_key: xAI API key format appears valid"); + Ok(None) // Cannot validate without making a request + } else { + tracing::debug!("validate_api_key: xAI API key format is invalid"); + Ok(Some(false)) + } + } + } +} + +// Models configuration structures +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ModelsConfig { + pub models: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ModelDefinition { + pub model: String, + pub name: String, + pub meta: ModelMeta, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ModelMeta { + pub tool_support: bool, + pub function_calling: bool, + pub vision: bool, + pub multimodal: bool, + pub context_length: Option, + pub description: Option, +} + +// Get models config path +fn get_models_config_path() -> Result { + let home_dir = dirs::home_dir().ok_or_else(|| { + WorkerError::ConfigurationError("Could not determine home directory".to_string()) + })?; + Ok(home_dir.join(".config").join("nia").join("models.yaml")) +} + +// Load models configuration +fn load_models_config() -> Result { + let config_path = get_models_config_path()?; + + if !config_path.exists() { + tracing::warn!( + "Models config file not found at {:?}, using defaults", + config_path + ); + return Ok(ModelsConfig { models: vec![] }); + } + + let content = fs::read_to_string(&config_path).map_err(|e| { + WorkerError::ConfigurationError(format!("Failed to read models config: {}", e)) + })?; + + let config: ModelsConfig = serde_yaml::from_str(&content).map_err(|e| { + WorkerError::ConfigurationError(format!("Failed to parse models config: {}", e)) + })?; + + Ok(config) +} + +// Tool support detection using configuration +pub async fn supports_native_tools( + provider: &LlmProvider, + model_name: &str, + _api_key: &str, +) -> Result { + // Load models configuration + let config = load_models_config()?; + + // Look for the specific model in configuration + let model_id = format!( + "{}/{}", + match provider { + LlmProvider::Claude => "anthropic", + LlmProvider::OpenAI => "openai", + LlmProvider::Gemini => "gemini", + LlmProvider::Ollama => "ollama", + LlmProvider::XAI => "xai", + }, + model_name + ); + + // Find model in config and check function_calling setting + for model_def in &config.models { + if model_def.model == model_id || model_def.model.contains(model_name) { + tracing::debug!( + "Found model config: model={}, function_calling={}", + model_def.model, + model_def.meta.function_calling + ); + return Ok(model_def.meta.function_calling); + } + } + + tracing::warn!( + "Model not found in config: {} ({}), using provider defaults", + model_id, + model_name + ); + + // Fallback to provider-based detection if model not found in config + // But prioritize setting over provider defaults + tracing::warn!( + "Using provider-based fallback - this should be configured in models.yaml: provider={:?}, model={}", + provider, + model_name + ); + + let supports_tools = match provider { + LlmProvider::Claude => true, + LlmProvider::OpenAI => !model_name.contains("gpt-3.5-turbo-instruct"), + LlmProvider::Gemini => !model_name.contains("gemini-pro-vision"), + LlmProvider::Ollama => false, // Default to XML-based tools for Ollama + LlmProvider::XAI => true, + }; + + 
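+
+// A minimal sketch of the ~/.config/nia/models.yaml layout that
+// `load_models_config` deserializes with serde_yaml. The field names come
+// from the ModelsConfig/ModelDefinition/ModelMeta structs above; the concrete
+// model id and values below are hypothetical examples, not shipped defaults:
+//
+// models:
+//   - model: "anthropic/claude-3-5-sonnet"  # "<provider>/<model>" id matched by supports_native_tools
+//     name: "Claude 3.5 Sonnet"
+//     meta:
+//       tool_support: true
+//       function_calling: true
+//       vision: true
+//       multimodal: true
+//       context_length: 200000
+//       description: "Example entry"
+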
+// LlmClient trait - the common interface
+#[async_trait::async_trait]
+pub trait LlmClientTrait: Send + Sync {
+    async fn chat_stream<'a>(
+        &'a self,
+        messages: Vec<Message>,
+        tools: Option<&[crate::types::DynamicToolDefinition]>,
+        llm_debug: Option,
+    ) -> Result<
+        Box<dyn Stream<Item = Result<StreamEvent, WorkerError>> + Unpin + Send + 'a>,
+        WorkerError,
+    >;
+
+    async fn check_connection(&self) -> Result<(), WorkerError>;
+
+    fn provider(&self) -> LlmProvider;
+
+    fn get_model_name(&self) -> String;
+}
+
+// Use the LlmClient enum to work around dyn-compatibility issues
+pub enum LlmClient {
+    Anthropic(AnthropicClient),
+    Gemini(GeminiClient),
+    Ollama(OllamaClient),
+    OpenAI(OpenAIClient),
+    XAI(XAIClient),
+}
+
+// Direct method implementations on the LlmClient enum were removed;
+// only the LlmClientTrait implementation is used instead
+
+// Delegation macro to cut down on boilerplate
+macro_rules! delegate_to_client {
+    // With arguments
+    ($self:expr, $method:ident, $($arg:expr),+) => {
+        match $self {
+            LlmClient::Anthropic(client) => client.$method($($arg),*),
+            LlmClient::Gemini(client) => client.$method($($arg),*),
+            LlmClient::Ollama(client) => client.$method($($arg),*),
+            LlmClient::OpenAI(client) => client.$method($($arg),*),
+            LlmClient::XAI(client) => client.$method($($arg),*),
+        }
+    };
+    // Without arguments
+    ($self:expr, $method:ident) => {
+        match $self {
+            LlmClient::Anthropic(client) => client.$method(),
+            LlmClient::Gemini(client) => client.$method(),
+            LlmClient::Ollama(client) => client.$method(),
+            LlmClient::OpenAI(client) => client.$method(),
+            LlmClient::XAI(client) => client.$method(),
+        }
+    };
+}
+
+// Implement the trait for the LlmClient enum - concise delegation via the macro
+#[async_trait::async_trait]
+impl LlmClientTrait for LlmClient {
+    async fn chat_stream<'a>(
+        &'a self,
+        messages: Vec<Message>,
+        tools: Option<&[crate::types::DynamicToolDefinition]>,
+        llm_debug: Option,
+    ) -> Result<
+        Box<dyn Stream<Item = Result<StreamEvent, WorkerError>> + Unpin + Send + 'a>,
+        WorkerError,
+    > {
+        match self {
+            LlmClient::Anthropic(client) => client.chat_stream(messages, tools, llm_debug).await,
+            LlmClient::Gemini(client) => client.chat_stream(messages, tools, llm_debug).await,
+            LlmClient::Ollama(client) => client.chat_stream(messages, tools, llm_debug).await,
+            LlmClient::OpenAI(client) => client.chat_stream(messages, tools, llm_debug).await,
+            LlmClient::XAI(client) => client.chat_stream(messages, tools, llm_debug).await,
+        }
+    }
+
+    async fn check_connection(&self) -> Result<(), WorkerError> {
+        match self {
+            LlmClient::Anthropic(client) => client.check_connection().await,
+            LlmClient::Gemini(client) => client.check_connection().await,
+            LlmClient::Ollama(client) => client.check_connection().await,
+            LlmClient::OpenAI(client) => client.check_connection().await,
+            LlmClient::XAI(client) => client.check_connection().await,
+        }
+    }
+
+    fn provider(&self) -> LlmProvider {
+        delegate_to_client!(self, provider)
+    }
+
+    fn get_model_name(&self) -> String {
+        delegate_to_client!(self, get_model_name)
+    }
+}
+
+pub struct Worker {
+    llm_client: Box<dyn LlmClientTrait>,
+    composer: PromptComposer,
+    tools: Vec>,
+    api_key: String,
+    provider_str: String,
+    model_name: String,
+    role_config: Option,
+    config: Option<PromptRoleConfig>,
+    workspace_context: Option<WorkspaceContext>,
+    message_history: Vec<Message>,
+    hook_manager: crate::types::HookManager,
+    mcp_lazy_configs: Vec<mcp_tool::McpServerConfig>,
+}
+
+impl Worker {
+    pub fn new(
+        provider: LlmProvider,
+        model_name: &str,
+        api_keys: &HashMap<String, String>,
+        role_config: Option,
+    ) -> Result<Self, WorkerError> {
+        let provider_str = provider.as_str();
+        let api_key = api_keys.get(provider_str).cloned().unwrap_or_default();
+        let llm_client =
provider.create_client(model_name, &api_key)?;
+
+        // Detect the workspace context
+        let workspace_context = WorkspaceDetector::detect_workspace().ok();
+
+        // Build the prompt context
+        let prompt_context = Self::create_prompt_context_static(
+            &workspace_context,
+            provider.clone(),
+            model_name,
+            &[],
+        );
+
+        // Use the default or cli-assistant configuration
+        let composer = match Self::try_load_default_config(prompt_context.clone()) {
+            Ok(composer) => {
+                tracing::info!("Loaded default CLI assistant configuration");
+                composer
+            }
+            Err(e) => {
+                tracing::warn!("Failed to load default config, using fallback: {}", e);
+                let default_config = PromptRoleConfig::default();
+                PromptComposer::from_config(default_config, prompt_context)
+                    .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?
+            }
+        };
+
+        let mut worker = Self {
+            llm_client: Box::new(llm_client),
+            composer,
+            tools: Vec::new(),
+            api_key,
+            provider_str: provider_str.to_string(),
+            model_name: model_name.to_string(),
+            role_config,
+            config: None,
+            workspace_context,
+            message_history: Vec::new(),
+            hook_manager: crate::types::HookManager::new(),
+            mcp_lazy_configs: Vec::new(),
+        };
+
+        // Initialize the system prompt at session start
+        worker
+            .initialize_session()
+            .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?;
+
+        Ok(worker)
+    }
+
+    /// Load a list of tools
+    pub fn load_tools(&mut self, tools: Vec>) -> Result<(), WorkerError> {
+        self.tools.extend(tools);
+        tracing::info!("Loaded {} tools", self.tools.len());
+        Ok(())
+    }
+
+    /// Register a hook
+    pub fn register_hook(&mut self, hook: Box) {
+        let hook_name = hook.name().to_string();
+        self.hook_manager.register_hook(hook);
+        tracing::info!("Registered hook: {}", hook_name);
+    }
+
+    /// Register multiple hooks at once
+    pub fn register_hooks(&mut self, hooks: Vec>) {
+        tracing::info!("Registering {} hooks", hooks.len());
+        self.hook_manager.register_hooks(hooks);
+    }
+
+    /// Register an MCP server as a single tool
+    pub fn register_mcp_server(
+        &mut self,
+        config: mcp_tool::McpServerConfig,
+    ) -> Result<(), WorkerError> {
+        let mcp_tool = mcp_tool::McpDynamicTool::new(config.clone());
+        self.register_tool(Box::new(mcp_tool))?;
+        tracing::info!("Registered MCP server as tool: {}", config.name);
+        Ok(())
+    }
+
+    /// Register individual tools from an MCP server
+    pub async fn register_mcp_tools(
+        &mut self,
+        config: mcp_tool::McpServerConfig,
+    ) -> Result<(), WorkerError> {
+        let tools = mcp_tool::create_single_mcp_tools(&config).await?;
+        let tool_count = tools.len();
+
+        for tool in tools {
+            self.register_tool(tool)?;
+        }
+
+        tracing::info!(
+            "Registered {} individual MCP tools from server: {}",
+            tool_count,
+            config.name
+        );
+        Ok(())
+    }
+
+    /// Queue an MCP server for parallel initialization
+    pub fn queue_mcp_server(&mut self, config: mcp_tool::McpServerConfig) {
+        tracing::info!("Queuing MCP server: {}", config.name);
+        self.mcp_lazy_configs.push(config);
+    }
+
+    /// Check whether any MCP servers are queued
+    pub fn has_mcp_configs(&self) -> bool {
+        !self.mcp_lazy_configs.is_empty()
+    }
+
+    /// Initialize the queued MCP servers in parallel
+    pub async fn init_mcp_servers(&mut self) -> Result<(), WorkerError> {
+        if self.mcp_lazy_configs.is_empty() {
+            return Ok(());
+        }
+
+        let configs = std::mem::take(&mut self.mcp_lazy_configs);
+        tracing::info!(
+            "Starting parallel initialization of {} MCP servers",
+            configs.len()
+        );
+
+        // Initialize all MCP servers in parallel
+        let tasks: Vec<_> = configs
+            .into_iter()
+            .map(|config| {
+                let config_name = config.name.clone();
+                tokio::spawn(async move {
+                    tracing::info!("Parallel initializing MCP server: {}", config_name);
+
+                    match mcp_tool::create_single_mcp_tools(&config).await {
+                        Ok(tools) => {
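+                            // On success, return the server name together with its tools
+                            // so the collection loop below can register and attribute them.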
tracing::info!( + "Successfully initialized {} tools from MCP server: {}", + tools.len(), + config_name + ); + Ok((config_name, tools)) + } + Err(e) => { + tracing::warn!( + "Failed to initialize MCP server '{}': {}", + config_name, + e + ); + Err((config_name, e)) + } + } + }) + }) + .collect(); + + // すべてのタスクの完了を待つ + let results = futures::future::join_all(tasks).await; + + let mut total_tools_added = 0; + for result in results { + match result { + Ok(Ok((server_name, tools))) => { + let tool_count = tools.len(); + for tool in tools { + if let Err(e) = self.register_tool(tool) { + tracing::warn!( + "Failed to register tool from MCP server '{}': {}", + server_name, + e + ); + } else { + total_tools_added += 1; + } + } + tracing::info!( + "Registered {} tools from MCP server: {}", + tool_count, + server_name + ); + } + Ok(Err((server_name, e))) => { + tracing::warn!("MCP server '{}' initialization failed: {}", server_name, e); + } + Err(e) => { + tracing::warn!("MCP server task failed: {}", e); + } + } + } + + tracing::info!( + "Parallel MCP initialization completed. Added {} tools total", + total_tools_added + ); + Ok(()) + } + + /// 登録されているフックの数を取得 + pub fn hook_count(&self) -> usize { + self.hook_manager.hook_count() + } + + /// フック実行のヘルパーメソッド + async fn execute_hooks( + &self, + event: crate::types::HookEvent, + content: String, + ) -> ( + String, + Vec, + Vec, + Vec, + Vec, + bool, + ) { + tracing::info!( + "execute_hooks called with event: {:?}, content length: {}", + event, + content.len() + ); + + let hook_count = self.hook_manager.hook_count(); + tracing::info!("Hook manager has {} registered hooks", hook_count); + + let workspace_path = std::env::current_dir() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + + let hook_context = crate::types::HookContext::new( + content, + workspace_path, + self.message_history.clone(), + self.get_tools(), + ); + + let original_message_count = self.message_history.len(); + tracing::debug!("Original message count: {}", original_message_count); + + let (updated_context, results, additional_messages) = self + .hook_manager + .execute_hooks(event.clone(), hook_context) + .await; + tracing::info!( + "Hook execution completed. 
Results count: {}, new message count: {}, additional hook messages: {}", + results.len(), + updated_context.message_history.len(), + additional_messages.len() + ); + + // フック実行結果を処理 + let mut should_terminate = false; + let mut hook_events = Vec::new(); + let mut pre_messages = Vec::new(); + let mut post_messages = Vec::new(); + + for result in &results { + match result { + crate::types::HookResult::Error(error) => { + tracing::error!("Hook execution error: {}", error); + // エラーが発生した場合は処理を中断 + should_terminate = true; + break; + } + crate::types::HookResult::Complete => { + tracing::info!("Hook requested turn completion"); + // Completeが返された場合は処理を中断 + should_terminate = true; + break; + } + crate::types::HookResult::Continue => { + tracing::debug!("Hook execution completed successfully"); + } + crate::types::HookResult::ModifyContent(_) => { + tracing::debug!("Hook modified content"); + } + crate::types::HookResult::AddMessage(_, _) => { + tracing::debug!("Hook added message"); + } + crate::types::HookResult::AddPreMessage(message, role) => { + tracing::debug!("Hook added pre-message"); + pre_messages.push(Message::new(role.clone(), message.clone())); + } + crate::types::HookResult::AddPostMessage(message, role) => { + tracing::debug!("Hook added post-message"); + post_messages.push(Message::new(role.clone(), message.clone())); + } + } + } + + // フックマネージャーから取得した追加メッセージでHookMessageイベントを作成 + for (hook_name, content, role) in &additional_messages { + tracing::info!( + "Creating HookMessage event for hook '{}': {} chars, role: {:?}", + hook_name, + content.len(), + role + ); + hook_events.push(StreamEvent::HookMessage { + hook_name: hook_name.clone(), + content: content.clone(), + role: role.clone(), + }); + } + + // メッセージ履歴から追加されたメッセージも抽出(フックの結果以外で追加された可能性) + // ただし、ユーザーメッセージは除外する(重複を避けるため) + let context_added_messages: Vec = updated_context + .message_history + .iter() + .skip(original_message_count) + .filter(|msg| msg.role != crate::types::Role::User) + .cloned() + .collect(); + + tracing::info!( + "execute_hooks returning: content_len={}, pre_messages={}, post_messages={}, context_added={}, hook_events={}, should_terminate={}", + updated_context.content.len(), + pre_messages.len(), + post_messages.len(), + context_added_messages.len(), + hook_events.len(), + should_terminate + ); + + ( + updated_context.content, + pre_messages, + context_added_messages, + post_messages, + hook_events, + should_terminate, + ) + } + + /// 設定を読み込む + pub fn load_config>( + &mut self, + config_path: P, + ) -> Result<(), WorkerError> { + use crate::config_parser::ConfigParser; + + // 設定ファイルを読み込み + let config = ConfigParser::parse_from_file(config_path) + .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?; + + // プロンプトコンテキストを構築 + let prompt_context = self.create_prompt_context()?; + + // DynamicPromptComposerを作成 + let composer = PromptComposer::from_config(config.clone(), prompt_context) + .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?; + + self.config = Some(config); + self.composer = composer; + + // 設定変更後にセッション再初期化 + self.initialize_session() + .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?; + + tracing::info!("Dynamic configuration loaded successfully"); + Ok(()) + } + + /// 静的プロンプトコンテキストを作成(構築時用) + fn create_prompt_context_static( + workspace_context: &Option, + provider: LlmProvider, + model_name: &str, + tools: &[String], + ) -> PromptContext { + let supports_native_tools = match provider { + LlmProvider::Claude => true, + LlmProvider::OpenAI => 
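+            // Heuristic: gpt-3.5-turbo-instruct is a completions-style model without
+            // native function calling; other OpenAI chat models are assumed to support it.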
!model_name.contains("gpt-3.5-turbo-instruct"), + LlmProvider::Gemini => !model_name.contains("gemini-pro-vision"), + LlmProvider::Ollama => model_name.contains("llama") || model_name.contains("mistral"), + LlmProvider::XAI => true, + }; + + let model_context = ModelContext { + provider, + model_name: model_name.to_string(), + capabilities: ModelCapabilities { + supports_tools: supports_native_tools, + supports_function_calling: supports_native_tools, + supports_vision: false, + supports_multimodal: Some(false), + context_length: None, + capabilities: vec![], + needs_verification: Some(false), + }, + supports_native_tools, + }; + + let session_context = SessionContext { + conversation_id: None, + message_count: 0, + active_tools: tools.to_vec(), + user_preferences: None, + }; + + let workspace_context = workspace_context.clone().unwrap_or_default(); + + PromptContext { + workspace: workspace_context, + model: model_context, + session: session_context, + variables: HashMap::new(), + } + } + + /// プロンプトコンテキストを作成 + fn create_prompt_context(&self) -> Result { + let provider = LlmProvider::from_str(&self.provider_str).ok_or_else(|| { + WorkerError::ConfigurationError(format!("Unknown provider: {}", self.provider_str)) + })?; + + // モデル能力を静的に判定 + let supports_native_tools = match provider { + LlmProvider::Claude => true, + LlmProvider::OpenAI => !self.model_name.contains("gpt-3.5-turbo-instruct"), + LlmProvider::Gemini => !self.model_name.contains("gemini-pro-vision"), + LlmProvider::Ollama => { + self.model_name.contains("llama") || self.model_name.contains("mistral") + } + LlmProvider::XAI => true, + }; + + let model_context = ModelContext { + provider, + model_name: self.model_name.clone(), + capabilities: ModelCapabilities { + supports_tools: supports_native_tools, + supports_function_calling: supports_native_tools, + supports_vision: false, // 簡略化 + supports_multimodal: Some(false), + context_length: None, + capabilities: vec![], + needs_verification: Some(false), + }, + supports_native_tools, + }; + + let session_context = SessionContext { + conversation_id: None, + message_count: 0, + active_tools: self.tools.iter().map(|t| t.name().to_string()).collect(), + user_preferences: None, + }; + + let workspace_context = self.workspace_context.clone().unwrap_or_default(); + + Ok(PromptContext { + workspace: workspace_context, + model: model_context, + session: session_context, + variables: HashMap::new(), + }) + } + + /// モデルを変更する + pub fn change_model( + &mut self, + provider: LlmProvider, + model_name: &str, + api_key: &str, + ) -> Result<(), WorkerError> { + // 新しいLLMクライアントを作成 + let new_client = provider.create_client(model_name, api_key)?; + + // 古いクライアントを新しいものに置き換え + self.llm_client = Box::new(new_client); + self.provider_str = provider.as_str().to_string(); + self.model_name = model_name.to_string(); + self.api_key = api_key.to_string(); + + tracing::info!("Model changed to {}/{}", provider.as_str(), model_name); + Ok(()) + } + + /// ツールを動的に登録する + pub fn register_tool(&mut self, tool: Box) -> Result<(), WorkerError> { + // 同名のツールが既に存在するかチェック + if self.tools.iter().any(|t| t.name() == tool.name()) { + return Err(WorkerError::ToolExecutionError(format!( + "Tool '{}' is already registered", + tool.name() + ))); + } + + self.tools.push(tool); + tracing::info!( + "Tool '{}' registered successfully", + self.tools.last().unwrap().name() + ); + Ok(()) + } + + /// 登録されているツールの一覧を取得 + pub fn get_tools(&self) -> Vec { + self.tools + .iter() + .map(|tool| DynamicToolDefinition { + name: 
tool.name().to_string(), + description: tool.description().to_string(), + parameters_schema: tool.parameters_schema(), + }) + .collect() + } + + /// ツールを名前で検索して実行 + pub async fn execute_tool( + &self, + tool_name: &str, + args: serde_json::Value, + ) -> Result { + match self.tools.iter().find(|tool| tool.name() == tool_name) { + Some(tool) => tool.execute(args).await.map_err(WorkerError::from), + None => Err(WorkerError::ToolExecutionError(format!( + "Tool '{}' not found", + tool_name + ))), + } + } + + /// Execute a tool manually (for direct tool invocation from TUI) + pub async fn execute_tool_manually( + &self, + tool_name: &str, + args: serde_json::Value, + ) -> Result { + tracing::info!( + "Manually executing tool '{}' with args: {}", + tool_name, + args + ); + self.execute_tool(tool_name, args).await + } + + /// Get the model name for tool support detection + pub fn get_model_name(&self) -> String { + self.llm_client.get_model_name() + } + + pub fn get_provider_name(&self) -> String { + self.llm_client.provider().to_string() + } + + /// Get configuration information for task delegation + pub fn get_config(&self) -> (LlmProvider, &str, &str, &Option) { + ( + self.llm_client.provider(), + &self.model_name, + &self.api_key, + &self.role_config, + ) + } + + /// Get tool names (used to filter out specific tools) + pub fn get_tool_names(&self) -> Vec { + self.tools + .iter() + .map(|tool| tool.name().to_string()) + .collect() + } + + /// 簡素化された非同期処理(Arcの代わりにシンプルなAPIを使用) + pub async fn process_with_shared_state<'a>( + worker_arc: std::sync::Arc>, + user_message: String, + llm_debug: Option, + ) -> impl futures::Stream> + 'a { + use async_stream::stream; + use futures::StreamExt; + + // stream!マクロ用に変数をキャプチャ + let worker = worker_arc; + let message = user_message; + let debug = llm_debug; + + stream! { + // Lock the worker briefly to create the message and execute hooks + let message_history = { + let worker_clone = worker.clone(); + let mut w = worker_clone.lock().await; + + // OnMessageSendフックを実行 + let (processed_message, pre_messages, context_messages, post_messages, hook_events, _should_terminate) = w + .execute_hooks(crate::types::HookEvent::OnMessageSend, message.clone()) + .await; + + // フックイベントを送信 + for hook_event in hook_events { + yield Ok(hook_event); + } + + // メッセージを正しい順序で履歴に追加 + // 1. Pre messages + for msg in pre_messages { + w.message_history.push(msg); + } + + // 2. Context messages + for msg in context_messages { + w.message_history.push(msg); + } + + // 3. 処理済みユーザーメッセージ + w.message_history.push(crate::types::Message::new( + crate::types::Role::User, + processed_message, + )); + + // 4. 
Post messages + for msg in post_messages { + w.message_history.push(msg); + } + + w.message_history.clone() + }; + + // Create a temporary worker for processing without holding the lock + let (llm_client, composer, tool_definitions, api_key, _provider_str, _model_name) = { + let w_locked = worker.lock().await; + let llm_client = w_locked.llm_client.provider().create_client(&w_locked.model_name, &w_locked.api_key); + match llm_client { + Ok(client) => { + let tool_defs = w_locked.tools.iter().map(|tool| crate::types::DynamicToolDefinition { + name: tool.name().to_string(), + description: tool.description().to_string(), + parameters_schema: tool.parameters_schema(), + }).collect::>(); + + ( + client, + w_locked.composer.clone(), + tool_defs, + w_locked.api_key.clone(), + w_locked.provider_str.clone(), + w_locked.model_name.clone() + ) + }, + Err(e) => { + yield Err(e); + return; + } + } + }; + + // Process the conversation using a simplified approach + let mut conversation_messages = message_history; + + loop { + let provider = llm_client.provider(); + let model_name = llm_client.get_model_name(); + let supports_native = match supports_native_tools(&provider, &model_name, &api_key).await { + Ok(supports) => supports, + Err(e) => { + tracing::warn!("Failed to check native tool support: {}", e); + false + } + }; + + let (composed_messages, tools_for_llm) = if supports_native { + let messages = match composer.compose(&conversation_messages) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, Some(tool_definitions.as_slice())) + } else { + // Generate tools schema for non-native tool support + let tools_schema = generate_tools_schema_from_definitions(&provider, &tool_definitions); + let messages = match composer.compose_with_tools(&conversation_messages, &tools_schema) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, None) + }; + + let stream_result = llm_client.chat_stream(composed_messages, tools_for_llm, debug.clone()).await; + + let mut stream = match stream_result { + Ok(s) => s, + Err(e) => { + yield Err(e); + return; + } + }; + + let mut tool_calls = Vec::new(); + let mut response_content = String::new(); + let mut has_tool_calls = false; + + while let Some(event_result) = stream.next().await { + match event_result { + Ok(crate::types::StreamEvent::Chunk(chunk)) => { + response_content.push_str(&chunk); + yield Ok(crate::types::StreamEvent::Chunk(chunk)); + } + Ok(crate::types::StreamEvent::ToolCall(tool_call)) => { + // ツール呼び出しイベントをそのまま流す + yield Ok(crate::types::StreamEvent::ToolCall(tool_call.clone())); + tool_calls.push(tool_call); + has_tool_calls = true; + } + Ok(other_event) => { + yield Ok(other_event); + } + Err(e) => { + yield Err(e); + return; + } + } + } + + // Add assistant response to history + if !response_content.is_empty() || has_tool_calls { + let model_message = if has_tool_calls { + crate::types::Message::with_tool_calls( + crate::types::Role::Model, + response_content.clone(), + tool_calls.clone(), + ) + } else { + crate::types::Message::new( + crate::types::Role::Model, + response_content.clone(), + ) + }; + conversation_messages.push(model_message); + } + + // If no tool calls, we're done + if !has_tool_calls { + break; + } + + // Execute all tool calls and add their results to conversation + for tool_call in tool_calls { + let tool_result = { + let worker_locked = worker.lock().await; + match 
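+                        // Tool arguments arrive as a JSON string; parse them before
+                        // dispatching to execute_tool while the worker lock is held.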
serde_json::from_str(&tool_call.arguments) {
+                        Ok(args) => worker_locked.execute_tool(&tool_call.name, args).await,
+                        Err(e) => Err(crate::WorkerError::JsonError(e)),
+                    }
+                };
+
+                // Send the tool result as a stream event
+                let stream_result = match &tool_result {
+                    Ok(result) => Ok(result.clone()),
+                    Err(e) => Err(e.to_string()),
+                };
+                yield Ok(crate::types::StreamEvent::ToolResult {
+                    tool_name: tool_call.name.clone(),
+                    result: stream_result,
+                });
+
+                // Add the tool result to the conversation
+                let tool_result_message = match tool_result {
+                    Ok(result) => {
+                        crate::types::Message::new(
+                            crate::types::Role::Tool,
+                            format!("Tool '{}' executed successfully. Result: {}",
+                                tool_call.name,
+                                serde_json::to_string(&result).unwrap_or_default()),
+                        )
+                    }
+                    Err(e) => {
+                        crate::types::Message::new(
+                            crate::types::Role::Tool,
+                            format!("Tool '{}' execution failed: {}", tool_call.name, e),
+                        )
+                    }
+                };
+                conversation_messages.push(tool_result_message);
+            }
+
+            // Continue the conversation after the tool calls;
+            // the loop will run again with the updated conversation_messages
+        }
+
+        // Update the original worker with the final conversation
+        {
+            let worker_clone = worker.clone();
+            let mut w = worker_clone.lock().await;
+            w.message_history = conversation_messages;
+        }
+    }
+}
+
+    /// Conversation processing that uses the message history (messages are appended to it)
+    pub async fn process_task_with_history<'a>(
+        &'a mut self,
+        user_message: String,
+        llm_debug: Option,
+    ) -> impl Stream<Item = Result<StreamEvent, WorkerError>> + 'a {
+        // Run the OnMessageSend hooks
+        let (
+            processed_message,
+            pre_messages,
+            context_messages,
+            post_messages,
+            _hook_events,
+            _should_terminate,
+        ) = self
+            .execute_hooks(crate::types::HookEvent::OnMessageSend, user_message)
+            .await;
+
+        // Add messages to the history in the correct order
+        // 1. Pre messages (e.g. a summary)
+        for msg in pre_messages {
+            self.message_history.push(msg);
+        }
+
+        // 2. Context messages
+        for msg in context_messages {
+            self.message_history.push(msg);
+        }
+
+        // 3. The processed user message
+        self.message_history
+            .push(Message::new(crate::types::Role::User, processed_message));
+
+        // 4. Post messages (e.g. file contents)
+        for msg in post_messages {
+            self.message_history.push(msg);
+        }
+
+        // Run the processing over the history
+        self.process_task_stream_with_history(llm_debug).await
+    }
+
+    /// Internal processing method that uses the history
+    async fn process_task_stream_with_history<'a>(
+        &'a mut self,
+        llm_debug: Option,
+    ) -> impl Stream<Item = Result<StreamEvent, WorkerError>> + 'a {
+        stream!
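+        // The body below is evaluated lazily as the stream is polled: it composes the
+        // prompt, streams the LLM response, executes any requested tool calls, and
+        // loops until a turn completes without tool calls.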
{ + let mut conversation_messages = self.message_history.clone(); + + loop { + let tools = self.get_tools(); + let provider = self.llm_client.provider(); + let model_name = self.get_model_name(); + tracing::debug!("Checking native tool support: provider={:?}, model_name={}, api_key_len={}, provider_str={}", provider, model_name, self.api_key.len(), self.provider_str); + let supports_native = match supports_native_tools(&provider, &model_name, &self.api_key).await { + Ok(supports) => supports, + Err(e) => { + tracing::warn!("Failed to check native tool support: {}", e); + false + } + }; + + tracing::info!("Model {} supports native tools: {}", model_name, supports_native); + + let (composed_messages, tools_for_llm) = if supports_native { + // Native tools - basic composition + let messages = match self.composer.compose(&conversation_messages) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, Some(tools.as_slice())) + } else { + // Text-based tools - composition with tool schema + let tools_schema = generate_tools_schema(&provider, &self.tools); + let messages = match self.composer.compose_with_tools(&conversation_messages, &tools_schema) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, None) + }; + + let stream_result = self.llm_client.chat_stream(composed_messages, tools_for_llm, llm_debug.clone()).await; + + let mut stream = match stream_result { + Ok(s) => s, + Err(e) => { + yield Err(e); + return; + } + }; + + let mut tool_calls = Vec::new(); + let mut response_content = String::new(); + let mut has_tool_calls = false; + + while let Some(event_result) = stream.next().await { + match event_result { + Ok(StreamEvent::ToolCall(tool_call)) => { + // ツール呼び出しイベントをそのまま流す + yield Ok(StreamEvent::ToolCall(tool_call.clone())); + tool_calls.push(tool_call); + has_tool_calls = true; + } + Ok(StreamEvent::Chunk(chunk)) => { + response_content.push_str(&chunk); + yield Ok(StreamEvent::Chunk(chunk)); + } + Ok(StreamEvent::Completion(message)) => { + yield Ok(StreamEvent::Completion(message)); + } + Ok(StreamEvent::Error(error)) => { + yield Ok(StreamEvent::Error(error)); + } + Ok(StreamEvent::DebugRequest { model, body }) => { + yield Ok(StreamEvent::DebugRequest { model, body }); + } + Ok(StreamEvent::DebugResponse { model, body }) => { + yield Ok(StreamEvent::DebugResponse { model, body }); + } + Ok(StreamEvent::DebugJson { title, data }) => { + yield Ok(StreamEvent::DebugJson { title, data }); + } + Ok(StreamEvent::ToolResult { tool_name, result }) => { + yield Ok(StreamEvent::ToolResult { tool_name, result }); + } + Ok(StreamEvent::HookMessage { hook_name, content, role }) => { + yield Ok(StreamEvent::HookMessage { hook_name, content, role }); + } + Err(e) => { + yield Err(e); + return; + } + } + } + + // Add the assistant's response to conversation history + if !response_content.is_empty() || has_tool_calls { + let model_message = if has_tool_calls { + Message::with_tool_calls( + crate::types::Role::Model, + response_content.clone(), + tool_calls.clone(), + ) + } else { + Message::new( + crate::types::Role::Model, + response_content.clone(), + ) + }; + conversation_messages.push(model_message.clone()); + } + + // If no tool calls were made, we're done + if !has_tool_calls { + + // OnTurnCompletedフックを実行(ツール呼び出しがない場合) + let (_processed_response, _pre_messages, additional_messages, _post_messages, hook_events, _should_terminate) = self + .execute_hooks( + 
crate::types::HookEvent::OnTurnCompleted, + response_content.clone(), + ).await; + + // HookメッセージをStreamイベントとして送信 + for hook_event in hook_events { + yield Ok(hook_event); + } + + // 追加メッセージを会話履歴に追加 + for msg in additional_messages { + conversation_messages.push(msg); + } + + break; + } + + // Execute all tool calls and add their results to conversation + for tool_call in tool_calls { + // PreToolUseフックを実行 + let tool_args_str = tool_call.arguments.clone(); + let (_processed_args, _pre_messages, _context_messages, _post_messages, pre_hook_events, _should_terminate) = self + .execute_hooks( + crate::types::HookEvent::PreToolUse(tool_call.name.clone()), + tool_args_str, + ).await; + + // PreToolUse HookメッセージをStreamイベントとして送信 + for hook_event in pre_hook_events { + yield Ok(hook_event); + } + + let tool_result: Result = match serde_json::from_str(&tool_call.arguments) { + Ok(args) => self.execute_tool(&tool_call.name, args).await, + Err(e) => Err(WorkerError::JsonError(e)), + }; + + // Send tool result as stream event + let stream_result = match &tool_result { + Ok(result) => Ok(result.clone()), + Err(e) => Err(e.to_string()), + }; + yield Ok(StreamEvent::ToolResult { + tool_name: tool_call.name.clone(), + result: stream_result, + }); + + // PostToolUseフックを実行 + let tool_result_str = match &tool_result { + Ok(result) => serde_json::to_string(result).unwrap_or_default(), + Err(e) => format!("Error: {}", e), + }; + + let (_processed_result, _pre_messages, additional_messages, _post_messages, hook_events, _should_terminate) = self + .execute_hooks( + crate::types::HookEvent::PostToolUse(tool_call.name.clone()), + tool_result_str, + ).await; + + // HookメッセージをStreamイベントとして送信 + for hook_event in hook_events { + yield Ok(hook_event); + } + + // 追加メッセージを会話履歴に追加 + for msg in additional_messages { + conversation_messages.push(msg); + } + + let tool_result_message = match tool_result { + Ok(result) => { + // Tool executed successfully + match serde_json::to_string(&result) { + Ok(result_str) => Message::new( + crate::types::Role::Tool, + format!("Tool '{}' executed successfully. Result: {}", tool_call.name, result_str), + ), + Err(e) => Message::new( + crate::types::Role::Tool, + format!("Tool '{}' executed successfully but failed to serialize result: {}", tool_call.name, e), + ), + } + } + Err(e) => { + // Tool execution failed + Message::new( + crate::types::Role::Tool, + format!("Tool '{}' execution failed: {}", tool_call.name, e), + ) + } + }; + + conversation_messages.push(tool_result_message); + } + } + + // 最終的なフック処理は個別の応答処理で既に実行済み + + // 最終的な会話履歴を保存 + self.message_history = conversation_messages; + } + } + + pub async fn process_task_stream<'a>( + &'a self, + messages: Vec, + llm_debug: Option, + ) -> impl Stream> + 'a { + stream! 
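+        // Same tool-call loop as the history-based method above, but it runs over the
+        // caller-supplied messages and does not write the conversation back to the worker.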
{ + let mut conversation_messages = messages; + + loop { + let tools = self.get_tools(); + let provider = self.llm_client.provider(); + let model_name = self.get_model_name(); + tracing::debug!("Checking native tool support: provider={:?}, model_name={}, api_key_len={}, provider_str={}", provider, model_name, self.api_key.len(), self.provider_str); + let supports_native = match supports_native_tools(&provider, &model_name, &self.api_key).await { + Ok(supports) => supports, + Err(e) => { + tracing::warn!("Failed to check native tool support: {}", e); + false + } + }; + + tracing::info!("Model {} supports native tools: {}", model_name, supports_native); + + let (composed_messages, tools_for_llm) = if supports_native { + // Native tools - basic composition + let messages = match self.composer.compose(&conversation_messages) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, Some(tools.as_slice())) + } else { + // Text-based tools - composition with tool schema + let tools_schema = generate_tools_schema(&provider, &self.tools); + let messages = match self.composer.compose_with_tools(&conversation_messages, &tools_schema) { + Ok(m) => m, + Err(e) => { + yield Err(WorkerError::ConfigurationError(e.to_string())); + return; + } + }; + (messages, None) + }; + + let stream_result = self.llm_client.chat_stream(composed_messages, tools_for_llm, llm_debug.clone()).await; + + let mut stream = match stream_result { + Ok(s) => s, + Err(e) => { + yield Err(e); + return; + } + }; + + let mut tool_calls = Vec::new(); + let mut response_content = String::new(); + let mut has_tool_calls = false; + + while let Some(event_result) = stream.next().await { + match event_result { + Ok(StreamEvent::ToolCall(tool_call)) => { + // ツール呼び出しイベントをそのまま流す + yield Ok(StreamEvent::ToolCall(tool_call.clone())); + tool_calls.push(tool_call); + has_tool_calls = true; + } + Ok(StreamEvent::Chunk(chunk)) => { + response_content.push_str(&chunk); + yield Ok(StreamEvent::Chunk(chunk)); + } + Ok(StreamEvent::Completion(message)) => { + yield Ok(StreamEvent::Completion(message)); + } + Ok(StreamEvent::Error(error)) => { + yield Ok(StreamEvent::Error(error)); + } + Ok(StreamEvent::DebugRequest { model, body }) => { + yield Ok(StreamEvent::DebugRequest { model, body }); + } + Ok(StreamEvent::DebugResponse { model, body }) => { + yield Ok(StreamEvent::DebugResponse { model, body }); + } + Ok(StreamEvent::DebugJson { title, data }) => { + yield Ok(StreamEvent::DebugJson { title, data }); + } + Ok(StreamEvent::ToolResult { tool_name, result }) => { + yield Ok(StreamEvent::ToolResult { tool_name, result }); + } + Ok(StreamEvent::HookMessage { hook_name, content, role }) => { + yield Ok(StreamEvent::HookMessage { hook_name, content, role }); + } + Err(e) => { + yield Err(e); + return; + } + } + } + + // If no tool calls were made, we're done + if !has_tool_calls { + // Note: OnTurnCompletedフックはprocess_task_with_historyで実行される + break; + } + + // Add the assistant's response to conversation history + if !response_content.is_empty() || has_tool_calls { + let model_message = if has_tool_calls { + Message::with_tool_calls( + crate::types::Role::Model, + response_content.clone(), + tool_calls.clone(), + ) + } else { + Message::new( + crate::types::Role::Model, + response_content.clone(), + ) + }; + conversation_messages.push(model_message.clone()); + } + + // Execute all tool calls and add their results to conversation + for tool_call in tool_calls { + // PreToolUseフックを実行 + let 
tool_args_str = tool_call.arguments.clone();
+                let (_processed_args, _pre_messages, _context_messages, _post_messages, pre_hook_events, _should_terminate) = self
+                    .execute_hooks(
+                        crate::types::HookEvent::PreToolUse(tool_call.name.clone()),
+                        tool_args_str,
+                    )
+                    .await;
+
+                // Emit PreToolUse hook messages as stream events
+                for hook_event in pre_hook_events {
+                    yield Ok(hook_event);
+                }
+
+                let tool_result = match serde_json::from_str(&tool_call.arguments) {
+                    Ok(args) => self.execute_tool(&tool_call.name, args).await,
+                    Err(e) => Err(WorkerError::JsonError(e)),
+                };
+
+                // Send the tool result as a stream event
+                let stream_result = match &tool_result {
+                    Ok(result) => Ok(result.clone()),
+                    Err(e) => Err(e.to_string()),
+                };
+                yield Ok(StreamEvent::ToolResult {
+                    tool_name: tool_call.name.clone(),
+                    result: stream_result,
+                });
+
+                // Run the PostToolUse hooks
+                let tool_result_str = match &tool_result {
+                    Ok(result) => serde_json::to_string(result).unwrap_or_default(),
+                    Err(e) => format!("Error: {}", e),
+                };
+
+                let (_processed_result, _pre_messages, additional_messages, _post_messages, hook_events, _should_terminate) = self
+                    .execute_hooks(
+                        crate::types::HookEvent::PostToolUse(tool_call.name.clone()),
+                        tool_result_str,
+                    )
+                    .await;
+
+                // Emit hook messages as stream events
+                for hook_event in hook_events {
+                    yield Ok(hook_event);
+                }
+
+                // Append the additional messages to the conversation history
+                for msg in additional_messages {
+                    conversation_messages.push(msg);
+                }
+
+                let tool_result_message = match tool_result {
+                    Ok(result) => {
+                        // Tool executed successfully
+                        match serde_json::to_string(&result) {
+                            Ok(result_str) => Message::new(
+                                crate::types::Role::Tool,
+                                format!("Tool '{}' executed successfully. Result: {}", tool_call.name, result_str),
+                            ),
+                            Err(e) => Message::new(
+                                crate::types::Role::Tool,
+                                format!("Tool '{}' executed successfully but failed to serialize result: {}", tool_call.name, e),
+                            ),
+                        }
+                    }
+                    Err(e) => {
+                        // Tool execution failed
+                        Message::new(
+                            crate::types::Role::Tool,
+                            format!("Tool '{}' execution failed: {}", tool_call.name, e),
+                        )
+                    }
+                };
+
+                conversation_messages.push(tool_result_message);
+            }
+        }
+
+        // Note: the OnTurnCompleted hooks have already run in the per-response handling
+        }
+    }
+
+    /// Get the session data
+    pub fn get_session_data(&self) -> Result<SessionData, WorkerError> {
+        let workspace_path = std::env::current_dir()
+            .map_err(|e| {
+                WorkerError::ConfigurationError(format!("Failed to get current directory: {}", e))
+            })?
+            .to_string_lossy()
+            .to_string();
+
+        // Get the git branch (errors are ignored)
+        let git_branch = self.get_git_branch().ok();
+
+        let session_id = uuid::Uuid::new_v4().to_string();
+        let mut session_data = SessionData::new(
+            session_id,
+            self.provider_str.clone(),
+            self.model_name.clone(),
+            workspace_path,
+        );
+        session_data.git_branch = git_branch;
+
+        // Copy the existing message_history into the context
+        session_data.context = self.message_history.clone();
+
+        Ok(session_data)
+    }
+
+    /// Restore the history from session data
+    pub fn load_session(&mut self, session_data: &SessionData) -> Result<(), WorkerError> {
+        // Log a warning when the model differs
+        if session_data.model_provider != self.provider_str
+            || session_data.model_name != self.model_name
+        {
+            tracing::warn!(
+                "Loading session with different model: session={}:{}, current={}:{}",
+                session_data.model_provider,
+                session_data.model_name,
+                self.provider_str,
+                self.model_name
+            );
+        }
+
+        self.message_history = session_data.context.clone();
+
+        // Reinitialize the prompt composer when restoring a session
+        self.reinitialize_session_with_history()
+            .map_err(|e| WorkerError::ConfigurationError(e.to_string()))?;
+
+        Ok(())
+    }
+
+    /// Get the message history
+    pub fn get_message_history(&self) -> &[Message] {
+        &self.message_history
+    }
+
+    /// Clear the message history
+    pub fn clear_message_history(&mut self) {
+        self.message_history.clear();
+
+        // Reinitialize the session after clearing the history
+        if let Err(e) = self.initialize_session() {
+            tracing::warn!(
+                "Failed to reinitialize session after clearing history: {}",
+                e
+            );
+        }
+    }
+
+    /// Add a message to the history
+    pub fn add_message(&mut self, message: Message) {
+        self.message_history.push(message);
+    }
+
+    /// Add multiple messages to the history
+    pub fn add_messages(&mut self, messages: Vec<Message>) {
+        self.message_history.extend(messages);
+    }
+
+    /// Get the current git branch
+    fn get_git_branch(&self) -> Result<String, std::io::Error> {
+        use std::process::Command;
+
+        let output = Command::new("git")
+            .arg("branch")
+            .arg("--show-current")
+            .output()?;
+
+        if output.status.success() {
+            Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
+        } else {
+            Err(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "Git command failed",
+            ))
+        }
+    }
+
+    /// Try to load the default configuration file
+    fn try_load_default_config(
+        prompt_context: PromptContext,
+    ) -> Result<PromptComposer, WorkerError> {
+        use crate::config_parser::ConfigParser;
+
+        // Candidate paths for the default configuration file
+        let possible_paths = [
+            "#nia/config/roles/cli-assistant.yaml",
+            "./resources/config/roles/cli-assistant.yaml",
+            "./nia-cli/resources/config/roles/cli-assistant.yaml",
+            "../nia-cli/resources/config/roles/cli-assistant.yaml",
+        ];
+
+        for path in &possible_paths {
+            if let Ok(resolved_path) = ConfigParser::resolve_path(path) {
+                if resolved_path.exists() {
+                    match ConfigParser::parse_from_file(&resolved_path) {
+                        Ok(config) => {
+                            match PromptComposer::from_config(config, prompt_context.clone()) {
+                                Ok(composer) => {
+                                    tracing::info!(
+                                        "Successfully loaded config from: {}",
+                                        resolved_path.display()
+                                    );
+                                    return Ok(composer);
+                                }
+                                Err(e) => {
+                                    tracing::warn!(
+                                        "Failed to create composer from {}: {}",
+                                        resolved_path.display(),
+                                        e
+                                    );
+                                }
+                            }
+                        }
+                        Err(e) => {
+                            tracing::warn!(
+                                "Failed to parse config from {}: {}",
+                                resolved_path.display(),
+                                e
+                            );
+                        }
+                    }
+                }
+            }
+        }
+
+        Err(WorkerError::ConfigurationError(
+            "No default configuration found".to_string(),
+        ))
+    }
+
+    /// Session initialization (internal to Worker)
+    fn initialize_session(&mut self) -> Result<(), crate::prompt_types::PromptError> {
+        // Initialize the session with an empty message list
+        self.composer.initialize_session(&[])
+    }
+
+    /// Session reinitialization with history (internal to Worker)
+    fn reinitialize_session_with_history(
+        &mut self,
+    ) -> Result<(),
crate::prompt_types::PromptError> {
+        // Initialize the session using the current history
+        self.composer.initialize_session(&self.message_history)
+    }
+}
+
+pub async fn validate_model(
+    model_str: &str,
+    api_keys: &std::collections::HashMap<String, String>,
+) -> bool {
+    tracing::info!("validate_model: Validating model: {}", model_str);
+    tracing::debug!(
+        "validate_model: Available API keys: {:?}",
+        api_keys.keys().collect::<Vec<_>>()
+    );
+
+    if let Some((provider_str, model_name)) = model_str.split_once('/') {
+        tracing::info!(
+            "validate_model: Split model string - provider: {}, model: {}",
+            provider_str,
+            model_name
+        );
+
+        if let Some(provider) = LlmProvider::from_str(provider_str) {
+            let api_key = api_keys.get(provider_str).cloned().unwrap_or_default();
+            let requires_api_key = provider.requires_api_key();
+
+            tracing::info!(
+                "validate_model: Provider: {:?}, requires_api_key: {}, api_key_length: {}",
+                provider,
+                requires_api_key,
+                api_key.len()
+            );
+
+            // Check if an API key is required but missing
+            if requires_api_key && api_key.is_empty() {
+                tracing::warn!(
+                    "validate_model: API key required but missing for provider: {}",
+                    provider_str
+                );
+                return false;
+            }
+
+            // Only validate the format - the model name should not be empty
+            let is_valid = !model_name.trim().is_empty();
+            tracing::info!("validate_model: Model validation result: {}", is_valid);
+            is_valid
+        } else {
+            tracing::error!("validate_model: Unknown provider: {}", provider_str);
+            false
+        }
+    } else {
+        tracing::error!("validate_model: Invalid model format: {}", model_str);
+        false
+    }
+}
diff --git a/worker/src/llm/anthropic.rs b/worker/src/llm/anthropic.rs
new file mode 100644
index 0000000..2f68b3b
--- /dev/null
+++ b/worker/src/llm/anthropic.rs
@@ -0,0 +1,393 @@
+use crate::{
+    LlmClientTrait, WorkerError,
+    types::{LlmProvider, Message, Role, StreamEvent, ToolCall},
+    url_config::UrlConfig,
+};
+use async_stream::stream;
+use futures_util::{Stream, StreamExt};
+use reqwest::Client;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+#[derive(Debug, Serialize)]
+struct AnthropicRequest {
+    model: String,
+    max_tokens: i32,
+    messages: Vec<AnthropicMessage>,
+    stream: bool,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    tools: Option<Vec<AnthropicTool>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    system: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+struct AnthropicMessage {
+    role: String,
+    content: String,
+}
+
+#[derive(Debug, Serialize, Clone)]
+struct AnthropicTool {
+    name: String,
+    description: String,
+    input_schema: Value,
+}
+
+#[derive(Debug, Deserialize)]
+struct AnthropicResponse {
+    #[serde(rename = "type")]
+    response_type: String,
+    content: Vec<AnthropicContent>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+struct AnthropicStreamResponse {
+    #[serde(rename = "type")]
+    response_type: String,
+    #[serde(flatten)]
+    data: Value,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+enum AnthropicContent {
+    #[serde(rename = "text")]
+    Text { text: String },
+    #[serde(rename = "tool_use")]
+    ToolUse {
+        id: String,
+        name: String,
+        input: Value,
+    },
+}
+
+pub struct AnthropicClient {
+    api_key: String,
+    model: String,
+}
+
+impl AnthropicClient {
+    pub fn new(api_key: &str, model: &str) -> Self {
+        Self {
+            api_key: api_key.to_string(),
+            model: model.to_string(),
+        }
+    }
+
+    pub fn get_model_name(&self) -> String {
+        self.model.clone()
+    }
+}
+
+impl AnthropicClient {
+    pub async fn chat_stream<'a>(
+        &'a self,
+        messages: Vec<Message>,
+        tools: Option<&[crate::types::DynamicToolDefinition]>,
+        llm_debug: Option,
+    ) -> Result<
+        Box<dyn Stream<Item = Result<StreamEvent, WorkerError>> + Unpin +
Send + 'a>,
+        WorkerError,
+    > {
+        let client = Client::new();
+        let url = UrlConfig::get_completion_url("anthropic");
+
+        // Separate system messages from the other messages
+        let mut system_message: Option<String> = None;
+        let mut anthropic_messages: Vec<AnthropicMessage> = Vec::new();
+
+        for msg in messages {
+            match msg.role {
+                Role::System => {
+                    // Combine multiple system messages if they exist
+                    if let Some(existing) = system_message {
+                        system_message = Some(format!("{}\n\n{}", existing, msg.content));
+                    } else {
+                        system_message = Some(msg.content);
+                    }
+                }
+                Role::User => {
+                    anthropic_messages.push(AnthropicMessage {
+                        role: "user".to_string(),
+                        content: msg.content,
+                    });
+                }
+                Role::Model => {
+                    anthropic_messages.push(AnthropicMessage {
+                        role: "assistant".to_string(),
+                        content: msg.content,
+                    });
+                }
+                Role::Tool => {
+                    anthropic_messages.push(AnthropicMessage {
+                        role: "user".to_string(),
+                        content: msg.content,
+                    });
+                }
+            }
+        }
+
+        // Convert tools to the Anthropic format
+        let anthropic_tools = tools.map(|tools| {
+            tools
+                .iter()
+                .map(|tool| AnthropicTool {
+                    name: tool.name.clone(),
+                    description: tool.description.clone(),
+                    input_schema: tool.parameters_schema.clone(),
+                })
+                .collect()
+        });
+
+        let request = AnthropicRequest {
+            model: self.model.clone(),
+            max_tokens: 4096,
+            messages: anthropic_messages,
+            stream: true,
+            tools: anthropic_tools,
+            system: system_message,
+        };
+
+        // Log request details for debugging
+        tracing::debug!(
+            "Anthropic API request: {}",
+            serde_json::to_string_pretty(&request).unwrap_or_default()
+        );
+
+        let response = client
+            .post(url)
+            .header("Content-Type", "application/json")
+            .header("x-api-key", &self.api_key)
+            .header("anthropic-version", "2023-06-01")
+            .json(&request)
+            .send()
+            .await
+            .map_err(|e| {
+                WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Claude)
+            })?;
+
+        if !response.status().is_success() {
+            let status = response.status();
+            let error_body = response.text().await.unwrap_or_default();
+            return Err(WorkerError::from_api_error(
+                format!("Anthropic API error: {} - {}", status, error_body),
+                &crate::types::LlmProvider::Claude,
+            ));
+        }
+
+        let stream = stream!
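+        // Parse Anthropic's SSE stream: buffer incoming bytes, split them into lines,
+        // and treat each "data: " payload as one typed stream event.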
{ + // デバッグ情報を送信 + if let Some(ref debug) = llm_debug { + if let Some(debug_event) = debug.debug_request(&self.model, "Anthropic", &serde_json::to_value(&request).unwrap_or_default()) { + yield Ok(debug_event); + } + } + + let mut stream = response.bytes_stream(); + let mut buffer = String::new(); + + while let Some(chunk) = stream.next().await { + match chunk { + Ok(bytes) => { + let chunk_str = String::from_utf8_lossy(&bytes); + buffer.push_str(&chunk_str); + + // Server-sent eventsを処理 + while let Some(line_end) = buffer.find('\n') { + let line = buffer[..line_end].to_string(); + buffer = buffer[line_end + 1..].to_string(); + + if line.starts_with("data: ") { + let data = &line[6..]; + if data == "[DONE]" { + break; + } + + match serde_json::from_str::(data) { + Ok(stream_response) => { + // デバッグ情報を送信 + if let Some(ref debug) = llm_debug { + if let Some(debug_event) = debug.debug_response(&self.model, "Anthropic", &serde_json::to_value(&stream_response).unwrap_or_default()) { + yield Ok(debug_event); + } + } + match stream_response.response_type.as_str() { + "content_block_delta" => { + if let Some(delta) = stream_response.data.get("delta") { + if let Some(text) = delta.get("text").and_then(|t| t.as_str()) { + yield Ok(StreamEvent::Chunk(text.to_string())); + } + } + } + "content_block_start" => { + if let Some(content_block) = stream_response.data.get("content_block") { + if let Some(block_type) = content_block.get("type").and_then(|t| t.as_str()) { + if block_type == "tool_use" { + if let (Some(name), Some(input)) = ( + content_block.get("name").and_then(|n| n.as_str()), + content_block.get("input") + ) { + let tool_call = ToolCall { + name: name.to_string(), + arguments: input.to_string(), + }; + yield Ok(StreamEvent::ToolCall(tool_call)); + } + } + } + } + } + "message_start" => { + tracing::debug!("Anthropic message stream started"); + } + "message_delta" => { + if let Some(delta) = stream_response.data.get("delta") { + if let Some(stop_reason) = delta.get("stop_reason") { + tracing::debug!("Anthropic message stop reason: {}", stop_reason); + } + } + } + "message_stop" => { + tracing::debug!("Anthropic message stream stopped"); + yield Ok(StreamEvent::Completion(Message::new( + Role::Model, + "".to_string(), + ))); + break; + } + "content_block_stop" => { + tracing::debug!("Anthropic content block stopped"); + } + "ping" => { + tracing::debug!("Anthropic ping received"); + } + "error" => { + if let Some(error) = stream_response.data.get("error") { + let error_msg = error.get("message") + .and_then(|m| m.as_str()) + .unwrap_or("Unknown error"); + tracing::error!("Anthropic stream error: {}", error_msg); + yield Err(WorkerError::from_api_error( + format!("Anthropic stream error: {}", error_msg), + &crate::types::LlmProvider::Claude, + )); + } + } + _ => { + tracing::debug!("Unhandled Anthropic stream event: {}", stream_response.response_type); + } + } + } + Err(e) => { + tracing::warn!("Failed to parse Anthropic stream response: {} - Raw data: {}", e, data); + } + } + } + } + } + Err(e) => { + yield Err(WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Claude)); + break; + } + } + } + }; + + Ok(Box::new(Box::pin(stream))) + } + + pub async fn check_connection(&self) -> Result<(), WorkerError> { + let client = Client::new(); + let url = UrlConfig::get_completion_url("anthropic"); + + // Use a default valid model for connection testing if model is empty + let test_model = if self.model.is_empty() { + "claude-3-haiku-20240307".to_string() + } else { + 
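+            // A model is configured, so probe the API with it as-is.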
self.model.clone() + }; + + tracing::debug!( + "Anthropic connection test: Using model '{}' with API key length: {}", + test_model, + self.api_key.len() + ); + + let test_request = AnthropicRequest { + model: test_model, + max_tokens: 1, + messages: vec![AnthropicMessage { + role: "user".to_string(), + content: "Hi".to_string(), + }], + stream: false, + tools: None, + system: None, + }; + + let response = client + .post(url) + .header("Content-Type", "application/json") + .header("x-api-key", &self.api_key) + .header("anthropic-version", "2023-06-01") + .json(&test_request) + .send() + .await + .map_err(|e| { + tracing::error!("Anthropic connection test network error: {}", e); + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Claude) + })?; + + if !response.status().is_success() { + let status = response.status(); + let error_body = response.text().await.unwrap_or_default(); + tracing::error!( + "Anthropic connection test failed: Status={}, Body={}", + status, + error_body + ); + return Err(WorkerError::from_api_error( + format!( + "Anthropic connection test failed: {} - {}", + status, error_body + ), + &crate::types::LlmProvider::Claude, + )); + } + + Ok(()) + } +} + +#[async_trait::async_trait] +impl LlmClientTrait for AnthropicClient { + async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[crate::types::DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + self.chat_stream(messages, tools, llm_debug).await + } + + async fn check_connection(&self) -> Result<(), WorkerError> { + self.check_connection().await + } + + fn provider(&self) -> LlmProvider { + LlmProvider::Claude + } + + fn get_model_name(&self) -> String { + self.get_model_name() + } +} diff --git a/worker/src/llm/gemini.rs b/worker/src/llm/gemini.rs new file mode 100644 index 0000000..47967b3 --- /dev/null +++ b/worker/src/llm/gemini.rs @@ -0,0 +1,977 @@ +use crate::{ + LlmClientTrait, WorkerError, + types::{DynamicToolDefinition, LlmProvider, Message, Role, StreamEvent, ToolCall}, + url_config::UrlConfig, +}; +use futures_util::{Stream, StreamExt, TryStreamExt}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use tracing; + +/// Extract tool name from Tool message content +fn extract_tool_name_from_content(content: &str) -> Option { + // Look for patterns like "Tool 'tool_name' executed successfully" + if let Some(start) = content.find("Tool '") { + if let Some(end) = content[start + 6..].find("'") { + let tool_name = &content[start + 6..start + 6 + end]; + return Some(tool_name.to_string()); + } + } + None +} + +/// Parse tool call information from message content + +/// Transforms a JSON schema to be compatible with Gemini API +/// Converts 'uint' types to 'integer' types and handles nullable types +/// Also ensures the schema is in the correct format for Gemini function parameters +fn transform_schema_for_gemini(schema: serde_json::Value) -> serde_json::Value { + match schema { + serde_json::Value::Object(mut obj) => { + // Remove $schema key as it's not needed for Gemini + obj.remove("$schema"); + + // Handle type field + if let Some(type_val) = obj.get("type") { + match type_val { + // Convert 'uint' to 'integer' + serde_json::Value::String(s) if s == "uint" => { + obj.insert( + "type".to_string(), + serde_json::Value::String("integer".to_string()), + ); + // Add format for integer types as required by Gemini + obj.insert( + "format".to_string(), + serde_json::Value::String("int64".to_string()), + ); + } + 
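+            // Note: some schema generators emit a nonstandard "uint" type for Rust
+            // unsigned integers; Gemini's function-calling schema only accepts
+            // "integer" together with an explicit format, hence the rewrite above.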
// Handle array types like ["integer", "null"] + serde_json::Value::Array(arr) => { + if let Some(non_null_type) = arr.iter().find(|&t| t != "null") { + // Use the non-null type + let mut new_type = non_null_type.clone(); + // Convert 'uint' to 'integer' if needed + if let serde_json::Value::String(s) = &new_type { + if s == "uint" { + new_type = serde_json::Value::String("integer".to_string()); + } + } + obj.insert("type".to_string(), new_type.clone()); + + // Add format for integer types as required by Gemini + if let serde_json::Value::String(type_str) = &new_type { + if type_str == "integer" { + obj.insert( + "format".to_string(), + serde_json::Value::String("int64".to_string()), + ); + } + } + } + } + // Handle existing integer types + serde_json::Value::String(s) if s == "integer" => { + // Add format for integer types as required by Gemini + obj.insert( + "format".to_string(), + serde_json::Value::String("int64".to_string()), + ); + } + _ => {} + } + } + + // Handle properties and required fields + if let (Some(properties), Some(required)) = (obj.get("properties"), obj.get("required")) + { + if let (serde_json::Value::Object(props), serde_json::Value::Array(req_arr)) = + (properties, required) + { + let mut new_required = Vec::new(); + + for (prop_name, _) in props { + // Only include in required if it's not nullable + if req_arr.iter().any(|r| r == prop_name) { + // Check if this property has a nullable type + if let Some(prop_schema) = props.get(prop_name) { + if let Some(prop_type) = prop_schema.get("type") { + // If type is an array containing "null", it's nullable + let is_nullable = match prop_type { + serde_json::Value::Array(arr) => { + arr.iter().any(|t| t == "null") + } + _ => false, + }; + + // Only add to required if not nullable + if !is_nullable { + new_required + .push(serde_json::Value::String(prop_name.clone())); + } + } else { + // No type info, assume required + new_required.push(serde_json::Value::String(prop_name.clone())); + } + } + } + } + + obj.insert( + "required".to_string(), + serde_json::Value::Array(new_required), + ); + } + } + + // Recursively transform nested objects + for (_, value) in obj.iter_mut() { + *value = transform_schema_for_gemini(value.clone()); + } + + serde_json::Value::Object(obj) + } + serde_json::Value::Array(arr) => { + serde_json::Value::Array(arr.into_iter().map(transform_schema_for_gemini).collect()) + } + other => other, + } +} + +// --- Request Structures --- +#[derive(Debug, Serialize, Clone)] +pub struct GeminiTool { + #[serde(rename = "functionDeclarations")] + pub function_declarations: Vec, +} + +#[derive(Debug, Serialize, Clone)] +pub struct GeminiFunctionDeclaration { + pub name: String, + pub description: String, + pub parameters: serde_json::Value, +} + +#[derive(Debug, Serialize, Clone)] +pub struct GeminiRequest { + pub contents: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "systemInstruction")] + pub system_instruction: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct GeminiContent { + pub role: String, + #[serde(default)] + pub parts: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(untagged)] +pub enum GeminiPart { + Text { + text: String, + }, + FunctionCall { + #[serde(rename = "functionCall")] + function_call: GeminiFunctionCall, + }, + FunctionResponse { + #[serde(rename = "functionResponse")] + function_response: GeminiFunctionResponse, + }, +} + +#[derive(Debug, 
Serialize, Deserialize, Clone)] +pub struct GeminiFunctionCall { + pub name: String, + pub args: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct GeminiFunctionResponse { + pub name: String, + pub response: serde_json::Value, +} + +// --- Response Structures --- +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct GeminiResponse { + #[serde(default)] + pub candidates: Vec, +} + +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct GeminiCandidate { + pub content: GeminiContent, + #[serde(skip_serializing_if = "Option::is_none")] + pub finish_reason: Option, +} + +fn build_url(model: &str) -> String { + let base_url = UrlConfig::get_base_url("gemini"); + let action = "streamGenerateContent"; + format!("{}/v1beta/models/{}:{}", base_url, model, action) +} + +/// Finds the start and end indices of the first complete JSON object `{...}` in the buffer. +fn find_first_json_object_bounds(buffer: &[u8]) -> Option<(usize, usize)> { + if let Some(start) = buffer.iter().position(|&b| b == b'{') { + let mut brace_count = 0; + let mut in_string = false; + let mut escaped = false; + + for (i, &byte) in buffer.iter().skip(start).enumerate() { + if in_string { + if escaped { + escaped = false; + } else if byte == b'\\' { + escaped = true; + } else if byte == b'"' { + in_string = false; + } + } else { + match byte { + b'"' => in_string = true, + b'{' => brace_count += 1, + b'}' => { + brace_count -= 1; + if brace_count == 0 { + let end = start + i + 1; + return Some((start, end)); + } + } + _ => {} + } + } + } + } + None // No complete object found +} + +/// Completes a chat request with streaming, yielding StreamEvent objects. +pub(crate) fn stream_events<'a>( + api_key: &'a str, + model: &'a str, + request: GeminiRequest, + llm_debug: Option, +) -> impl Stream> + 'a { + let api_key = api_key.to_string(); + let model = model.to_string(); + + async_stream::try_stream! 
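// find_first_json_object_bounds is what lets stream_events below consume
// Gemini's incrementally streamed JSON without a full parser: it scans for a
// balanced top-level `{...}`, ignoring braces that occur inside strings and
// handling escaped quotes. A quick illustration (a sketch, not part of the
// patch):
fn bounds_examples() {
    let buf = br#"[{"a": "brace } in string"},"#;
    let (start, end) = find_first_json_object_bounds(buf).unwrap();
    assert_eq!(&buf[start..end], br#"{"a": "brace } in string"}"#.as_slice());
    // An incomplete object yields None until more bytes arrive.
    assert!(find_first_json_object_bounds(br#"{"a": 1"#).is_none());
}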
{ + let body = serde_json::to_string_pretty(&request).unwrap_or_else(|e| e.to_string()); + tracing::debug!("Gemini Request Body: {}", body); + + if let Some(debug_settings) = &llm_debug { + if let Some(debug_event) = debug_settings.debug_request(&model, "Gemini", &serde_json::to_value(&request).unwrap_or_default()) { + yield debug_event; + } + } + + let client = Client::new(); + let url = build_url(&model); + + let response = client + .post(&url) + .header("x-goog-api-key", &api_key) + .json(&request) + .send() + .await + .map_err(|e| anyhow::anyhow!("Gemini API request failed: {}", e))?; + + let status = response.status(); + if !status.is_success() { + let error_body = response.text().await.unwrap_or_else(|_| "Could not read error body".to_string()); + let error_msg = format!("Gemini API request failed with status: {} - {}", status, error_body); + tracing::error!("{}", error_msg); + Err(anyhow::anyhow!(error_msg))?; + } else { + let mut byte_stream = response.bytes_stream(); + let mut buffer = Vec::new(); + let mut full_content = String::new(); + + while let Some(chunk_result) = byte_stream.next().await { + let chunk = chunk_result?; + buffer.extend_from_slice(&chunk); + + while let Some((start, end)) = find_first_json_object_bounds(&buffer) { + let json_slice = &buffer[start..end]; + + if let Some(debug_settings) = &llm_debug { + if let Ok(response_value) = serde_json::from_slice::(json_slice) { + if let Some(debug_event) = debug_settings.debug_response(&model, "Gemini", &response_value) { + yield debug_event; + } + } + } + + match serde_json::from_slice::(json_slice) { + Ok(response) => { + let response_text = String::from_utf8_lossy(json_slice); + tracing::debug!( + response = %response_text, + candidates_count = response.candidates.len(), + "Successfully parsed Gemini response" + ); + if response.candidates.is_empty() { + tracing::warn!( + response = %response_text, + "Received empty candidates in Gemini response" + ); + } else if let Some(candidate) = response.candidates.get(0) { + // Log finish reason for debugging + if let Some(ref finish_reason) = candidate.finish_reason { + tracing::debug!( + finish_reason = %finish_reason, + "Received finish reason in Gemini response" + ); + + // Handle specific finish reasons + match finish_reason.as_str() { + "STOP" => { + tracing::debug!("Gemini response completed with STOP"); + // Continue processing parts if any, this is normal completion + } + "MAX_TOKENS" => { + tracing::warn!("Gemini response stopped due to MAX_TOKENS"); + } + "SAFETY" => { + tracing::warn!("Gemini response stopped due to SAFETY concerns"); + } + "RECITATION" => { + tracing::warn!("Gemini response stopped due to RECITATION"); + } + other => { + tracing::warn!("Gemini response stopped with unknown reason: {}", other); + } + } + } + + if candidate.content.parts.is_empty() { + tracing::warn!( + response = %response_text, + role = %candidate.content.role, + finish_reason = ?candidate.finish_reason, + "Received empty parts in Gemini response" + ); + } else { + for part in &candidate.content.parts { + tracing::debug!("Processing Gemini part (type unknown)"); + match part { + GeminiPart::Text { text } => { + tracing::debug!("Found Text part with content length: {}", text.len()); + full_content.push_str(text); + yield StreamEvent::Chunk(text.clone()); + } + GeminiPart::FunctionCall { function_call } => { + tracing::debug!("Found FunctionCall part: name={}, args={:?}", function_call.name, function_call.args); + let tool_call = ToolCall { + name: function_call.name.clone(), + 
arguments: serde_json::to_string(&function_call.args)
+                                                .unwrap_or_else(|_| "{}".to_string()),
+                                        };
+                                        yield StreamEvent::ToolCall(tool_call.clone());
+                                    }
+                                    GeminiPart::FunctionResponse { .. } => {
+                                        // Function responses in model output are not expected
+                                        // as they're part of the input conversation history
+                                        tracing::warn!("Unexpected FunctionResponse in model output");
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                Err(e) => {
+                    let response_text = String::from_utf8_lossy(json_slice);
+                    tracing::warn!(
+                        error = %e,
+                        response = %response_text,
+                        "Failed to deserialize GeminiResponse from slice"
+                    );
+                }
+            }
+            buffer.drain(..end);
+        }
+    }
+
+    let final_message = Message::new(
+        Role::Model,
+        full_content,
+    );
+    yield StreamEvent::Completion(final_message);
+    }
+}
+
+pub struct GeminiClient {
+    api_key: String,
+    model: String,
+}
+
+impl GeminiClient {
+    pub fn new(api_key: &str, model: &str) -> Self {
+        Self {
+            api_key: api_key.to_string(),
+            model: model.to_string(),
+        }
+    }
+
+    pub fn get_model_name(&self) -> String {
+        self.model.clone()
+    }
+
+    /// Static method: fetch the model list given an API key
+    pub async fn list_models_static(
+        api_key: &str,
+    ) -> Result<Vec<crate::types::ModelInfo>, WorkerError> {
+        let client = Client::new();
+        let url = UrlConfig::get_models_url("gemini");
+
+        let response = client
+            .get(url)
+            .header("x-goog-api-key", api_key)
+            .send()
+            .await
+            .map_err(|e| {
+                tracing::error!("Gemini API request failed: {}", e);
+                WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Gemini)
+            })?;
+
+        if !response.status().is_success() {
+            let status = response.status();
+            let error_body = response.text().await.unwrap_or_default();
+            tracing::error!(
+                "Gemini list_models_static failed - Status: {}, Body: {}",
+                status,
+                error_body
+            );
+            return Err(WorkerError::from_api_error(
+                format!("Failed to list Gemini models: {} - {}", status, error_body),
+                &crate::types::LlmProvider::Gemini,
+            ));
+        }
+
+        let models_response: serde_json::Value = response.json().await.map_err(|e| {
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Gemini)
+        })?;
+
+        let mut models = Vec::new();
+
+        if let Some(models_array) = models_response.get("models").and_then(|m| m.as_array()) {
+            for model in models_array {
+                if let Some(name) = model.get("name").and_then(|n| n.as_str()) {
+                    // Strip the "models/" prefix
+                    let model_id = name.strip_prefix("models/").unwrap_or(name);
+
+                    // Only include models that support the generateContent method
+                    if let Some(supported_methods) = model
+                        .get("supportedGenerationMethods")
+                        .and_then(|m| m.as_array())
+                    {
+                        let supports_generate_content = supported_methods
+                            .iter()
+                            .any(|method| method.as_str() == Some("generateContent"));
+
+                        if supports_generate_content {
+                            models.push(crate::types::ModelInfo {
+                                id: model_id.to_string(),
+                                name: model
+                                    .get("displayName")
+                                    .and_then(|d| d.as_str())
+                                    .unwrap_or(model_id)
+                                    .to_string(),
+                                provider: crate::types::LlmProvider::Gemini,
+                                supports_tools: true,
+                                supports_function_calling: true,
+                                supports_vision: false,
+                                supports_multimodal: false,
+                                context_length: None,
+                                training_cutoff: None,
+                                capabilities: vec!["text_generation".to_string()],
+                                description: model
+                                    .get("description")
+                                    .and_then(|d| d.as_str())
+                                    .map(|s| s.to_string())
+                                    .or_else(|| Some(format!("Google Gemini model: {}", model_id))),
+                            });
+                        }
+                    }
+                }
+            }
+        }
+
+        tracing::info!(
+            "Gemini list_models_static found {} models with metadata",
+            models.len()
+        );
+        Ok(models)
+    }
+
+    pub async fn chat_stream<'a>(
+        &'a self,
+        messages: Vec<Message>,
+        tools: Option<&[DynamicToolDefinition]>,
llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + // Separate system messages from regular messages + let (system_messages, regular_messages): (Vec<_>, Vec<_>) = messages + .into_iter() + .partition(|msg| matches!(msg.role, Role::System)); + + // Create system instruction from system messages + let system_instruction = if !system_messages.is_empty() { + let combined_system_content = system_messages + .into_iter() + .map(|msg| msg.content) + .collect::>() + .join("\n\n"); + + Some(GeminiContent { + role: "user".to_string(), // System instruction uses "user" role + parts: vec![GeminiPart::Text { + text: combined_system_content, + }], + }) + } else { + None + }; + + // Process regular messages with proper tool context handling + let contents = regular_messages + .into_iter() + .map(|msg| { + let (role, parts) = match msg.role { + Role::User => ( + "user".to_string(), + vec![GeminiPart::Text { text: msg.content }], + ), + Role::Model => { + if let Some(tool_calls) = &msg.tool_calls { + // Model message with tool calls - convert to FunctionCall parts + tracing::debug!( + "Converting model message with {} tool calls to FunctionCall parts", + tool_calls.len() + ); + let mut parts = Vec::new(); + + // Add text content if present + if !msg.content.is_empty() { + parts.push(GeminiPart::Text { + text: msg.content.clone(), + }); + } + + // Add function calls + for tool_call in tool_calls { + tracing::debug!( + "Adding FunctionCall part for tool: {}", + tool_call.name + ); + let args = serde_json::from_str(&tool_call.arguments) + .unwrap_or(serde_json::json!({})); + parts.push(GeminiPart::FunctionCall { + function_call: GeminiFunctionCall { + name: tool_call.name.clone(), + args, + }, + }); + } + + ("model".to_string(), parts) + } else { + // Regular model message + tracing::debug!("Converting regular model message (no tool calls)"); + ( + "model".to_string(), + vec![GeminiPart::Text { text: msg.content }], + ) + } + } + Role::Tool => { + // Tool responses should be sent as FunctionResponse + if let Some(tool_name) = extract_tool_name_from_content(&msg.content) { + // Extract result from the content + let result_value = if msg.content.contains("Result: ") { + if let Some(result_start) = msg.content.find("Result: ") { + let result_str = &msg.content[result_start + 8..]; + // Try to parse as JSON, fallback to string + serde_json::from_str(result_str) + .unwrap_or_else(|_| serde_json::json!(result_str)) + } else { + serde_json::json!(msg.content) + } + } else { + serde_json::json!(msg.content) + }; + + ( + "user".to_string(), + vec![GeminiPart::FunctionResponse { + function_response: GeminiFunctionResponse { + name: tool_name, + response: result_value, + }, + }], + ) + } else { + // Fallback to text response if tool name can't be extracted + ( + "user".to_string(), + vec![GeminiPart::Text { + text: format!("Tool Response:\n{}", msg.content), + }], + ) + } + } + Role::System => unreachable!(), // Should not reach here after partition + }; + GeminiContent { role, parts } + }) + .collect(); + + let tools = tools.map(|tools| { + vec![GeminiTool { + function_declarations: tools + .iter() + .map(|tool| { + let mut transformed_schema = + transform_schema_for_gemini(tool.parameters_schema.clone()); + + // Ensure the schema has the correct structure for Gemini + match transformed_schema { + serde_json::Value::Object(ref mut obj) => { + // Gemini expects the parameters to be an object with type: "object" + if !obj.contains_key("type") { + obj.insert( + "type".to_string(), + 
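// Net effect of the message mapping above: an assistant turn that called a
// tool, plus the tool's result, reach Gemini as a functionCall part on the
// "model" role followed by a functionResponse part on the "user" role. A
// sketch using the types defined earlier in this file (tool name and values
// illustrative):
fn tool_round_trip_parts() -> Vec<GeminiContent> {
    vec![
        GeminiContent {
            role: "model".to_string(),
            parts: vec![GeminiPart::FunctionCall {
                function_call: GeminiFunctionCall {
                    name: "list_files".to_string(), // hypothetical tool name
                    args: serde_json::json!({ "path": "./" }),
                },
            }],
        },
        GeminiContent {
            // Gemini expects tool results to come back on the user role.
            role: "user".to_string(),
            parts: vec![GeminiPart::FunctionResponse {
                function_response: GeminiFunctionResponse {
                    name: "list_files".to_string(),
                    response: serde_json::json!("ok"),
                },
            }],
        },
    ]
}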
serde_json::Value::String("object".to_string()), + ); + } + // If there are no properties, add an empty object + if !obj.contains_key("properties") { + obj.insert( + "properties".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + } + // If there are no required fields, add an empty array + if !obj.contains_key("required") { + obj.insert( + "required".to_string(), + serde_json::Value::Array(vec![]), + ); + } + } + _ => { + // If it's not an object, create a proper object schema + let mut schema_obj = serde_json::Map::new(); + schema_obj.insert( + "type".to_string(), + serde_json::Value::String("object".to_string()), + ); + schema_obj.insert( + "properties".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + schema_obj.insert( + "required".to_string(), + serde_json::Value::Array(vec![]), + ); + transformed_schema = serde_json::Value::Object(schema_obj); + } + } + + GeminiFunctionDeclaration { + name: tool.name.clone(), + description: tool.description.clone(), + parameters: transformed_schema, + } + }) + .collect(), + }] + }); + + let request = GeminiRequest { + contents, + system_instruction, + tools, + }; + + let stream = stream_events(&self.api_key, &self.model, request, llm_debug) + .map_err(|e| WorkerError::LlmApiError(e.to_string())); + + Ok(Box::new(Box::pin(stream))) + } + + pub async fn get_model_details( + &self, + model_name: &str, + ) -> Result { + let client = Client::new(); + let url = UrlConfig::get_model_url("gemini", model_name); + + let response = client + .get(&url) + .header("x-goog-api-key", &self.api_key) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Gemini) + })?; + + if !response.status().is_success() { + return Err(WorkerError::from_api_error( + format!( + "Gemini model details request failed with status: {}", + response.status() + ), + &crate::types::LlmProvider::Gemini, + )); + } + + let model_data: serde_json::Value = response.json().await.map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Gemini) + })?; + + let name = model_data + .get("name") + .and_then(|n| n.as_str()) + .unwrap_or(model_name); + let display_name = model_data + .get("displayName") + .and_then(|d| d.as_str()) + .unwrap_or(name); + let description = model_data + .get("description") + .and_then(|d| d.as_str()) + .unwrap_or(""); + let version = model_data + .get("version") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + let input_token_limit = model_data + .get("inputTokenLimit") + .and_then(|i| i.as_u64()) + .map(|i| i as u32); + let _output_token_limit = model_data + .get("outputTokenLimit") + .and_then(|o| o.as_u64()) + .map(|o| o as u32); + + let empty_vec = Vec::new(); + let supported_methods = model_data + .get("supportedGenerationMethods") + .and_then(|s| s.as_array()) + .unwrap_or(&empty_vec); + + let supports_tools = supported_methods + .iter() + .any(|method| method.as_str() == Some("generateContent")); + + let supports_vision = false; // Will be determined dynamically + let capabilities = vec!["text_generation".to_string()]; // Basic default + + Ok(crate::types::ModelInfo { + id: model_name.to_string(), + name: display_name.to_string(), + provider: crate::types::LlmProvider::Gemini, + supports_tools, + supports_function_calling: supports_tools, + supports_vision, + supports_multimodal: supports_vision, + context_length: input_token_limit, + training_cutoff: version, + capabilities, + description: Some(if description.is_empty() { + 
format!("Google Gemini model: {}", display_name) + } else { + description.to_string() + }), + }) + } + + pub async fn check_connection(&self) -> Result<(), WorkerError> { + // Simple connection check - try to call the API + // For now, just return OK if model is not empty + if self.model.is_empty() { + return Err(WorkerError::ModelNotFound("No model specified".to_string())); + } + Ok(()) + } +} + +#[async_trait::async_trait] +impl LlmClientTrait for GeminiClient { + async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + self.chat_stream(messages, tools, llm_debug).await + } + + async fn check_connection(&self) -> Result<(), WorkerError> { + self.check_connection().await + } + + fn provider(&self) -> LlmProvider { + LlmProvider::Gemini + } + + fn get_model_name(&self) -> String { + self.get_model_name() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::env; + + #[test] + fn test_schema_transformation() { + // Test schema with various type formats including $schema + let schema = serde_json::json!({ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "id": { + "type": "uint" + }, + "optional_number": { + "type": ["integer", "null"] + }, + "required_string": { + "type": "string" + }, + "existing_integer": { + "type": "integer" + }, + "nested": { + "type": "object", + "properties": { + "count": { + "type": ["uint", "null"] + } + } + } + }, + "required": ["id", "optional_number", "required_string"] + }); + + let transformed = transform_schema_for_gemini(schema); + + // Check that the schema has the correct structure + assert_eq!(transformed["type"], "object"); + assert!(transformed["properties"].is_object()); + assert!(transformed["required"].is_array()); + + // Check that $schema key is removed + assert!(transformed.get("$schema").is_none()); + + // Check that 'uint' was transformed to 'integer' + assert_eq!(transformed["properties"]["id"]["type"], "integer"); + assert_eq!(transformed["properties"]["id"]["format"], "int64"); + + // Check that array types are converted to single types + assert_eq!( + transformed["properties"]["optional_number"]["type"], + "integer" + ); + assert_eq!( + transformed["properties"]["optional_number"]["format"], + "int64" + ); + assert_eq!( + transformed["properties"]["nested"]["properties"]["count"]["type"], + "integer" + ); + assert_eq!( + transformed["properties"]["nested"]["properties"]["count"]["format"], + "int64" + ); + + // Check that existing integer types also get format + assert_eq!( + transformed["properties"]["existing_integer"]["type"], + "integer" + ); + assert_eq!( + transformed["properties"]["existing_integer"]["format"], + "int64" + ); + + // Check that required array is updated correctly (nullable properties should be removed) + let required: Vec<&str> = transformed["required"] + .as_array() + .unwrap() + .iter() + .map(|v| v.as_str().unwrap()) + .collect(); + + assert!(required.contains(&"id")); + assert!(required.contains(&"required_string")); + assert!(!required.contains(&"optional_number")); // Should be removed because it's nullable + } + + #[test] + fn test_empty_schema_transformation() { + // Test with an empty schema as would be processed in tool generation + let schema = serde_json::json!({}); + let mut transformed = transform_schema_for_gemini(schema); + + // Apply the same logic as in tool generation + match transformed { + 
serde_json::Value::Object(ref mut obj) => { + if !obj.contains_key("type") { + obj.insert( + "type".to_string(), + serde_json::Value::String("object".to_string()), + ); + } + if !obj.contains_key("properties") { + obj.insert( + "properties".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + } + if !obj.contains_key("required") { + obj.insert("required".to_string(), serde_json::Value::Array(vec![])); + } + } + _ => { + let mut schema_obj = serde_json::Map::new(); + schema_obj.insert( + "type".to_string(), + serde_json::Value::String("object".to_string()), + ); + schema_obj.insert( + "properties".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + schema_obj.insert("required".to_string(), serde_json::Value::Array(vec![])); + transformed = serde_json::Value::Object(schema_obj); + } + } + + // Should be converted to a proper object schema + assert_eq!(transformed["type"], "object"); + assert!(transformed["properties"].is_object()); + assert!(transformed["required"].is_array()); + } +} diff --git a/worker/src/llm/mod.rs b/worker/src/llm/mod.rs new file mode 100644 index 0000000..db50d48 --- /dev/null +++ b/worker/src/llm/mod.rs @@ -0,0 +1,5 @@ +pub mod anthropic; +pub mod gemini; +pub mod ollama; +pub mod openai; +pub mod xai; diff --git a/worker/src/llm/ollama.rs b/worker/src/llm/ollama.rs new file mode 100644 index 0000000..222617b --- /dev/null +++ b/worker/src/llm/ollama.rs @@ -0,0 +1,801 @@ +use crate::{ + LlmClientTrait, WorkerError, + types::{DynamicToolDefinition, LlmProvider, Message, Role, StreamEvent, ToolCall}, + url_config::UrlConfig, +}; +use futures_util::{Stream, StreamExt}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +// --- Request & Response Structures --- +#[derive(Debug, Serialize, Clone)] +pub struct OllamaRequest { + pub model: String, + pub messages: Vec, + pub stream: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OllamaMessage { + pub role: String, + pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OllamaToolCall { + pub function: OllamaToolCallFunction, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OllamaToolCallFunction { + pub name: String, + #[serde( + serialize_with = "serialize_arguments", + deserialize_with = "deserialize_arguments" + )] + pub arguments: String, +} + +/// Custom serializer for arguments field that serializes strings as-is +fn serialize_arguments(arguments: &str, serializer: S) -> Result +where + S: serde::Serializer, +{ + // Try to parse as JSON first, if successful serialize as raw JSON + // If not valid JSON, serialize as string + if let Ok(value) = serde_json::from_str::(arguments) { + value.serialize(serializer) + } else { + arguments.serialize(serializer) + } +} + +/// Custom deserializer for arguments field that handles both string and object formats +fn deserialize_arguments<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + use serde::de::Error; + + let value: serde_json::Value = serde::Deserialize::deserialize(deserializer)?; + + match value { + serde_json::Value::String(s) => Ok(s), + serde_json::Value::Object(_) | serde_json::Value::Array(_) => { + // If it's an object or array, serialize it back to a JSON string + serde_json::to_string(&value).map_err(D::Error::custom) + } + _ => 
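// Round-trip semantics of the two argument helpers above, as commented
// examples (values illustrative):
//   serialize:   "{\"path\": \"./\"}" -> {"path": "./"}  (valid JSON re-emitted as JSON)
//   serialize:   "not json"           -> "not json"      (kept as a plain string)
//   deserialize: {"path": "./"}       -> "{\"path\":\"./\"}" (object stringified back)
//   deserialize: "already a string"   -> "already a string"
// Anything else (numbers, booleans, null) falls through to the error arm below: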
Err(D::Error::custom("arguments must be a string or object")),
+    }
+}
+
+#[derive(Debug, Serialize, Clone)]
+pub struct OllamaTool {
+    #[serde(rename = "type")]
+    pub tool_type: String,
+    pub function: OllamaFunction,
+}
+
+#[derive(Debug, Serialize, Clone)]
+pub struct OllamaFunction {
+    pub name: String,
+    pub description: String,
+    pub parameters: Value,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct OllamaResponse {
+    pub message: OllamaMessage,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct OllamaStreamResponse {
+    pub message: OllamaMessage,
+    pub done: bool,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct OllamaModelShowResponse {
+    pub details: Option<OllamaModelDetails>,
+    pub model_info: Option,
+    pub template: Option,
+    pub system: Option,
+    pub parameters: Option,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct OllamaModelDetails {
+    pub format: Option<String>,
+    pub family: Option<String>,
+    pub families: Option<Vec<String>>,
+    pub parameter_size: Option<String>,
+    pub quantization_level: Option<String>,
+}
+
+// --- Client ---
+pub struct OllamaClient {
+    model: String,
+    base_url: String,
+    api_key: Option<String>,
+}
+
+impl OllamaClient {
+    pub fn new(model: &str) -> Self {
+        Self {
+            model: model.to_string(),
+            base_url: UrlConfig::get_base_url("ollama"),
+            api_key: None,
+        }
+    }
+
+    pub fn new_with_key(api_key: &str, model: &str) -> Self {
+        tracing::debug!(
+            "Ollama: Creating client with API key (length: {}), model: {}",
+            api_key.len(),
+            model
+        );
+        Self {
+            model: model.to_string(),
+            base_url: UrlConfig::get_base_url("ollama"),
+            api_key: Some(api_key.to_string()),
+        }
+    }
+
+    pub fn get_model_name(&self) -> String {
+        self.model.clone()
+    }
+
+    fn add_auth_header(&self, request_builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
+        tracing::debug!(
+            "Ollama: add_auth_header called, api_key present: {}",
+            self.api_key.is_some()
+        );
+
+        if let Some(ref api_key) = self.api_key {
+            // Detailed API-key logging removed (for security and readability)
+            // Only add the header when the API key is non-empty
+            if !api_key.trim().is_empty() {
+                // Check whether the API key is already formatted
+                if api_key.starts_with("Basic ") || api_key.starts_with("Bearer ") {
+                    // Already formatted (e.g. "Basic base64string" or "Bearer token")
+                    // Detailed auth-header logging removed (for security and readability)
+                    request_builder.header("Authorization", api_key)
+                } else {
+                    // Pick the auth scheme based on the URL
+                    let auth_header = if self.base_url.contains("ollama.com") {
+                        // ollama.com expects a Bearer token
+                        format!("Bearer {}", api_key)
+                    } else {
+                        // Everything else uses Basic auth (local/proxy setups)
+                        format!("Basic {}", api_key)
+                    };
+                    // Detailed auth-header logging removed (for security and readability)
+                    request_builder.header("Authorization", auth_header)
+                }
+            } else {
+                tracing::debug!("Ollama: Empty API key, skipping auth header");
+                request_builder
+            }
+        } else {
+            tracing::debug!(
+                "Ollama: No API key provided, using unauthenticated request (typical for local Ollama)"
+            );
+            request_builder
+        }
+    }
+
+    /// Static method: fetch the model list from the Ollama server (default URL)
+    pub async fn list_models_static(
+        api_key: &str,
+    ) -> Result<Vec<crate::types::ModelInfo>, WorkerError> {
+        let client = Client::new();
+        let url = UrlConfig::get_models_url("ollama");
+
+        tracing::debug!("Ollama list_models_static requesting: {}", url);
+
+        let mut request_builder = client.get(&url);
+        if !api_key.trim().is_empty() {
+            // Check whether the API key is already formatted
+            if api_key.starts_with("Basic ") || api_key.starts_with("Bearer ") {
+                // Already formatted
+                request_builder = request_builder.header("Authorization", api_key);
+            } else {
+                // Pick the auth scheme based on the URL
+                let auth_header = if url.contains("ollama.com") {
+                    // ollama.com expects a Bearer token
+                    format!("Bearer {}", api_key)
+                } else {
+                    // Everything else uses Basic auth (local/proxy setups)
+                    format!("Basic {}", api_key)
+                };
+                // Detailed auth-header logging removed (for security and readability)
+                request_builder = request_builder.header("Authorization", auth_header);
+            }
+        }
+
+        let response = request_builder.send().await.map_err(|e| {
+            tracing::error!("Ollama API request failed: {}", e);
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+        })?;
+
+        let status = response.status();
+        tracing::info!("Ollama list_models_static response status: {}", status);
+
+        if !status.is_success() {
+            let error_body = response.text().await.unwrap_or_default();
+            tracing::error!(
+                "Ollama list_models_static failed - Status: {}, Body: {}",
+                status,
+                error_body
+            );
+            let error_msg = format!("Failed to list Ollama models: {} - {}", status, error_body);
+            return Err(WorkerError::from_api_error(
+                error_msg,
+                &crate::types::LlmProvider::Ollama,
+            ));
+        }
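// Both call sites above pick the Authorization scheme the same way; the rule,
// distilled into one helper (name illustrative, not part of the patch):
fn ollama_auth_header(api_key: &str, url: &str) -> Option<String> {
    let key = api_key.trim();
    if key.is_empty() {
        return None; // local Ollama: no auth header at all
    }
    if key.starts_with("Basic ") || key.starts_with("Bearer ") {
        return Some(key.to_string()); // caller already formatted the header
    }
    Some(if url.contains("ollama.com") {
        format!("Bearer {}", key) // hosted ollama.com expects a Bearer token
    } else {
        format!("Basic {}", key) // local/proxy setups get Basic credentials
    })
}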
let response_text = response.text().await.map_err(|e| {
+            tracing::error!("Failed to read Ollama response text: {}", e);
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+        })?;
+
+        // Detailed raw-response logging removed (for readability)
+
+        let models_response: serde_json::Value =
+            serde_json::from_str(&response_text).map_err(|e| {
+                tracing::error!(
+                    "Failed to parse Ollama JSON response: {} - Response: {}",
+                    e,
+                    response_text
+                );
+                WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+            })?;
+
+        let mut models = Vec::new();
+
+        if let Some(models_array) = models_response.get("models").and_then(|m| m.as_array()) {
+            for model in models_array {
+                if let Some(name) = model.get("name").and_then(|n| n.as_str()) {
+                    models.push(crate::types::ModelInfo {
+                        id: name.to_string(),
+                        name: name.to_string(),
+                        provider: crate::types::LlmProvider::Ollama,
+                        supports_tools: true, // Will be determined by config
+                        supports_function_calling: true,
+                        supports_vision: false, // Will be determined by config
+                        supports_multimodal: false,
+                        context_length: None,
+                        training_cutoff: None,
+                        capabilities: vec!["text_generation".to_string()],
+                        description: Some(format!("Ollama model: {}", name)),
+                    });
+                }
+            }
+        }
+
+        tracing::info!(
+            "Ollama list_models_static found {} models with metadata",
+            models.len()
+        );
+        Ok(models)
+    }
+
+    // list_models_with_info was removed - models should be configured in models.yaml
+    // This private method is kept for future reference if needed
+    #[allow(dead_code)]
+    async fn list_models_with_info_internal(
+        &self,
+    ) -> Result<Vec<crate::types::ModelInfo>, WorkerError> {
+        let client = Client::new();
+        let url = format!("{}/api/tags", self.base_url);
+
+        tracing::debug!("Ollama list_models requesting: {}", url);
+
+        let request = self.add_auth_header(client.get(&url));
+        tracing::debug!("Ollama list_models_with_info sending request to: {}", &url);
+
+        let response = request.send().await.map_err(|e| {
+            tracing::error!("Ollama API request failed: {}", e);
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+        })?;
+
+        let status = response.status();
+        tracing::info!("Ollama list_models response status: {}", status);
+
+        if !status.is_success() {
+            let error_body = response.text().await.unwrap_or_default();
+            tracing::error!(
+                "Ollama list_models failed - Status: {}, Body: {}, URL: {}",
+                status,
+                error_body,
+                &url
+            );
+            let error_msg = format!("Failed to list Ollama models: {} - {}", status, error_body);
+            return Err(WorkerError::from_api_error(
+                error_msg,
+                &crate::types::LlmProvider::Ollama,
+            ));
+        }
+
+        let response_text = response.text().await.map_err(|e| {
+            tracing::error!("Failed to read Ollama response text: {}", e);
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+        })?;
+
+        // Detailed raw-response logging removed (for readability)
+
+        let models_response: serde_json::Value =
+            serde_json::from_str(&response_text).map_err(|e| {
+                tracing::error!(
+                    "Failed to parse Ollama JSON response: {} - Response: {}",
+                    e,
+                    response_text
+                );
+                WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+            })?;
+
+        let model_names: Vec<String> = models_response
+            .get("models")
+            .and_then(|models| models.as_array())
+            .ok_or_else(|| {
+                tracing::error!("Invalid Ollama models response format - missing 'models' array");
+                WorkerError::LlmApiError("Invalid models response format".to_string())
+            })?
+            .iter()
+            .filter_map(|model| {
+                model
+                    .get("name")
+                    .and_then(|name| name.as_str())
+                    .map(|s| s.to_string())
+            })
+            .collect();
+
+        // Process models concurrently to get detailed information
+        let mut models = Vec::new();
+        for name in model_names {
+            models.push(crate::types::ModelInfo {
+                id: name.clone(),
+                name: name.clone(),
+                provider: crate::types::LlmProvider::Ollama,
+                supports_tools: true, // Will be determined by config
+                supports_function_calling: true,
+                supports_vision: false, // Will be determined by config
+                supports_multimodal: false,
+                context_length: None,
+                training_cutoff: None,
+                capabilities: vec!["text_generation".to_string()],
+                description: Some(format!("Ollama model: {}", name)),
+            });
+        }
+
+        tracing::info!(
+            "Ollama list_models found {} models with dynamic capability detection",
+            models.len()
+        );
+        Ok(models)
+    }
+}
+
+use async_stream::stream;
+
+impl OllamaClient {
+    pub async fn chat_stream<'a>(
+        &'a self,
+        messages: Vec<Message>,
+        tools: Option<&[crate::types::DynamicToolDefinition]>,
+        llm_debug: Option,
+    ) -> Result<
+        Box<dyn Stream<Item = Result<StreamEvent, WorkerError>> + Unpin + Send + 'a>,
+        WorkerError,
+    > {
+        let client = Client::new();
+        let url = format!("{}/api/chat", self.base_url);
+
+        let ollama_messages: Vec<OllamaMessage> = messages
+            .into_iter()
+            .map(|msg| {
+                // Convert tool calls if present
+                let tool_calls = msg.tool_calls.map(|calls| {
+                    calls
+                        .into_iter()
+                        .map(|call| OllamaToolCall {
+                            function: OllamaToolCallFunction {
+                                name: call.name,
+                                arguments: call.arguments,
+                            },
+                        })
+                        .collect()
+                });
+
+                OllamaMessage {
+                    role: match msg.role {
+                        Role::User => "user".to_string(),
+                        Role::Model => "assistant".to_string(),
+                        Role::System => "system".to_string(),
+                        Role::Tool => "tool".to_string(),
+                    },
+                    content: msg.content,
+                    tool_calls,
+                }
+            })
+            .collect();
+
+        // Convert tools to Ollama format (similar to OpenAI)
+        let ollama_tools = tools.map(|tools| {
+            tools
+                .iter()
+                .map(|tool| OllamaTool {
+                    tool_type: "function".to_string(),
+                    function: OllamaFunction {
+                        name: tool.name.clone(),
+                        description: tool.description.clone(),
+                        parameters: tool.parameters_schema.clone(),
+                    },
+                })
+                .collect()
+        });
+
+        let request = OllamaRequest {
+            model: self.model.clone(),
+            messages: ollama_messages,
+            stream: true,
+            tools: ollama_tools,
+        };
+
+        let stream = stream! {
+            // Send debug information
+            if let Some(ref debug) = llm_debug {
+                if let Some(debug_event) = debug.debug_request(&self.model, "Ollama", &serde_json::to_value(&request).unwrap_or_default()) {
+                    yield Ok(debug_event);
+                }
+            }
+
+            // Log request information
+            tracing::info!("Ollama chat_stream: Sending request to {}", &url);
+            tracing::debug!("Ollama request model: {}", &request.model);
+            tracing::debug!("Ollama request messages count: {}", request.messages.len());
+            if let Some(ref tools) = request.tools {
+                tracing::debug!("Ollama request tools count: {}", tools.len());
+            }
+
+            // Detailed request logging removed (for readability)
+
+            let request_builder = self.add_auth_header(client.post(&url));
+
+            let response = request_builder
+                .header("Content-Type", "application/json")
+                .json(&request)
+                .send()
+                .await
+                .map_err(|e| {
+                    tracing::error!("Ollama chat_stream request failed: {}", e);
+                    WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)
+                });
+
+            let response = match response {
+                Ok(resp) => resp,
+                Err(e) => {
+                    yield Err(e);
+                    return;
+                }
+            };
+
+            if !response.status().is_success() {
+                let status = response.status();
+                let error_body = response.text().await.unwrap_or_default();
+                tracing::error!("Ollama chat_stream failed - Status: {}, Body: {}, URL: {}", status, error_body, &url);
+                yield Err(WorkerError::from_api_error(
+                    format!("Ollama API error: {} - {}", status, error_body),
+                    &crate::types::LlmProvider::Ollama,
+                ));
+                return;
+            } else {
+                tracing::info!("Ollama chat_stream response status: {}", response.status());
+            }
+
+            let mut byte_stream = response.bytes_stream();
+            let mut buffer = String::new();
+            let mut full_content = String::new();
+            let mut chunk_count = 0;
+
+            tracing::debug!("Ollama chat_stream: Starting to process response stream");
+
+            while let Some(chunk) = byte_stream.next().await {
+                match chunk {
+                    Ok(bytes) => {
+                        chunk_count += 1;
+                        let chunk_str = String::from_utf8_lossy(&bytes);
+                        // Detailed chunk logging removed (for readability)
+                        buffer.push_str(&chunk_str);
+
+                        // Process line by line
+                        while let Some(line_end) = buffer.find('\n') {
+                            let line = buffer[..line_end].to_string();
+                            buffer = buffer[line_end + 1..].to_string();
+
+                            if line.trim().is_empty() {
+                                continue;
+                            }
+
+                            // Detailed stream-line logging removed (for readability)
+
+                            match serde_json::from_str::<OllamaStreamResponse>(&line) {
+                                Ok(stream_response) => {
+                                    // Send debug information
+                                    if let Some(ref debug) = llm_debug {
+                                        if let Some(debug_event) = debug.debug_response(&self.model, "Ollama", &serde_json::to_value(&stream_response).unwrap_or_default()) {
+                                            yield Ok(debug_event);
+                                        }
+                                    }
+
+                                    // Handle tool calls
+                                    if let Some(tool_calls) = &stream_response.message.tool_calls {
+                                        tracing::info!("Ollama stream response contains {} tool calls", tool_calls.len());
+                                        for (i, tool_call) in tool_calls.iter().enumerate() {
+                                            tracing::debug!("Tool call #{}: name={}, arguments={}",
+                                                i + 1, tool_call.function.name, tool_call.function.arguments);
+                                            let parsed_tool_call = ToolCall {
+                                                name: tool_call.function.name.clone(),
+                                                arguments: tool_call.function.arguments.clone(),
+                                            };
+                                            yield Ok(StreamEvent::ToolCall(parsed_tool_call));
+                                        }
+                                    }
+
+                                    // Handle regular content
+                                    if !stream_response.message.content.is_empty() {
+                                        full_content.push_str(&stream_response.message.content);
+                                        yield Ok(StreamEvent::Chunk(stream_response.message.content));
+                                    }
+
+                                    if stream_response.done {
+                                        tracing::info!("Ollama stream completed, total content: {} chars", full_content.len());
+                                        tracing::debug!("Ollama complete response content: {}", full_content);
+                                        yield Ok(StreamEvent::Completion(Message::new(
+                                            Role::Model,
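// Unlike the SSE-based providers in this patch, Ollama streams
// newline-delimited JSON: every line is a complete OllamaStreamResponse, and
// `done: true` plays the role of SSE's "[DONE]" sentinel. A minimal framing
// sketch mirroring the loop above (name illustrative):
fn ndjson_events(buffer: &mut String, chunk: &str) -> Vec<OllamaStreamResponse> {
    buffer.push_str(chunk);
    let mut out = Vec::new();
    while let Some(line_end) = buffer.find('\n') {
        // Take one complete line out of the buffer, newline included.
        let line: String = buffer.drain(..=line_end).collect();
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        if let Ok(resp) = serde_json::from_str::<OllamaStreamResponse>(line) {
            out.push(resp);
        }
    }
    out
}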
full_content.clone(), + ))); + break; + } + } + Err(e) => { + tracing::warn!("Failed to parse Ollama stream response: {} - Line: {}", e, line); + tracing::debug!("Parse error details: line_length={}, error={}", line.len(), e); + } + } + } + } + Err(e) => { + tracing::error!("Ollama stream error after {} chunks: {}", chunk_count, e); + yield Err(WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama)); + break; + } + } + } + + tracing::debug!("Ollama chat_stream: Stream ended, processed {} chunks", chunk_count); + }; + + Ok(Box::new(Box::pin(stream))) + } + + pub async fn get_model_details( + &self, + model_name: &str, + ) -> Result { + let client = Client::new(); + let url = format!("{}/api/show", self.base_url); + + let request = serde_json::json!({ + "name": model_name + }); + + let response = self + .add_auth_header(client.post(&url)) + .json(&request) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama) + })?; + + if !response.status().is_success() { + return Err(WorkerError::from_api_error( + format!( + "Ollama model details request failed with status: {}", + response.status() + ), + &crate::types::LlmProvider::Ollama, + )); + } + + let model_data: serde_json::Value = response.json().await.map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::Ollama) + })?; + + let details = model_data + .get("details") + .unwrap_or(&serde_json::Value::Null); + let family = details + .get("family") + .and_then(|f| f.as_str()) + .unwrap_or("unknown"); + let parameter_size = details + .get("parameter_size") + .and_then(|p| p.as_str()) + .unwrap_or("unknown"); + let quantization = details + .get("quantization_level") + .and_then(|q| q.as_str()) + .unwrap_or("unknown"); + + let size = model_data.get("size").and_then(|s| s.as_u64()).unwrap_or(0); + + let modified_at = model_data + .get("modified_at") + .and_then(|m| m.as_str()) + .map(|s| s.to_string()); + + let supports_tools = true; // Will be determined by config + let context_length = None; // Will be determined by config + let capabilities = vec!["text_generation".to_string()]; // Basic default + let description = format!("Ollama model: {}", model_name); + + Ok(crate::types::ModelInfo { + id: model_name.to_string(), + name: format!("{} ({}, {})", model_name, family, parameter_size), + provider: crate::types::LlmProvider::Ollama, + supports_tools, + supports_function_calling: supports_tools, + supports_vision: false, // Will be determined dynamically + supports_multimodal: false, + context_length, + training_cutoff: modified_at, + capabilities, + description: Some(format!( + "{} (Size: {} bytes, Quantization: {})", + description, size, quantization + )), + }) + } + + pub async fn check_connection(&self) -> Result<(), WorkerError> { + let client = Client::new(); + let url = format!("{}/api/tags", self.base_url); + self.add_auth_header(client.get(&url)) + .send() + .await + .map_err(|e| WorkerError::LlmApiError(format!("Failed to connect to Ollama: {}", e)))?; + Ok(()) + } +} + +#[async_trait::async_trait] +impl LlmClientTrait for OllamaClient { + async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + self.chat_stream(messages, tools, llm_debug).await + } + + async fn check_connection(&self) -> Result<(), WorkerError> { + self.check_connection().await + } + + fn provider(&self) -> LlmProvider { + 
LlmProvider::Ollama + } + + fn get_model_name(&self) -> String { + self.get_model_name() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::types::{Message, Role, ToolCall}; + + #[test] + fn test_message_conversion_with_tool_calls() { + let tool_call = ToolCall { + name: "List".to_string(), + arguments: r#"{"path": "./"}"#.to_string(), + }; + + let message = Message::with_tool_calls( + Role::Model, + "".to_string(), // Empty content, only tool calls + vec![tool_call.clone()], + ); + + let messages = vec![message]; + + // Simulate the conversion that happens in chat_stream + let ollama_messages: Vec = messages + .into_iter() + .map(|msg| { + // Convert tool calls if present + let tool_calls = msg.tool_calls.map(|calls| { + calls + .into_iter() + .map(|call| OllamaToolCall { + function: OllamaToolCallFunction { + name: call.name, + arguments: call.arguments, + }, + }) + .collect() + }); + + OllamaMessage { + role: "assistant".to_string(), + content: msg.content, + tool_calls, + } + }) + .collect(); + + // Verify the conversion preserved tool calls + assert_eq!(ollama_messages.len(), 1); + let converted_msg = &ollama_messages[0]; + assert_eq!(converted_msg.role, "assistant"); + assert_eq!(converted_msg.content, ""); + assert!(converted_msg.tool_calls.is_some()); + + let converted_tool_calls = converted_msg.tool_calls.as_ref().unwrap(); + assert_eq!(converted_tool_calls.len(), 1); + assert_eq!(converted_tool_calls[0].function.name, "List"); + assert_eq!( + converted_tool_calls[0].function.arguments, + r#"{"path": "./"}"# + ); + } + + #[test] + fn test_message_conversion_without_tool_calls() { + let message = Message::new(Role::User, "Hello".to_string()); + let messages = vec![message]; + + let ollama_messages: Vec = messages + .into_iter() + .map(|msg| { + let tool_calls = msg.tool_calls.map(|calls| { + calls + .into_iter() + .map(|call| OllamaToolCall { + function: OllamaToolCallFunction { + name: call.name, + arguments: call.arguments, + }, + }) + .collect() + }); + + OllamaMessage { + role: "user".to_string(), + content: msg.content, + tool_calls, + } + }) + .collect(); + + assert_eq!(ollama_messages.len(), 1); + let converted_msg = &ollama_messages[0]; + assert_eq!(converted_msg.role, "user"); + assert_eq!(converted_msg.content, "Hello"); + assert!(converted_msg.tool_calls.is_none()); + } +} diff --git a/worker/src/llm/openai.rs b/worker/src/llm/openai.rs new file mode 100644 index 0000000..5eae402 --- /dev/null +++ b/worker/src/llm/openai.rs @@ -0,0 +1,380 @@ +use crate::{ + LlmClientTrait, WorkerError, + types::{DynamicToolDefinition, LlmProvider, Message, Role, StreamEvent, ToolCall}, + url_config::UrlConfig, +}; +use futures_util::{Stream, StreamExt}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +// --- Request & Response Structures --- +#[derive(Debug, Serialize)] +pub(crate) struct OpenAIRequest { + pub model: String, + pub messages: Vec, + #[serde(skip_serializing_if = "std::ops::Not::not")] + pub stream: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OpenAIMessage { + pub role: String, + pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OpenAIToolCall { + pub id: String, + #[serde(rename = "type")] + pub call_type: String, + pub function: OpenAIFunction, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub 
struct OpenAIFunction { + pub name: String, + pub arguments: String, +} + +#[derive(Debug, Serialize, Clone)] +pub struct OpenAITool { + #[serde(rename = "type")] + pub tool_type: String, + pub function: OpenAIFunctionDef, +} + +#[derive(Debug, Serialize, Clone)] +pub struct OpenAIFunctionDef { + pub name: String, + pub description: String, + pub parameters: Value, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct OpenAIResponse { + pub choices: Vec, +} + +#[derive(Debug, Deserialize)] +pub struct OpenAIChoice { + pub message: OpenAIMessage, + #[serde(skip_serializing_if = "Option::is_none")] + pub delta: Option, +} + +#[derive(Debug, Deserialize)] +pub struct OpenAIDelta { + pub content: Option, + pub tool_calls: Option>, +} + +// --- Client --- +pub struct OpenAIClient { + api_key: String, + model: String, +} + +impl OpenAIClient { + pub fn new(api_key: &str, model: &str) -> Self { + Self { + api_key: api_key.to_string(), + model: model.to_string(), + } + } + + pub fn get_model_name(&self) -> String { + self.model.clone() + } +} + +use async_stream::stream; + +impl OpenAIClient { + pub async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[crate::types::DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + let client = Client::new(); + let url = UrlConfig::get_completion_url("openai"); + + let openai_messages: Vec = messages + .into_iter() + .map(|msg| OpenAIMessage { + role: match msg.role { + Role::User => "user".to_string(), + Role::Model => "assistant".to_string(), + Role::System => "system".to_string(), + Role::Tool => "tool".to_string(), + }, + content: msg.content, + tool_calls: None, + }) + .collect(); + + // Convert tools to OpenAI format + let openai_tools = tools.map(|tools| { + tools + .iter() + .map(|tool| OpenAITool { + tool_type: "function".to_string(), + function: OpenAIFunctionDef { + name: tool.name.clone(), + description: tool.description.clone(), + parameters: tool.parameters_schema.clone(), + }, + }) + .collect() + }); + + let request = OpenAIRequest { + model: self.model.clone(), + messages: openai_messages, + stream: true, + tools: openai_tools, + }; + + let response = client + .post(url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", self.api_key)) + .json(&request) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::OpenAI) + })?; + + if !response.status().is_success() { + let status = response.status(); + let error_body = response.text().await.unwrap_or_default(); + return Err(WorkerError::from_api_error( + format!("OpenAI API error: {} - {}", status, error_body), + &crate::types::LlmProvider::OpenAI, + )); + } + + let stream = stream! 
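// For reference, given the serde attributes above, the non-streaming
// connection-test request defined later in this file serializes to roughly
// (field order illustrative, model taken from self.model):
//   {"model":"...","messages":[{"role":"user","content":"Hi"}]}
// `stream: false` is dropped by skip_serializing_if = "std::ops::Not::not",
// and `tools`/`tool_calls` disappear when None. The streaming request is the
// same shape plus "stream":true and, when tools are supplied, a "tools" array.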
{
+        // Send debug information
+        if let Some(ref debug) = llm_debug {
+            if let Some(debug_event) = debug.debug_request(&self.model, "OpenAI", &serde_json::to_value(&request).unwrap_or_default()) {
+                yield Ok(debug_event);
+            }
+        }
+
+        let mut stream = response.bytes_stream();
+        let mut buffer = String::new();
+
+        while let Some(chunk) = stream.next().await {
+            match chunk {
+                Ok(bytes) => {
+                    let chunk_str = String::from_utf8_lossy(&bytes);
+                    buffer.push_str(&chunk_str);
+
+                    // Process server-sent events
+                    while let Some(line_end) = buffer.find('\n') {
+                        let line = buffer[..line_end].to_string();
+                        buffer = buffer[line_end + 1..].to_string();
+
+                        if line.starts_with("data: ") {
+                            let data = &line[6..];
+                            if data == "[DONE]" {
+                                yield Ok(StreamEvent::Completion(Message::new(
+                                    Role::Model,
+                                    "".to_string(),
+                                )));
+                                break;
+                            }
+
+                            match serde_json::from_str::<serde_json::Value>(data) {
+                                Ok(json_data) => {
+                                    // Send debug information
+                                    if let Some(ref debug) = llm_debug {
+                                        if let Some(debug_event) = debug.debug_response(&self.model, "OpenAI", &json_data) {
+                                            yield Ok(debug_event);
+                                        }
+                                    }
+                                    if let Some(choices) = json_data.get("choices").and_then(|c| c.as_array()) {
+                                        for choice in choices {
+                                            if let Some(delta) = choice.get("delta") {
+                                                // Handle content
+                                                if let Some(content) = delta.get("content").and_then(|c| c.as_str()) {
+                                                    yield Ok(StreamEvent::Chunk(content.to_string()));
+                                                }
+
+                                                // Handle tool calls
+                                                if let Some(tool_calls) = delta.get("tool_calls").and_then(|tc| tc.as_array()) {
+                                                    for tool_call in tool_calls {
+                                                        if let Some(function) = tool_call.get("function") {
+                                                            if let Some(name) = function.get("name").and_then(|n| n.as_str()) {
+                                                                let arguments = function.get("arguments")
+                                                                    .and_then(|a| a.as_str())
+                                                                    .unwrap_or("");
+                                                                let tool_call = ToolCall {
+                                                                    name: name.to_string(),
+                                                                    arguments: arguments.to_string(),
+                                                                };
+                                                                yield Ok(StreamEvent::ToolCall(tool_call));
+                                                            }
+                                                        }
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    }
+                                }
+                                Err(e) => {
+                                    tracing::warn!("Failed to parse OpenAI stream response: {}", e);
+                                }
+                            }
+                        }
+                    }
+                }
+                Err(e) => {
+                    yield Err(WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::OpenAI));
+                    break;
+                }
+            }
+        }
+    };
+
+    Ok(Box::new(Box::pin(stream)))
+}
+
+pub async fn get_model_details(
+    &self,
+    model_id: &str,
+) -> Result<crate::types::ModelInfo, WorkerError> {
+    let client = Client::new();
+    let url = UrlConfig::get_model_url("openai", model_id);
+
+    let response = client
+        .get(&url)
+        .header("Authorization", format!("Bearer {}", self.api_key))
+        .send()
+        .await
+        .map_err(|e| {
+            WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::OpenAI)
+        })?;
+
+    if !response.status().is_success() {
+        return Err(WorkerError::from_api_error(
+            format!(
+                "OpenAI model details request failed with status: {}",
+                response.status()
+            ),
+            &crate::types::LlmProvider::OpenAI,
+        ));
+    }
+
+    let model_data: serde_json::Value = response.json().await.map_err(|e| {
+        WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::OpenAI)
+    })?;
+
+    let id = model_data
+        .get("id")
+        .and_then(|id| id.as_str())
+        .unwrap_or(model_id);
+    let owned_by = model_data
+        .get("owned_by")
+        .and_then(|owner| owner.as_str())
+        .unwrap_or("openai");
+    let created = model_data
+        .get("created")
+        .and_then(|c| c.as_i64())
+        .map(|timestamp| format!("{}", timestamp));
+
+    let supports_tools = true; // Default to true, will be determined by config
+    let context_length = None; // Will be determined by config
+    let capabilities = vec!["text_generation".to_string()]; // Basic default
+    let description = format!("OpenAI model: {}", id);
+
+    Ok(crate::types::ModelInfo {
+        id:
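// Note on the delta handling above: OpenAI's streaming API typically sends a
// tool call's name in the first delta and the JSON arguments fragmented
// across later deltas, keyed by "index". The loop above yields a ToolCall from
// the delta that carries the name, so a caller needing complete arguments may
// have to accumulate fragments per index before parsing. A hedged sketch
// (names illustrative, not part of the patch):
use std::collections::HashMap;

#[derive(Default)]
struct ToolCallAccumulator {
    by_index: HashMap<u64, (String, String)>, // index -> (name, argument fragments)
}

impl ToolCallAccumulator {
    fn absorb(&mut self, index: u64, name: Option<&str>, args_fragment: &str) {
        let entry = self.by_index.entry(index).or_default();
        if let Some(n) = name {
            entry.0 = n.to_string(); // the name arrives once, in the first delta
        }
        entry.1.push_str(args_fragment); // arguments arrive piecewise
    }
}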
id.to_string(), + name: format!("{} ({})", id, owned_by), + provider: crate::types::LlmProvider::OpenAI, + supports_tools, + supports_function_calling: supports_tools, + supports_vision: false, // Will be determined dynamically + supports_multimodal: false, + context_length, + training_cutoff: created, + capabilities, + description: Some(description), + }) + } + + pub async fn check_connection(&self) -> Result<(), WorkerError> { + let client = Client::new(); + let url = UrlConfig::get_completion_url("openai"); + + let test_request = OpenAIRequest { + model: self.model.clone(), + messages: vec![OpenAIMessage { + role: "user".to_string(), + content: "Hi".to_string(), + tool_calls: None, + }], + stream: false, + tools: None, + }; + + let response = client + .post(url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", self.api_key)) + .json(&test_request) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::OpenAI) + })?; + + if !response.status().is_success() { + let status = response.status(); + let error_body = response.text().await.unwrap_or_default(); + return Err(WorkerError::from_api_error( + format!("OpenAI connection test failed: {} - {}", status, error_body), + &crate::types::LlmProvider::OpenAI, + )); + } + + Ok(()) + } +} + +#[async_trait::async_trait] +impl LlmClientTrait for OpenAIClient { + async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + self.chat_stream(messages, tools, llm_debug).await + } + + async fn check_connection(&self) -> Result<(), WorkerError> { + self.check_connection().await + } + + fn provider(&self) -> LlmProvider { + LlmProvider::OpenAI + } + + fn get_model_name(&self) -> String { + self.get_model_name() + } +} diff --git a/worker/src/llm/xai.rs b/worker/src/llm/xai.rs new file mode 100644 index 0000000..0bd5e74 --- /dev/null +++ b/worker/src/llm/xai.rs @@ -0,0 +1,386 @@ +use crate::{ + LlmClientTrait, WorkerError, + types::{DynamicToolDefinition, LlmProvider, Message, Role, StreamEvent, ToolCall}, + url_config::UrlConfig, +}; +use futures_util::{Stream, StreamExt}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Serialize)] +pub(crate) struct XAIRequest { + pub model: String, + pub messages: Vec, + #[serde(skip_serializing_if = "std::ops::Not::not")] + pub stream: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub max_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIMessage { + pub role: String, + pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIToolCall { + pub id: String, + #[serde(rename = "type")] + pub call_type: String, + pub function: XAIFunction, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIFunction { + pub name: String, + pub arguments: String, +} + +#[derive(Debug, Serialize, Clone)] +pub struct XAITool { + #[serde(rename = "type")] + pub tool_type: String, + pub function: XAIFunctionDef, +} + +#[derive(Debug, Serialize, Clone)] +pub struct XAIFunctionDef { + pub name: String, + pub description: String, + pub parameters: 
diff --git a/worker/src/llm/xai.rs b/worker/src/llm/xai.rs new file mode 100644 index 0000000..0bd5e74 --- /dev/null +++ b/worker/src/llm/xai.rs @@ -0,0 +1,386 @@ +use crate::{ + LlmClientTrait, WorkerError, + types::{DynamicToolDefinition, LlmProvider, Message, Role, StreamEvent, ToolCall}, + url_config::UrlConfig, +}; +use futures_util::{Stream, StreamExt}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Serialize)] +pub(crate) struct XAIRequest { + pub model: String, + pub messages: Vec<XAIMessage>, + #[serde(skip_serializing_if = "std::ops::Not::not")] + pub stream: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option<Vec<XAITool>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub max_tokens: Option<u32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option<f32>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIMessage { + pub role: String, + pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option<Vec<XAIToolCall>>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIToolCall { + pub id: String, + #[serde(rename = "type")] + pub call_type: String, + pub function: XAIFunction, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct XAIFunction { + pub name: String, + pub arguments: String, +} + +#[derive(Debug, Serialize, Clone)] +pub struct XAITool { + #[serde(rename = "type")] + pub tool_type: String, + pub function: XAIFunctionDef, +} + +#[derive(Debug, Serialize, Clone)] +pub struct XAIFunctionDef { + pub name: String, + pub description: String, + pub parameters: Value, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct XAIResponse { + pub choices: Vec<XAIChoice>, +} + +#[derive(Debug, Deserialize)] +pub struct XAIChoice { + pub message: XAIMessage, + #[serde(skip_serializing_if = "Option::is_none")] + pub delta: Option<XAIDelta>, +} + +#[derive(Debug, Deserialize)] +pub struct XAIDelta { + pub content: Option<String>, + pub tool_calls: Option<Vec<XAIToolCall>>, +} + +#[derive(Debug, Deserialize)] +pub struct XAIModel { + pub id: String, + pub object: String, + pub created: i64, + pub owned_by: String, +} + +#[derive(Debug, Deserialize)] +pub struct XAIModelsResponse { + pub object: String, + pub data: Vec<XAIModel>, +} + +pub struct XAIClient { + api_key: String, + model: String, +} + +impl XAIClient { + pub fn new(api_key: &str, model: &str) -> Self { + Self { + api_key: api_key.to_string(), + model: model.to_string(), + } + } + + pub fn get_model_name(&self) -> String { + self.model.clone() + } +}
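`XAIRequest` leans on `skip_serializing_if` so that unset options never reach the wire; the `std::ops::Not::not` predicate drops `stream` whenever it is `false`. A minimal sketch of the same pattern with standalone types (not this crate's):

use serde::Serialize;

#[derive(Serialize)]
struct Req {
    model: String,
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    stream: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<u32>,
}

fn main() {
    let req = Req { model: "grok".into(), stream: false, max_tokens: None };
    // Both optional fields are omitted: prints {"model":"grok"}
    println!("{}", serde_json::to_string(&req).unwrap());
}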
+ +use async_stream::stream; + +impl XAIClient { + pub async fn chat_stream<'a>( + &'a self, + messages: Vec<Message>, + tools: Option<&[crate::types::DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box<dyn Stream<Item = Result<StreamEvent, WorkerError>> + Unpin + Send + 'a>, + WorkerError, + > { + let client = Client::new(); + let url = UrlConfig::get_completion_url("xai"); + + let xai_messages: Vec<XAIMessage> = messages + .into_iter() + .map(|msg| XAIMessage { + role: match msg.role { + Role::User => "user".to_string(), + Role::Model => "assistant".to_string(), + Role::System => "system".to_string(), + Role::Tool => "tool".to_string(), + }, + content: msg.content, + tool_calls: None, + }) + .collect(); + + let xai_tools = tools.map(|tools| { + tools + .iter() + .map(|tool| XAITool { + tool_type: "function".to_string(), + function: XAIFunctionDef { + name: tool.name.clone(), + description: tool.description.clone(), + parameters: tool.parameters_schema.clone(), + }, + }) + .collect() + }); + + let request = XAIRequest { + model: self.model.clone(), + messages: xai_messages, + stream: true, + tools: xai_tools, + max_tokens: None, + temperature: None, + }; + + let response = client + .post(url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", self.api_key)) + .json(&request) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::XAI) + })?; + + if !response.status().is_success() { + let status = response.status(); + let error_body = response.text().await.unwrap_or_default(); + return Err(WorkerError::from_api_error( + format!("xAI API error: {} - {}", status, error_body), + &crate::types::LlmProvider::XAI, + )); + } + + let stream = stream! { + if let Some(ref debug) = llm_debug { + if let Some(debug_event) = debug.debug_request(&self.model, "xAI", &serde_json::to_value(&request).unwrap_or_default()) { + yield Ok(debug_event); + } + } + + let mut stream = response.bytes_stream(); + let mut buffer = String::new(); + + while let Some(chunk) = stream.next().await { + match chunk { + Ok(bytes) => { + let chunk_str = String::from_utf8_lossy(&bytes); + buffer.push_str(&chunk_str); + + while let Some(line_end) = buffer.find('\n') { + let line = buffer[..line_end].to_string(); + buffer = buffer[line_end + 1..].to_string(); + + if line.starts_with("data: ") { + let data = &line[6..]; + if data == "[DONE]" { + yield Ok(StreamEvent::Completion(Message::new( + Role::Model, + "".to_string(), + ))); + break; + } + + match serde_json::from_str::<serde_json::Value>(data) { + Ok(json_data) => { + if let Some(ref debug) = llm_debug { + if let Some(debug_event) = debug.debug_response(&self.model, "xAI", &json_data) { + yield Ok(debug_event); + } + } + if let Some(choices) = json_data.get("choices").and_then(|c| c.as_array()) { + for choice in choices { + if let Some(delta) = choice.get("delta") { + if let Some(content) = delta.get("content").and_then(|c| c.as_str()) { + yield Ok(StreamEvent::Chunk(content.to_string())); + } + + if let Some(tool_calls) = delta.get("tool_calls").and_then(|tc| tc.as_array()) { + for tool_call in tool_calls { + if let Some(function) = tool_call.get("function") { + if let (Some(name), Some(arguments)) = ( + function.get("name").and_then(|n| n.as_str()), + function.get("arguments").and_then(|a| a.as_str()) + ) { + let tool_call = ToolCall { + name: name.to_string(), + arguments: arguments.to_string(), + }; + yield Ok(StreamEvent::ToolCall(tool_call)); + } + } + } + } + } + } + } + } + Err(e) => { + tracing::warn!("Failed to parse xAI stream response: {}", e); + } + } + } + } + } + Err(e) => { + yield Err(WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::XAI)); + break; + } + } + } + }; + + Ok(Box::new(Box::pin(stream))) + }
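Like the OpenAI client, this wraps the whole SSE loop in `async_stream::stream!`, which turns `yield` statements into a `Stream` without hand-writing a state machine. A tiny illustration of the macro in isolation (not this crate's types):

use async_stream::stream;
use futures_util::{pin_mut, StreamExt};

#[tokio::main]
async fn main() {
    let s = stream! {
        for i in 0..3 {
            // Each yield produces one stream item.
            yield i * 2;
        }
    };
    pin_mut!(s);
    while let Some(v) = s.next().await {
        println!("{}", v); // 0, 2, 4
    }
}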
dt.format("%Y-%m-%d").to_string()) + .unwrap_or_else(|| "2024-12-12".to_string()), + ), + capabilities, + description: Some(description), + }) + } + + pub async fn check_connection(&self) -> Result<(), WorkerError> { + let client = Client::new(); + let url = UrlConfig::get_completion_url("xai"); + + let test_request = XAIRequest { + model: self.model.clone(), + messages: vec![XAIMessage { + role: "user".to_string(), + content: "Hi".to_string(), + tool_calls: None, + }], + stream: false, + tools: None, + max_tokens: Some(10), + temperature: Some(0.1), + }; + + let response = client + .post(url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", self.api_key)) + .json(&test_request) + .send() + .await + .map_err(|e| { + WorkerError::from_api_error(e.to_string(), &crate::types::LlmProvider::XAI) + })?; + + if !response.status().is_success() { + let status = response.status(); + let error_body = response.text().await.unwrap_or_default(); + return Err(WorkerError::from_api_error( + format!("xAI connection test failed: {} - {}", status, error_body), + &crate::types::LlmProvider::XAI, + )); + } + + Ok(()) + } +} + +#[async_trait::async_trait] +impl LlmClientTrait for XAIClient { + async fn chat_stream<'a>( + &'a self, + messages: Vec, + tools: Option<&[DynamicToolDefinition]>, + llm_debug: Option, + ) -> Result< + Box> + Unpin + Send + 'a>, + WorkerError, + > { + self.chat_stream(messages, tools, llm_debug).await + } + + async fn check_connection(&self) -> Result<(), WorkerError> { + self.check_connection().await + } + + fn provider(&self) -> LlmProvider { + LlmProvider::XAI + } + + fn get_model_name(&self) -> String { + self.get_model_name() + } +} diff --git a/worker/src/mcp_config.rs b/worker/src/mcp_config.rs new file mode 100644 index 0000000..3056f05 --- /dev/null +++ b/worker/src/mcp_config.rs @@ -0,0 +1,364 @@ +use crate::WorkerError; +use crate::mcp_tool::McpServerConfig; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::Path; +use tracing::{debug, info, warn}; + +/// MCP設定ファイルの構造 +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpConfig { + /// MCPサーバーの設定一覧 + pub servers: HashMap, +} + +/// 個別のMCPサーバー定義 +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpServerDefinition { + /// 実行コマンド + pub command: String, + + /// コマンドライン引数 + #[serde(default)] + pub args: Vec, + + /// 環境変数の設定 + #[serde(default)] + pub env: HashMap, + + /// サーバーの説明(オプション) + pub description: Option, + + /// 有効/無効の設定(デフォルト: true) + #[serde(default = "default_enabled")] + pub enabled: bool, + + /// 統合方式の選択(proxy または individual) + #[serde(default = "default_integration_mode")] + pub integration_mode: IntegrationMode, +} + +/// MCP統合方式 +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum IntegrationMode { + /// プロキシーモード - 単一ツールで全MCPツールにアクセス + Proxy, + /// 個別モード - MCPツールを個別のWorkerツールとして登録 + Individual, +} + +fn default_enabled() -> bool { + true +} + +fn default_integration_mode() -> IntegrationMode { + IntegrationMode::Individual +} + +impl McpConfig { + /// 設定ファイルを読み込む + pub fn load_from_file>(path: P) -> Result { + let path = path.as_ref(); + + if !path.exists() { + debug!( + "MCP config file not found at {:?}, returning empty config", + path + ); + return Ok(Self::default()); + } + + info!("Loading MCP config from: {:?}", path); + let content = std::fs::read_to_string(path).map_err(|e| { + WorkerError::ConfigurationError(format!( + "Failed to read MCP config file {:?}: {}", + 
diff --git a/worker/src/mcp_config.rs b/worker/src/mcp_config.rs new file mode 100644 index 0000000..3056f05 --- /dev/null +++ b/worker/src/mcp_config.rs @@ -0,0 +1,364 @@ +use crate::WorkerError; +use crate::mcp_tool::McpServerConfig; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::Path; +use tracing::{debug, info, warn}; + +/// Structure of the MCP configuration file +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpConfig { + /// Map of MCP server configurations + pub servers: HashMap<String, McpServerDefinition>, +} + +/// Definition of an individual MCP server +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpServerDefinition { + /// Command to execute + pub command: String, + + /// Command-line arguments + #[serde(default)] + pub args: Vec<String>, + + /// Environment variable settings + #[serde(default)] + pub env: HashMap<String, String>, + + /// Optional description of the server + pub description: Option<String>, + + /// Whether the server is enabled (default: true) + #[serde(default = "default_enabled")] + pub enabled: bool, + + /// Integration mode to use (proxy or individual) + #[serde(default = "default_integration_mode")] + pub integration_mode: IntegrationMode, +} + +/// MCP integration mode +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum IntegrationMode { + /// Proxy mode - access all MCP tools through a single tool + Proxy, + /// Individual mode - register each MCP tool as its own Worker tool + Individual, +} + +fn default_enabled() -> bool { + true +} + +fn default_integration_mode() -> IntegrationMode { + IntegrationMode::Individual +} + +impl McpConfig { + /// Load the configuration file + pub fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Self, WorkerError> { + let path = path.as_ref(); + + if !path.exists() { + debug!( + "MCP config file not found at {:?}, returning empty config", + path + ); + return Ok(Self::default()); + } + + info!("Loading MCP config from: {:?}", path); + let content = std::fs::read_to_string(path).map_err(|e| { + WorkerError::ConfigurationError(format!( + "Failed to read MCP config file {:?}: {}", + path, e + )) + })?; + + let config: McpConfig = serde_yaml::from_str(&content).map_err(|e| { + WorkerError::ConfigurationError(format!( + "Failed to parse MCP config file {:?}: {}", + path, e + )) + })?; + + info!("Loaded {} MCP server configurations", config.servers.len()); + Ok(config) + } + + /// Save to the configuration file + pub fn save_to_file<P: AsRef<Path>>(&self, path: P) -> Result<(), WorkerError> { + let path = path.as_ref(); + + // Create the directory if it does not exist + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).map_err(|e| { + WorkerError::ConfigurationError(format!( + "Failed to create config directory {:?}: {}", + parent, e + )) + })?; + } + + let content = serde_yaml::to_string(self).map_err(|e| { + WorkerError::ConfigurationError(format!("Failed to serialize MCP config: {}", e)) + })?; + + std::fs::write(path, content).map_err(|e| { + WorkerError::ConfigurationError(format!( + "Failed to write MCP config file {:?}: {}", + path, e + )) + })?; + + info!("Saved MCP config to: {:?}", path); + Ok(()) + } + + /// Get the enabled server configurations + pub fn get_enabled_servers(&self) -> Vec<(&String, &McpServerDefinition)> { + self.servers.iter().filter(|(_, def)| def.enabled).collect() + } + + /// Generate the default configuration + pub fn create_default_config() -> Self { + let mut servers = HashMap::new(); + + // Example configuration for the Brave Search MCP server + servers.insert( + "brave_search".to_string(), + McpServerDefinition { + command: "npx".to_string(), + args: vec![ + "-y".to_string(), + "@brave/brave-search-mcp-server".to_string(), + ], + env: { + let mut env = HashMap::new(); + env.insert("BRAVE_API_KEY".to_string(), "${BRAVE_API_KEY}".to_string()); + env + }, + description: Some("Brave Search API for web searching".to_string()), + enabled: false, // Disabled by default (requires an API key) + integration_mode: IntegrationMode::Individual, + }, + ); + + // Example configuration for the filesystem MCP server + servers.insert( + "filesystem".to_string(), + McpServerDefinition { + command: "npx".to_string(), + args: vec![ + "-y".to_string(), + "@modelcontextprotocol/server-filesystem".to_string(), + "/tmp".to_string(), + ], + env: HashMap::new(), + description: Some("Filesystem operations in /tmp directory".to_string()), + enabled: false, // Disabled by default + integration_mode: IntegrationMode::Individual, + }, + ); + + // Example configuration for the Git MCP server + servers.insert( + "git".to_string(), + McpServerDefinition { + command: "npx".to_string(), + args: vec![ + "-y".to_string(), + "@modelcontextprotocol/server-git".to_string(), + ".".to_string(), + ], + env: HashMap::new(), + description: Some("Git operations in current directory".to_string()), + enabled: false, // Disabled by default + integration_mode: IntegrationMode::Individual, + }, + ); + + Self { servers } + } +} + +impl Default for McpConfig { + fn default() -> Self { + Self { + servers: HashMap::new(), + } + } +}
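Concretely, the YAML this maps to is a `servers` map; parsing a minimal entry through `serde_yaml` exercises the `default_enabled` / `default_integration_mode` fallbacks. A sketch with an illustrative server entry:

fn main() {
    let yaml = r#"
servers:
  everything:
    command: npx
    args: ["-y", "@modelcontextprotocol/server-everything"]
"#;
    let config: McpConfig = serde_yaml::from_str(yaml).unwrap();
    let server = &config.servers["everything"];
    // Omitted fields fall back to their serde defaults.
    assert!(server.enabled);
    assert!(matches!(server.integration_mode, IntegrationMode::Individual));
}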
+ +impl McpServerDefinition { + /// Expand environment variables and convert into an McpServerConfig + pub fn to_mcp_server_config(&self, _name: &str) -> Result<McpServerConfig, WorkerError> { + // Set environment variables + for (key, value) in &self.env { + let expanded_value = expand_environment_variables(value)?; + // SAFETY: Setting environment variables is safe when done in a controlled manner + // for MCP server configuration purposes + unsafe { + std::env::set_var(key, expanded_value); + } + debug!("Set environment variable: {}={}", key, value); // log the unexpanded template, not the actual value + } + + // Expand environment variables in the command and arguments + let expanded_command = expand_environment_variables(&self.command)?; + let expanded_args: Result<Vec<String>, WorkerError> = self + .args + .iter() + .map(|arg| expand_environment_variables(arg)) + .collect(); + let expanded_args = expanded_args?; + + Ok(McpServerConfig::new(expanded_command, expanded_args)) + } +} + +/// Expand environment variables (supports the ${VAR_NAME} form) +fn expand_environment_variables(input: &str) -> Result<String, WorkerError> { + let mut result = input.to_string(); + + // Find and replace ${VAR_NAME} patterns + let re = regex::Regex::new(r"\$\{([^}]+)\}") + .map_err(|e| WorkerError::ConfigurationError(format!("Regex error: {}", e)))?; + + for caps in re.captures_iter(input) { + let full_match = &caps[0]; + let var_name = &caps[1]; + + match std::env::var(var_name) { + Ok(value) => { + result = result.replace(full_match, &value); + } + Err(_) => { + warn!( + "Environment variable '{}' not found, leaving unexpanded", + var_name + ); + // If the environment variable is not found, leave it unexpanded + // so the user can set it later if needed + } + } + } + + Ok(result) +}
+ +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::tempdir; + + #[test] + fn test_default_config_creation() { + let config = McpConfig::create_default_config(); + assert!(!config.servers.is_empty()); + assert!(config.servers.contains_key("brave_search")); + assert!(config.servers.contains_key("filesystem")); + } + + #[test] + fn test_config_serialization() { + let config = McpConfig::create_default_config(); + let yaml = serde_yaml::to_string(&config).unwrap(); + + // Verify it serializes correctly as YAML + assert!(yaml.contains("servers:")); + assert!(yaml.contains("brave_search:")); + assert!(yaml.contains("command:")); + } + + #[test] + fn test_config_deserialization() { + let yaml_content = r#" +servers: + test_server: + command: "python3" + args: ["test.py"] + env: + TEST_VAR: "test_value" + description: "Test server" + enabled: true + integration_mode: "individual" +"#; + + let config: McpConfig = serde_yaml::from_str(yaml_content).unwrap(); + assert_eq!(config.servers.len(), 1); + + let server = config.servers.get("test_server").unwrap(); + assert_eq!(server.command, "python3"); + assert_eq!(server.args, vec!["test.py"]); + assert_eq!(server.env.get("TEST_VAR").unwrap(), "test_value"); + assert!(server.enabled); + } + + #[test] + fn test_environment_variable_expansion() { + std::env::set_var("TEST_VAR", "test_value"); + + let result = expand_environment_variables("prefix_${TEST_VAR}_suffix").unwrap(); + assert_eq!(result, "prefix_test_value_suffix"); + + // Non-existent environment variables are left as-is + let result = expand_environment_variables("${NON_EXISTENT_VAR}").unwrap(); + assert_eq!(result, "${NON_EXISTENT_VAR}"); + } + + #[test] + fn test_config_file_operations() { + let dir = tempdir().unwrap(); + let config_path = dir.path().join("mcp.yaml"); + + // Create and save a config + let config = McpConfig::create_default_config(); + config.save_to_file(&config_path).unwrap(); + + // Verify the file was created + assert!(config_path.exists()); + + // Load the config + let loaded_config = McpConfig::load_from_file(&config_path).unwrap(); + assert_eq!(config.servers.len(), loaded_config.servers.len()); + } + + #[test] + fn test_enabled_servers_filter() { + let mut config = McpConfig::default(); + + // Add an enabled server + config.servers.insert( + "enabled_server".to_string(), + McpServerDefinition { + command: "test".to_string(), + args: vec![], + env: HashMap::new(), + description: None, + enabled: true, + integration_mode: IntegrationMode::Individual, + }, + ); + + // Add a disabled server + config.servers.insert( + "disabled_server".to_string(), + McpServerDefinition { + command: "test".to_string(), + args: vec![], + env: HashMap::new(), + description: None, + enabled: false, + integration_mode: IntegrationMode::Individual, + }, + ); + + let enabled_servers = config.get_enabled_servers(); + assert_eq!(enabled_servers.len(), 1); + assert_eq!(enabled_servers[0].0, "enabled_server"); + } +} diff --git a/worker/src/mcp_protocol.rs b/worker/src/mcp_protocol.rs new file mode 100644 index 0000000..02e6220 --- /dev/null +++ b/worker/src/mcp_protocol.rs @@ -0,0 +1,449 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; +use tokio::process::{Child, ChildStdin, ChildStdout}; +use tracing::{debug, info, trace, warn}; + +/// JSON-RPC 2.0 Request +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcRequest { + pub jsonrpc: String, + pub id: Value, + pub method: String, + pub params: Option<Value>, +} + +/// JSON-RPC 2.0 Response +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcResponse { + pub jsonrpc: String, + pub id: Value, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option<Value>, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option<JsonRpcError>, +} + +/// JSON-RPC 2.0 Error +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcError { + pub code: i32, + pub message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option<Value>, +} + +/// MCP Tool definition +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpToolDefinition { + pub name: String, + pub description: Option<String>, + #[serde(rename = "inputSchema")] + pub input_schema: Value, +} + +/// MCP Initialize request parameters +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InitializeParams { + #[serde(rename = "protocolVersion")] + pub protocol_version: String, + pub capabilities: InitializeCapabilities, + #[serde(rename = "clientInfo")] + pub client_info: ClientInfo, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InitializeCapabilities { + pub tools: Option<ToolCapabilities>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolCapabilities { + #[serde(rename = "listChanged")] + pub list_changed: Option<bool>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ClientInfo { + pub name: String, + pub version: String, +} + +/// MCP Initialize response +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InitializeResult { + #[serde(rename = "protocolVersion")] + pub protocol_version: String, + pub capabilities: ServerCapabilities, + #[serde(rename = "serverInfo")] + pub server_info: ServerInfo, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServerCapabilities { + pub tools: Option<ToolCapabilities>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServerInfo { + pub name: String, + pub version: String, +} + +/// List tools response +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ListToolsResult { + pub tools: Vec<McpToolDefinition>, +} + +/// Call tool parameters +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CallToolParams { + pub name: String, + pub arguments: Option<Value>, +} + +/// Tool call result content +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolContent { + #[serde(rename = "type")] + pub content_type: String, + pub text: Option<String>, + pub data: Option<String>, +} + +/// Call tool result +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CallToolResult { + pub content: Vec<ToolContent>, +}
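These types mirror the MCP wire format, with `rename` attributes bridging the protocol's camelCase keys to Rust field names. A sketch deserializing an illustrative `tools/list` result (the echo tool and its schema are made up):

fn main() {
    let raw = r#"{
        "tools": [{
            "name": "echo",
            "description": "Echo back the input",
            "inputSchema": {"type": "object", "properties": {"text": {"type": "string"}}}
        }]
    }"#;
    let result: ListToolsResult = serde_json::from_str(raw).unwrap();
    // "inputSchema" lands in the renamed input_schema field.
    assert_eq!(result.tools[0].name, "echo");
    assert!(result.tools[0].input_schema["properties"]["text"].is_object());
}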
+ +/// MCP Client for JSON-RPC 2.0 communication +pub struct McpClient { + child: Option<Child>, + stdin: Option<ChildStdin>, + stdout: Option<BufReader<ChildStdout>>, + request_id: u64, + initialized: bool, +} + +impl McpClient { + /// Create a new MCP client + pub fn new() -> Self { + Self { + child: None, + stdin: None, + stdout: None, + request_id: 0, + initialized: false, + } + } + + /// Start MCP server process and initialize connection + pub async fn connect( + &mut self, + command: String, + args: Vec<String>, + ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { + info!("Starting MCP server: {} {:?}", command, args); + + // Start the process + let mut cmd = tokio::process::Command::new(&command); + cmd.args(&args); + cmd.stdin(std::process::Stdio::piped()); + cmd.stdout(std::process::Stdio::piped()); + cmd.stderr(std::process::Stdio::piped()); + + let mut child = cmd.spawn()?; + + let stdin = child.stdin.take().ok_or("Failed to get stdin")?; + let stdout = child.stdout.take().ok_or("Failed to get stdout")?; + let stdout = BufReader::new(stdout); + + self.child = Some(child); + self.stdin = Some(stdin); + self.stdout = Some(stdout); + + info!("MCP server started successfully"); + + // Initialize the connection + self.initialize().await?; + + Ok(()) + } + + /// Initialize MCP connection + async fn initialize( + &mut self, + ) -> Result<InitializeResult, Box<dyn std::error::Error + Send + Sync>> { + let params = InitializeParams { + protocol_version: "2024-11-05".to_string(), + capabilities: InitializeCapabilities { + tools: Some(ToolCapabilities { + list_changed: Some(true), + }), + }, + client_info: ClientInfo { + name: "nia-worker".to_string(), + version: "0.1.0".to_string(), + }, + }; + + debug!("Sending initialize request with params: {:?}", params); + let result: InitializeResult = self + .send_request("initialize", Some(serde_json::to_value(&params)?)) + .await?; + debug!("Received initialize result: {:?}", result); + + // Send initialized notification + debug!("Sending initialized notification"); + self.send_notification("initialized", None).await?; + + self.initialized = true; + info!("MCP connection initialized successfully"); + + Ok(result) + }
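On the wire, that handshake is a single JSON-RPC request followed by an `initialized` notification, one JSON object per line over the child process's stdin. The request the params above serialize to, sketched with `serde_json::json!` for illustration:

fn main() {
    let init = serde_json::json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "initialize",
        "params": {
            "protocolVersion": "2024-11-05",
            "capabilities": { "tools": { "listChanged": true } },
            "clientInfo": { "name": "nia-worker", "version": "0.1.0" }
        }
    });
    println!("{}", init);
}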
with status: {}", exit_status).into() + ); + } + Ok(None) => { + // Process is still running but closed stdout + return Err("MCP server closed stdout connection".into()); + } + Err(e) => { + return Err(format!("Failed to check MCP server status: {}", e).into()); + } + } + } else { + return Err("MCP server process not found".into()); + } + } + + let trimmed = response_line.trim(); + if trimmed.is_empty() { + continue; + } + + // Try to parse as JSON-RPC response + if let Ok(response) = serde_json::from_str::(trimmed) { + debug!("Received valid JSON-RPC response: {}", trimmed); + break response; + } else { + // This is likely a log message or other non-JSON output + debug!("Skipping non-JSON output: {}", trimmed); + attempts += 1; + if attempts >= MAX_ATTEMPTS { + return Err("Too many non-JSON responses from MCP server".into()); + } + continue; + } + }; + + // Check if this is our response + if response.id != id { + return Err(format!( + "Response ID mismatch: expected {:?}, got {:?}", + id, response.id + ) + .into()); + } + + // Handle error response + if let Some(error) = response.error { + return Err(format!("MCP server error: {} ({})", error.message, error.code).into()); + } + + // Parse result + let result = response.result.ok_or("No result in response")?; + let parsed_result: T = serde_json::from_value(result)?; + + Ok(parsed_result) + } + + /// Send a JSON-RPC notification (no response expected) + async fn send_notification( + &mut self, + method: &str, + params: Option, + ) -> Result<(), Box> { + if self.stdin.is_none() { + return Err("Not connected to MCP server".into()); + } + + let request = serde_json::json!({ + "jsonrpc": "2.0", + "method": method, + "params": params + }); + + let request_json = serde_json::to_string(&request)?; + trace!("Sending MCP notification: {}", request_json); + + let stdin = self.stdin.as_mut().unwrap(); + stdin.write_all(request_json.as_bytes()).await?; + stdin.write_all(b"\n").await?; + stdin.flush().await?; + + Ok(()) + } + + /// List available tools from MCP server + pub async fn list_tools( + &mut self, + ) -> Result, Box> { + if !self.initialized { + return Err("MCP client not initialized".into()); + } + + // Some MCP servers expect an empty object instead of null for params + let params = serde_json::json!({}); + let result: ListToolsResult = self.send_request("tools/list", Some(params)).await?; + Ok(result.tools) + } + + /// Call a tool on the MCP server + pub async fn call_tool( + &mut self, + name: &str, + arguments: Option, + ) -> Result> { + if !self.initialized { + return Err("MCP client not initialized".into()); + } + + let params = CallToolParams { + name: name.to_string(), + arguments, + }; + + let result: CallToolResult = self + .send_request("tools/call", Some(serde_json::to_value(¶ms)?)) + .await?; + Ok(result) + } + + /// Close the connection and terminate the server process + pub async fn close(&mut self) -> Result<(), Box> { + if let Some(mut child) = self.child.take() { + // Try to terminate gracefully first + match child.kill().await { + Ok(()) => info!("MCP server process terminated"), + Err(e) => warn!("Failed to terminate MCP server process: {}", e), + } + } + + self.stdin = None; + self.stdout = None; + self.initialized = false; + + Ok(()) + } +} + +impl Drop for McpClient { + fn drop(&mut self) { + if let Some(mut child) = self.child.take() { + // Best effort cleanup - spawn a task to handle async kill + tokio::spawn(async move { + let _ = child.kill().await; + }); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + 
+ +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_json_rpc_serialization() { + let request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + id: Value::Number(serde_json::Number::from(1)), + method: "tools/list".to_string(), + params: None, + }; + + let json = serde_json::to_string(&request).unwrap(); + assert!(json.contains("\"jsonrpc\":\"2.0\"")); + assert!(json.contains("\"id\":1")); + assert!(json.contains("\"method\":\"tools/list\"")); + } + + #[test] + fn test_error_response() { + let response_json = + r#"{"jsonrpc":"2.0","id":1,"error":{"code":-32601,"message":"Method not found"}}"#; + let response: JsonRpcResponse = serde_json::from_str(response_json).unwrap(); + + assert!(response.error.is_some()); + assert_eq!(response.error.unwrap().code, -32601); + } +} diff --git a/worker/src/mcp_tool.rs b/worker/src/mcp_tool.rs new file mode 100644 index 0000000..76aff28 --- /dev/null +++ b/worker/src/mcp_tool.rs @@ -0,0 +1,474 @@ +use crate::mcp_protocol::{CallToolResult, McpClient, McpToolDefinition}; +use crate::types::{Tool, ToolResult}; +use async_trait::async_trait; +use serde_json::Value; +use std::sync::Arc; +use tokio::sync::{Mutex, RwLock}; +use tracing::{debug, error, info, warn}; + +/// Convert MCP CallToolResult to JSON format +fn convert_mcp_result_to_json(result: &CallToolResult) -> Value { + if result.content.is_empty() { + serde_json::json!({ + "success": true, + "content": [] + }) + } else { + let content_json: Vec<Value> = result + .content + .iter() + .map(|content| { + serde_json::json!({ + "type": content.content_type, + "text": content.text, + "data": content.data + }) + }) + .collect(); + + serde_json::json!({ + "success": true, + "content": content_json + }) + } +} + +/// MCP server configuration +#[derive(Debug, Clone)] +pub struct McpServerConfig { + pub command: String, + pub args: Vec<String>, + pub name: String, +} + +impl McpServerConfig { + pub fn new(command: impl Into<String>, args: Vec<impl Into<String>>) -> Self { + let command = command.into(); + let args: Vec<String> = args.into_iter().map(|s| s.into()).collect(); + let name = format!("{}({})", command, args.join(" ")); + + Self { + command, + args, + name, + } + } +}
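`convert_mcp_result_to_json` normalizes whatever content items the server returns into one `{"success": ..., "content": [...]}` envelope. A sketch for a single text item (values illustrative):

fn main() {
    let result = CallToolResult {
        content: vec![ToolContent {
            content_type: "text".to_string(),
            text: Some("hello".to_string()),
            data: None,
        }],
    };
    let json = convert_mcp_result_to_json(&result);
    assert_eq!(json["success"], true);
    assert_eq!(json["content"][0]["type"], "text");
    assert_eq!(json["content"][0]["text"], "hello");
}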
+ +/// A working MCP integration tool +pub struct McpDynamicTool { + config: McpServerConfig, + client: Arc<Mutex<Option<McpClient>>>, + tools_cache: Arc<RwLock<Vec<McpToolDefinition>>>, +} + +/// A DynamicTool representing a single MCP tool +pub struct SingleMcpTool { + tool_name: String, + tool_description: String, + tool_schema: Value, + client: Arc<Mutex<Option<McpClient>>>, +} + +impl McpDynamicTool { + /// Create a new MCP tool + pub fn new(config: McpServerConfig) -> Self { + Self { + config, + client: Arc::new(Mutex::new(None)), + tools_cache: Arc::new(RwLock::new(Vec::new())), + } + } + + /// Connect to the MCP server + async fn ensure_connected(&self) -> ToolResult<()> { + let mut client_guard = self.client.lock().await; + + if client_guard.is_none() { + info!("Connecting to MCP server: {}", self.config.name); + + let mut mcp_client = McpClient::new(); + mcp_client + .connect(self.config.command.clone(), self.config.args.clone()) + .await + .map_err(|e| { + crate::WorkerError::ToolExecutionError(format!( + "Failed to connect to MCP server '{}': {}", + self.config.name, e + )) + })?; + + *client_guard = Some(mcp_client); + info!("Successfully connected to MCP server: {}", self.config.name); + } + + Ok(()) + } + + /// Fetch the list of available tools + async fn fetch_tools(&self) -> ToolResult<Vec<McpToolDefinition>> { + self.ensure_connected().await?; + + let mut client_guard = self.client.lock().await; + let client = client_guard.as_mut().ok_or_else(|| { + crate::WorkerError::ToolExecutionError("MCP client not connected".to_string()) + })?; + + let tools = client.list_tools().await.map_err(|e| { + crate::WorkerError::ToolExecutionError(format!( + "Failed to list tools from MCP server '{}': {}", + self.config.name, e + )) + })?; + + debug!( + "Retrieved {} tools from MCP server '{}'", + tools.len(), + self.config.name + ); + Ok(tools) + } + + /// Refresh the tool cache + async fn update_tools_cache(&self) -> ToolResult<()> { + let tools = self.fetch_tools().await?; + let mut cache_guard = self.tools_cache.write().await; + *cache_guard = tools; + Ok(()) + } + + /// Look up a specific tool by name + async fn find_tool_by_name(&self, tool_name: &str) -> ToolResult<Option<McpToolDefinition>> { + let cache_guard = self.tools_cache.read().await; + + // If the cache is empty, refresh it + if cache_guard.is_empty() { + drop(cache_guard); + self.update_tools_cache().await?; + let cache_guard = self.tools_cache.read().await; + let result = cache_guard + .iter() + .find(|tool| tool.name == tool_name) + .cloned(); + Ok(result) + } else { + let result = cache_guard + .iter() + .find(|tool| tool.name == tool_name) + .cloned(); + Ok(result) + } + } + + /// Execute a tool on the MCP server + async fn call_mcp_tool(&self, tool_name: &str, args: Value) -> ToolResult<Value> { + self.ensure_connected().await?; + + let mut client_guard = self.client.lock().await; + let client = client_guard.as_mut().ok_or_else(|| { + crate::WorkerError::ToolExecutionError("MCP client not connected".to_string()) + })?; + + debug!("Calling MCP tool '{}' with args: {}", tool_name, args); + + let result = client.call_tool(tool_name, Some(args)).await.map_err(|e| { + crate::WorkerError::ToolExecutionError(format!( + "Failed to call MCP tool '{}': {}", + tool_name, e + )) + })?; + + debug!("MCP tool '{}' returned: {:?}", tool_name, result); + + // Convert MCP result to JSON + Ok(convert_mcp_result_to_json(&result)) + } +} + +impl SingleMcpTool { + /// Create a new single MCP tool + pub fn new( + tool_name: String, + tool_description: String, + tool_schema: Value, + client: Arc<Mutex<Option<McpClient>>>, + ) -> Self { + Self { + tool_name, + tool_description, + tool_schema, + client, + } + } + + /// Execute the tool on the MCP server + async fn call_mcp_tool(&self, args: Value) -> ToolResult<Value> { + let mut client_guard = self.client.lock().await; + let client = client_guard.as_mut().ok_or_else(|| { + crate::WorkerError::ToolExecutionError("MCP client not connected".to_string()) + })?; + + debug!("Calling MCP tool '{}' with args: {}", self.tool_name, args); + + let result = client + .call_tool(&self.tool_name, Some(args)) + .await + .map_err(|e| { + crate::WorkerError::ToolExecutionError(format!( + "Failed to call MCP tool '{}': {}", + self.tool_name, e + )) + })?; + + debug!("MCP tool '{}' returned: {:?}", self.tool_name, result); + + // Convert MCP result to JSON + Ok(convert_mcp_result_to_json(&result)) + } +} + +#[async_trait] +impl Tool for SingleMcpTool { + fn name(&self) -> &str { + &self.tool_name + } + + fn description(&self) -> &str { + &self.tool_description + } + + fn parameters_schema(&self) -> Value { + self.tool_schema.clone() + } + + async fn execute(&self, args: Value) -> ToolResult<Value> { + self.call_mcp_tool(args).await + } +} + +#[async_trait] +impl Tool for McpDynamicTool { + fn name(&self) -> &str { + "mcp_proxy" + } + + fn description(&self) -> &str { + "Execute tools from external MCP servers" + } + + fn parameters_schema(&self) -> Value { + serde_json::json!({ + "type": "object", + "properties": { + "tool_name": { + "type": "string", + "description": "Name of the MCP tool to execute" + }, + "tool_args": { + "type": "object", + "description": "Arguments to pass to the MCP tool", + "additionalProperties": true + } + }, + "required": ["tool_name", "tool_args"] + }) + }
+ + async fn execute(&self, args: Value) -> ToolResult<Value> { + let tool_name = args + .get("tool_name") + .and_then(|v| v.as_str()) + .ok_or_else(|| { + crate::WorkerError::ToolExecutionError( + "Missing required parameter 'tool_name'".to_string(), + ) + })?; + + let tool_args = args + .get("tool_args") + .ok_or_else(|| { + crate::WorkerError::ToolExecutionError( + "Missing required parameter 'tool_args'".to_string(), + ) + })? + .clone(); + + // Check whether the tool exists + match self.find_tool_by_name(tool_name).await? { + Some(_tool) => { + // Execute the tool + let result = self.call_mcp_tool(tool_name, tool_args).await?; + Ok(serde_json::json!({ + "success": true, + "tool_name": tool_name, + "result": result + })) + } + None => { + // Refresh the tool cache and retry + warn!("Tool '{}' not found in cache, refreshing...", tool_name); + self.update_tools_cache().await?; + + match self.find_tool_by_name(tool_name).await? { + Some(_tool) => { + let result = self.call_mcp_tool(tool_name, tool_args).await?; + Ok(serde_json::json!({ + "success": true, + "tool_name": tool_name, + "result": result + })) + } + None => Err(Box::new(crate::WorkerError::ToolExecutionError(format!( + "Tool '{}' not found in MCP server '{}'", + tool_name, self.config.name + ))) + as Box<dyn std::error::Error + Send + Sync>), + } + } + } + } +} + +/// Fetch the available tools from an MCP server as DynamicToolDefinitions +pub async fn get_mcp_tools_as_definitions( + config: &McpServerConfig, +) -> ToolResult<Vec<crate::types::DynamicToolDefinition>> { + let mcp_tool = McpDynamicTool::new(config.clone()); + let tools = mcp_tool.fetch_tools().await?; + + let mut definitions = Vec::new(); + + for tool in tools { + let definition = crate::types::DynamicToolDefinition { + name: tool.name.clone(), + description: tool + .description + .unwrap_or_else(|| format!("MCP tool: {}", tool.name)), + parameters_schema: tool.input_schema, + }; + definitions.push(definition); + } + + info!( + "Converted {} MCP tools to DynamicToolDefinitions", + definitions.len() + ); + Ok(definitions) +} + +/// Create SingleMcpTools from the tools provided by an MCP server +pub async fn create_single_mcp_tools(config: &McpServerConfig) -> ToolResult<Vec<Box<dyn Tool>>> { + let mcp_tool = McpDynamicTool::new(config.clone()); + let tools = mcp_tool.fetch_tools().await?; + + // Create a shared client + let shared_client = mcp_tool.client.clone(); + + let mut single_tools: Vec<Box<dyn Tool>> = Vec::new(); + + for tool in tools { + let tool_name = tool.name; + let tool_description = tool + .description + .unwrap_or_else(|| format!("MCP tool: {}", tool_name)); + let tool_schema = tool.input_schema; + + let single_tool = SingleMcpTool::new( + tool_name, + tool_description, + tool_schema, + shared_client.clone(), + ); + + single_tools.push(Box::new(single_tool)); + } + + info!( + "Created {} SingleMcpTools from MCP server '{}'", + single_tools.len(), + config.name + ); + Ok(single_tools) +}
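In proxy mode a model invokes everything through the single `mcp_proxy` tool, so its arguments carry the real tool name plus a nested argument object. An illustrative invocation (the echo tool and its args are hypothetical):

#[tokio::main]
async fn main() {
    let config = McpServerConfig::new("npx", vec!["-y", "@modelcontextprotocol/server-everything"]);
    let proxy = McpDynamicTool::new(config);
    let args = serde_json::json!({
        "tool_name": "echo",
        "tool_args": { "text": "hello" }
    });
    // Spawns the server on first use, then forwards the call.
    match proxy.execute(args).await {
        Ok(result) => println!("{}", result),
        Err(e) => eprintln!("proxy call failed: {}", e),
    }
}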
+ +/// Test the connection to an MCP server +pub async fn test_mcp_connection( + config: &McpServerConfig, +) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> { + info!("Testing MCP connection to server: {}", config.name); + + let mut client = McpClient::new(); + match client + .connect(config.command.clone(), config.args.clone()) + .await + { + Ok(()) => { + info!("Successfully connected to MCP server: {}", config.name); + + // Test listing tools + match client.list_tools().await { + Ok(tools) => { + info!( + "MCP server '{}' provides {} tools", + config.name, + tools.len() + ); + for tool in &tools { + debug!( + "Available tool: {} - {}", + tool.name, + tool.description.as_deref().unwrap_or("No description") + ); + } + } + Err(e) => { + warn!( + "Failed to list tools from MCP server '{}': {}", + config.name, e + ); + } + } + + // Close connection + let _ = client.close().await; + Ok(true) + } + Err(e) => { + error!("Failed to connect to MCP server '{}': {}", config.name, e); + Ok(false) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_mcp_server_config() { + let config = + McpServerConfig::new("npx", vec!["-y", "@modelcontextprotocol/server-everything"]); + assert_eq!(config.command, "npx"); + assert_eq!( + config.args, + vec!["-y", "@modelcontextprotocol/server-everything"] + ); + assert_eq!( + config.name, + "npx(-y @modelcontextprotocol/server-everything)" + ); + } + + #[tokio::test] + async fn test_mcp_tool_creation() { + let config = McpServerConfig::new("echo", vec!["test"]); + let tool = McpDynamicTool::new(config); + + assert_eq!(tool.name(), "mcp_proxy"); + assert!(!tool.description().is_empty()); + + let schema = tool.parameters_schema(); + assert!(schema.is_object()); + assert!(schema.get("properties").is_some()); + } +} diff --git a/worker/src/prompt_composer.rs b/worker/src/prompt_composer.rs new file mode 100644 index 0000000..c9822a0 --- /dev/null +++ b/worker/src/prompt_composer.rs @@ -0,0 +1,333 @@ +use crate::config_parser::ConfigParser; +use crate::prompt_types::*; +use crate::types::{Message, Role}; +use handlebars::{Context, Handlebars, Helper, HelperResult, Output, RenderContext}; +use std::fs; +use std::path::Path; + +/// Prompt composition system +#[derive(Clone)] +pub struct PromptComposer { + config: PromptRoleConfig, + handlebars: Handlebars<'static>, + context: PromptContext, + system_prompt: Option<String>, +} + +impl PromptComposer { + /// Create a new instance from a config file + pub fn from_config_file<P: AsRef<Path>>( + config_path: P, + context: PromptContext, + ) -> Result<Self, PromptError> { + let config = ConfigParser::parse_from_file(config_path)?; + Self::from_config(config, context) + } + + /// Create a new instance from a config object + pub fn from_config( + config: PromptRoleConfig, + context: PromptContext, + ) -> Result<Self, PromptError> { + let mut handlebars = Handlebars::new(); + + // Register custom helper functions + Self::register_custom_helpers(&mut handlebars)?; + + let mut composer = Self { + config, + handlebars, + context, + system_prompt: None, + }; + + // Load and register partial templates + composer.load_partials()?; + + Ok(composer) + } + + /// Pre-build the system prompt at session start + pub fn initialize_session(&mut self, initial_messages: &[Message]) -> Result<(), PromptError> { + let system_prompt = self.compose_system_prompt(initial_messages)?; + self.system_prompt = Some(system_prompt); + Ok(()) + } + + /// Main prompt composition method + pub fn compose(&self, messages: &[Message]) -> Result<Vec<Message>, PromptError> { + if let Some(system_prompt) = &self.system_prompt { + // If the system prompt has already been built, use it + let mut result_messages = vec![Message::new(Role::System, system_prompt.clone())]; + + // Append the user messages + for msg in messages { + if msg.role != Role::System { + result_messages.push(msg.clone()); + } + } + + Ok(result_messages) + } else { + // Fallback: legacy dynamic composition + self.compose_with_context(messages, &self.context) + } + } + + /// Session initialization including tool information + pub fn initialize_session_with_tools( + &mut self, + initial_messages: &[Message], + tools_schema: &serde_json::Value, + ) -> Result<(), PromptError> { + // Temporarily copy the context and add the tools schema + let mut temp_context = self.context.clone(); + temp_context + .variables + .insert("tools_schema".to_string(), tools_schema.clone()); + + let system_prompt = + self.compose_system_prompt_with_context(initial_messages, &temp_context)?; + self.system_prompt = Some(system_prompt); + Ok(()) + }
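The design is two-phase: the system prompt is rendered once at session start, then every turn reuses it and only appends the conversation. A hedged usage sketch with a minimal inline config (the template and model values are illustrative):

use std::collections::HashMap;

fn main() -> Result<(), PromptError> {
    let config = PromptRoleConfig {
        name: "demo".to_string(),
        description: "Minimal inline role".to_string(),
        version: None,
        template: "You are a helpful CLI assistant on {{workspace.system_info.os_name}}.".to_string(),
        partials: None,
        variables: None,
        conditions: None,
    };
    let context = PromptContext {
        workspace: WorkspaceContext::default(),
        model: ModelContext {
            provider: crate::types::LlmProvider::OpenAI,
            model_name: "gpt-4o".to_string(),
            capabilities: ModelCapabilities::default(),
            supports_native_tools: true,
        },
        session: SessionContext::default(),
        variables: HashMap::new(),
    };
    let mut composer = PromptComposer::from_config(config, context)?;
    // Build the system prompt once at session start...
    composer.initialize_session(&[])?;
    // ...then each turn just appends the user messages.
    let turn = vec![Message::new(Role::User, "List the project files".to_string())];
    let prompt = composer.compose(&turn)?;
    assert_eq!(prompt[0].role, Role::System);
    Ok(())
}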
+ + /// Prompt composition including tool information (kept for backward compatibility) + pub fn compose_with_tools( + &self, + messages: &[Message], + tools_schema: &serde_json::Value, + ) -> Result<Vec<Message>, PromptError> { + if let Some(system_prompt) = &self.system_prompt { + // If the system prompt has already been built, use it + let mut result_messages = vec![Message::new(Role::System, system_prompt.clone())]; + + // Append the user messages + for msg in messages { + if msg.role != Role::System { + result_messages.push(msg.clone()); + } + } + + Ok(result_messages) + } else { + // Fallback: legacy dynamic composition + let mut temp_context = self.context.clone(); + temp_context + .variables + .insert("tools_schema".to_string(), tools_schema.clone()); + + self.compose_with_context(messages, &temp_context) + } + } + + /// Build only the system prompt (for session initialization) + fn compose_system_prompt(&self, messages: &[Message]) -> Result<String, PromptError> { + self.compose_system_prompt_with_context(messages, &self.context) + } + + /// Build the system prompt with a given context + fn compose_system_prompt_with_context( + &self, + messages: &[Message], + context: &PromptContext, + ) -> Result<String, PromptError> { + // Prepare the context variables + let mut template_data = self.prepare_template_data_with_context(messages, context)?; + + // Evaluate conditions and set variables dynamically + self.apply_conditions(&mut template_data)?; + + // Render the main template + let system_prompt = self + .handlebars + .render_template(&self.config.template, &template_data) + .map_err(PromptError::Handlebars)?; + + Ok(system_prompt) + } + + /// Compose the prompt with a given context (kept for backward compatibility) + fn compose_with_context( + &self, + messages: &[Message], + context: &PromptContext, + ) -> Result<Vec<Message>, PromptError> { + let system_prompt = self.compose_system_prompt_with_context(messages, context)?; + + // Combine the system message with the user messages + let mut result_messages = vec![Message::new(Role::System, system_prompt)]; + + // Append the user messages + for msg in messages { + if msg.role != Role::System { + result_messages.push(msg.clone()); + } + } + + Ok(result_messages) + } + + /// Register custom helper functions + fn register_custom_helpers(handlebars: &mut Handlebars) -> Result<(), PromptError> { + // Implement only basic helpers (avoids complex lifetime issues) + handlebars.register_helper("include_file", Box::new(include_file_helper)); + handlebars.register_helper("workspace_content", Box::new(workspace_content_helper)); + + Ok(()) + } + + /// Load and register partial templates + fn load_partials(&mut self) -> Result<(), PromptError> { + if let Some(partials) = &self.config.partials { + for (name, partial_config) in partials { + let content = self.load_partial_content(partial_config)?; + self.handlebars + .register_partial(name, content) + .map_err(|e| PromptError::PartialLoading(e.to_string()))?; + } + } + Ok(()) + } + + /// Load the content of a partial (with fallback support) + fn load_partial_content(&self, partial_config: &PartialConfig) -> Result<String, PromptError> { + let primary_path = ConfigParser::resolve_path(&partial_config.path)?; + + // Try the primary path + if let Ok(content) = fs::read_to_string(&primary_path) { + return Ok(content); + } + + // Try the fallback path + if let Some(fallback) = &partial_config.fallback { + let fallback_path = ConfigParser::resolve_path(fallback)?; + if let Ok(content) = fs::read_to_string(&fallback_path) { + return Ok(content); + } + } + + Err(PromptError::FileNotFound(format!( + "Could not load partial '{}' from {} (fallback: {:?})", + partial_config.path, + primary_path.display(), + partial_config.fallback + ))) + } + + /// Prepare the template data with a given context + fn prepare_template_data_with_context( + &self, + messages: &[Message], + context: &PromptContext, + ) -> Result<serde_json::Value, PromptError> { + let user_input = messages + .iter() + .filter(|m| m.role == Role::User) + .map(|m| m.content.as_str()) + .collect::<Vec<_>>() + .join("\n\n"); + + let mut data = serde_json::json!({ + "workspace": context.workspace,
"model": context.model, + "session": context.session, + "user_input": user_input, + "tools": context.variables.get("tools_schema").unwrap_or(&serde_json::Value::Null), + "tools_schema": context.variables.get("tools_schema").unwrap_or(&serde_json::Value::Null), + }); + + // 設定ファイルの変数を追加 + if let Some(variables) = &self.config.variables { + for (key, value_template) in variables { + // 変数値もHandlebarsテンプレートとして処理 + let resolved_value = self + .handlebars + .render_template(value_template, &data) + .map_err(PromptError::Handlebars)?; + data[key] = serde_json::Value::String(resolved_value); + } + } + + // コンテキストの追加変数をマージ + for (key, value) in &context.variables { + data[key] = value.clone(); + } + + Ok(data) + } + + /// 条件評価と動的変数設定 + fn apply_conditions(&self, data: &mut serde_json::Value) -> Result<(), PromptError> { + if let Some(conditions) = &self.config.conditions { + for (_condition_name, condition_config) in conditions { + // 条件式を評価 + let condition_result = self + .handlebars + .render_template(&condition_config.when, data) + .map_err(PromptError::Handlebars)?; + + // 条件が真の場合、変数を適用 + if condition_result.trim() == "true" { + if let Some(variables) = &condition_config.variables { + for (key, value_template) in variables { + let resolved_value = self + .handlebars + .render_template(value_template, data) + .map_err(PromptError::Handlebars)?; + data[key] = serde_json::Value::String(resolved_value); + } + } + } + } + } + Ok(()) + } +} + +// カスタムヘルパー関数の実装 + +fn include_file_helper( + h: &Helper, + _hbs: &Handlebars, + _ctx: &Context, + _rc: &mut RenderContext, + out: &mut dyn Output, +) -> HelperResult { + let file_path = h.param(0).and_then(|v| v.value().as_str()).unwrap_or(""); + + match ConfigParser::resolve_path(file_path) { + Ok(path) => { + match fs::read_to_string(&path) { + Ok(content) => { + out.write(&content)?; + } + Err(_) => { + // ファイルが見つからない場合は空文字を出力 + out.write("")?; + } + } + } + Err(_) => { + out.write("")?; + } + } + Ok(()) +} + +fn workspace_content_helper( + _h: &Helper, + _hbs: &Handlebars, + ctx: &Context, + _rc: &mut RenderContext, + out: &mut dyn Output, +) -> HelperResult { + if let Some(workspace) = ctx.data().get("workspace") { + if let Some(content) = workspace.get("nia_md_content") { + if let Some(content_str) = content.as_str() { + out.write(content_str)?; + } + } + } + Ok(()) +} diff --git a/worker/src/prompt_types.rs b/worker/src/prompt_types.rs new file mode 100644 index 0000000..bb0ecd7 --- /dev/null +++ b/worker/src/prompt_types.rs @@ -0,0 +1,377 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; + +/// ロール設定ファイルの型定義 +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PromptRoleConfig { + pub name: String, + pub description: String, + pub version: Option, + pub template: String, + pub partials: Option>, + pub variables: Option>, + pub conditions: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PartialConfig { + pub path: String, + pub fallback: Option, + pub description: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConditionConfig { + pub when: String, + pub variables: Option>, + pub template_override: Option, +} + +/// システム情報 +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SystemInfo { + pub os_name: String, // linux, windows, macos + pub kernel_version: String, // Linux 6.15.6 + pub distribution: String, // NixOS 25.11 (Xantusia) + pub architecture: String, // x86_64 + pub full_system_info: String, // 全体の情報を組み合わせた文字列 + pub 
diff --git a/worker/src/prompt_types.rs b/worker/src/prompt_types.rs new file mode 100644 index 0000000..bb0ecd7 --- /dev/null +++ b/worker/src/prompt_types.rs @@ -0,0 +1,377 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; + +/// Type definitions for role configuration files +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PromptRoleConfig { + pub name: String, + pub description: String, + pub version: Option<String>, + pub template: String, + pub partials: Option<HashMap<String, PartialConfig>>, + pub variables: Option<HashMap<String, String>>, + pub conditions: Option<HashMap<String, ConditionConfig>>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PartialConfig { + pub path: String, + pub fallback: Option<String>, + pub description: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConditionConfig { + pub when: String, + pub variables: Option<HashMap<String, String>>, + pub template_override: Option<String>, +} + +/// System information +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SystemInfo { + pub os_name: String, // linux, windows, macos + pub kernel_version: String, // Linux 6.15.6 + pub distribution: String, // NixOS 25.11 (Xantusia) + pub architecture: String, // x86_64 + pub full_system_info: String, // combined string of all of the above + pub working_directory: String, + pub current_time: String, + pub timezone: String, +} + +/// Workspace context +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WorkspaceContext { + pub root_path: PathBuf, + pub nia_md_content: Option<String>, + pub project_type: Option<ProjectType>, + pub git_info: Option<GitInfo>, + pub has_nia_md: bool, + pub project_name: Option<String>, + pub system_info: SystemInfo, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GitInfo { + pub repo_name: Option<String>, + pub current_branch: Option<String>, + pub last_commit_summary: Option<String>, + pub is_clean: Option<bool>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub enum ProjectType { + Rust, + JavaScript, + TypeScript, + Python, + Go, + Java, + Cpp, + Unknown, +} + +/// Model context +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelContext { + pub provider: crate::types::LlmProvider, + pub model_name: String, + pub capabilities: ModelCapabilities, + pub supports_native_tools: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelCapabilities { + pub supports_tools: bool, + pub supports_function_calling: bool, + pub supports_vision: bool, + pub supports_multimodal: Option<bool>, + pub context_length: Option<u32>, + pub capabilities: Vec<String>, + pub needs_verification: Option<bool>, +} + +/// Session context +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SessionContext { + pub conversation_id: Option<String>, + pub message_count: usize, + pub active_tools: Vec<String>, + pub user_preferences: Option<HashMap<String, String>>, +} + +/// Overall prompt context +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PromptContext { + pub workspace: WorkspaceContext, + pub model: ModelContext, + pub session: SessionContext, + pub variables: HashMap<String, serde_json::Value>, +} + +/// Prompt composition errors +#[derive(Debug, thiserror::Error)] +pub enum PromptError { + #[error("Template compilation error: {0}")] + TemplateCompilation(String), + + #[error("Variable resolution error: {0}")] + VariableResolution(String), + + #[error("Partial loading error: {0}")] + PartialLoading(String), + + #[error("File not found: {0}")] + FileNotFound(String), + + #[error("Workspace detection error: {0}")] + WorkspaceDetection(String), + + #[error("Git information error: {0}")] + GitInfo(String), + + #[error("Handlebars error: {0}")] + Handlebars(#[from] handlebars::RenderError), + + #[error("IO error: {0}")] + Io(#[from] std::io::Error), + + #[error("YAML parsing error: {0}")] + YamlParsing(#[from] serde_yaml::Error), +} + +impl SystemInfo { + /// Collect detailed system information + pub fn collect() -> Self { + let current_dir = std::env::current_dir() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_else(|_| ".".to_string()); + + let now = chrono::Local::now(); + let current_time = now.format("%Y-%m-%d %H:%M:%S").to_string(); + let timezone = now.format("%Z").to_string(); + + let os_name = std::env::consts::OS.to_string(); + let architecture = std::env::consts::ARCH.to_string(); + + let (kernel_version, distribution) = Self::get_system_details(); + + // Build the full system info string + let full_system_info = if distribution.is_empty() { + format!("{} {}", kernel_version, architecture) + } else { + format!("{} - {} {}", kernel_version, distribution, architecture) + }; + + Self { + os_name, + kernel_version, + distribution, + architecture, + full_system_info, + working_directory: current_dir, + current_time, + timezone, + } + }
#[cfg(target_os = "macos")] + { + Self::get_macos_details() + } + #[cfg(not(any(target_os = "linux", target_os = "windows", target_os = "macos")))] + { + (std::env::consts::OS.to_string(), String::new()) + } + } + + #[cfg(target_os = "linux")] + fn get_linux_details() -> (String, String) { + use std::process::Command; + + // カーネルバージョンを取得 + let kernel_version = Command::new("uname") + .arg("-r") + .output() + .ok() + .and_then(|output| { + if output.status.success() { + Some(format!( + "Linux {}", + String::from_utf8_lossy(&output.stdout).trim() + )) + } else { + None + } + }) + .unwrap_or_else(|| "Linux".to_string()); + + // ディストリビューション情報を取得 + let distribution = Self::get_linux_distribution(); + + (kernel_version, distribution) + } + + #[cfg(target_os = "linux")] + fn get_linux_distribution() -> String { + use std::fs; + + // /etc/os-release を読み取る + if let Ok(content) = fs::read_to_string("/etc/os-release") { + let mut name = None; + let mut version = None; + let mut pretty_name = None; + + for line in content.lines() { + if let Some(value) = line.strip_prefix("NAME=") { + name = Some(value.trim_matches('"').to_string()); + } else if let Some(value) = line.strip_prefix("VERSION=") { + version = Some(value.trim_matches('"').to_string()); + } else if let Some(value) = line.strip_prefix("PRETTY_NAME=") { + pretty_name = Some(value.trim_matches('"').to_string()); + } + } + + // PRETTY_NAME があればそれを使用、なければ NAME + VERSION + if let Some(pretty) = pretty_name { + return pretty; + } else if let (Some(n), Some(v)) = (name, version) { + return format!("{} {}", n, v); + } + } + + // /etc/issue をフォールバックとして試行 + if let Ok(content) = fs::read_to_string("/etc/issue") { + let first_line = content.lines().next().unwrap_or("").trim(); + if !first_line.is_empty() && !first_line.contains("\\") { + return first_line.to_string(); + } + } + + String::new() + } + + #[cfg(target_os = "windows")] + fn get_windows_details() -> (String, String) { + use std::process::Command; + + let version = Command::new("cmd") + .args(&["/C", "ver"]) + .output() + .ok() + .and_then(|output| { + if output.status.success() { + Some(String::from_utf8_lossy(&output.stdout).trim().to_string()) + } else { + None + } + }) + .unwrap_or_else(|| "Windows".to_string()); + + (version, String::new()) + } + + #[cfg(target_os = "macos")] + fn get_macos_details() -> (String, String) { + use std::process::Command; + + let version = Command::new("sw_vers") + .arg("-productVersion") + .output() + .ok() + .and_then(|output| { + if output.status.success() { + Some(format!( + "macOS {}", + String::from_utf8_lossy(&output.stdout).trim() + )) + } else { + None + } + }) + .unwrap_or_else(|| "macOS".to_string()); + + (version, String::new()) + } +} + +impl Default for SystemInfo { + fn default() -> Self { + Self::collect() + } +} + +impl Default for WorkspaceContext { + fn default() -> Self { + Self { + root_path: std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")), + nia_md_content: None, + project_type: None, + git_info: None, + has_nia_md: false, + project_name: None, + system_info: SystemInfo::default(), + } + } +} + +impl Default for ModelCapabilities { + fn default() -> Self { + Self { + supports_tools: false, + supports_function_calling: false, + supports_vision: false, + supports_multimodal: None, + context_length: None, + capabilities: Vec::new(), + needs_verification: Some(false), + } + } +} + +impl Default for SessionContext { + fn default() -> Self { + Self { + conversation_id: None, + message_count: 0, + active_tools: Vec::new(), + 
+ +impl Default for SessionContext { + fn default() -> Self { + Self { + conversation_id: None, + message_count: 0, + active_tools: Vec::new(), + user_preferences: None, + } + } +} + +impl Default for PromptRoleConfig { + fn default() -> Self { + let mut partials = HashMap::new(); + partials.insert( + "role_definition".to_string(), + PartialConfig { + path: "./resources/prompts/cli-assistant.md".to_string(), + fallback: None, + description: Some("Default role definition".to_string()), + }, + ); + + Self { + name: "default".to_string(), + description: "Default dynamic role configuration".to_string(), + version: Some("1.0.0".to_string()), + template: "{{>role_definition}}".to_string(), + partials: Some(partials), + variables: None, + conditions: None, + } + } +} diff --git a/worker/src/tests/config_tests.rs b/worker/src/tests/config_tests.rs new file mode 100644 index 0000000..5ed2352 --- /dev/null +++ b/worker/src/tests/config_tests.rs @@ -0,0 +1,252 @@ +use crate::config_parser::ConfigParser; +use std::io::Write; +use tempfile::NamedTempFile; + +#[test] +fn test_parse_basic_config() { + let yaml_content = r##" +name: "Test Assistant" +description: "A test configuration" +version: "1.0" +template: | + # Test Role + {{>role_header}} + + {{#if workspace.has_nia_md}} + # Project Context + {{workspace.nia_md_content}} + {{/if}} + +partials: + role_header: + path: "#nia/prompts/headers/role.md" + description: "Basic role definition" + +variables: + max_context_length: "{{model.context_length}}" + project_name: "{{workspace.project_name}}" +"##; + + let config = + ConfigParser::parse_from_string(yaml_content).expect("Failed to parse basic config"); + + assert_eq!(config.name, "Test Assistant"); + assert_eq!(config.description, "A test configuration"); + assert_eq!(config.version, Some("1.0".to_string())); + assert!(!config.template.is_empty()); + + // Test the partials + let partials = config.partials.expect("Partials should be present"); + assert!(partials.contains_key("role_header")); + let role_header = &partials["role_header"]; + assert_eq!(role_header.path, "#nia/prompts/headers/role.md"); + assert_eq!( + role_header.description, + Some("Basic role definition".to_string()) + ); + + // Test the variables + let variables = config.variables.expect("Variables should be present"); + assert!(variables.contains_key("max_context_length")); + assert!(variables.contains_key("project_name")); +} + +#[test] +fn test_parse_minimal_config() { + let yaml_content = r##" +name: "Minimal Assistant" +description: "A minimal configuration" +template: "Hello {{user_input}}" +"##; + + let config = + ConfigParser::parse_from_string(yaml_content).expect("Failed to parse minimal config"); + + assert_eq!(config.name, "Minimal Assistant"); + assert_eq!(config.description, "A minimal configuration"); + assert_eq!(config.template, "Hello {{user_input}}"); + assert!(config.partials.is_none()); + assert!(config.variables.is_none()); + assert!(config.conditions.is_none()); +}
assert!(conditions.contains_key("xml_tools_enabled")); + + let native_condition = &conditions["native_tools_enabled"]; + assert_eq!(native_condition.when, "{{model.supports_native_tools}}"); + + let variables = native_condition + .variables + .as_ref() + .expect("Variables should be present"); + assert_eq!(variables.get("tool_format"), Some(&"native".to_string())); + assert_eq!( + variables.get("include_tool_schemas"), + Some(&"false".to_string()) + ); +} + +#[test] +fn test_validation_errors() { + // 空の名前 + let invalid_yaml = r##" +name: "" +description: "Test" +template: "Test template" +"##; + let result = ConfigParser::parse_from_string(invalid_yaml); + assert!(result.is_err()); + + // 空のテンプレート + let invalid_yaml = r##" +name: "Test" +description: "Test" +template: "" +"##; + let result = ConfigParser::parse_from_string(invalid_yaml); + assert!(result.is_err()); + + // 空のパーシャルパス + let invalid_yaml = r##" +name: "Test" +description: "Test" +template: "Test template" +partials: + empty_path: + path: "" +"##; + let result = ConfigParser::parse_from_string(invalid_yaml); + assert!(result.is_err()); +} + +#[test] +fn test_parse_from_file() { + let yaml_content = r##" +name: "File Test Assistant" +description: "Testing file parsing" +template: "File content {{user_input}}" +"##; + + let mut temp_file = NamedTempFile::new().expect("Failed to create temp file"); + temp_file + .write_all(yaml_content.as_bytes()) + .expect("Failed to write to temp file"); + + let config = + ConfigParser::parse_from_file(temp_file.path()).expect("Failed to parse config from file"); + + assert_eq!(config.name, "File Test Assistant"); + assert_eq!(config.description, "Testing file parsing"); + assert_eq!(config.template, "File content {{user_input}}"); +} + +#[test] +fn test_resolve_path() { + // #nia/ prefix + let path = + ConfigParser::resolve_path("#nia/prompts/test.md").expect("Failed to resolve nia path"); + assert!( + path.to_string_lossy() + .contains("nia-cli/resources/prompts/test.md") + ); + + // #workspace/ prefix + let path = ConfigParser::resolve_path("#workspace/config.md") + .expect("Failed to resolve workspace path"); + assert!(path.to_string_lossy().contains(".nia/config.md")); + + // #user/ prefix + let path = + ConfigParser::resolve_path("#user/settings.md").expect("Failed to resolve user path"); + assert!(path.to_string_lossy().contains("settings.md")); + + // Regular path + let path = + ConfigParser::resolve_path("regular/path.md").expect("Failed to resolve regular path"); + assert_eq!(path.to_string_lossy(), "regular/path.md"); +} + +#[test] +fn test_complex_template_syntax() { + let yaml_content = r##" +name: "Complex Template Assistant" +description: "Testing complex Handlebars syntax" +template: | + # Dynamic Role + {{>role_header}} + + {{#if workspace.has_nia_md}} + # Project: {{workspace.project_name}} + {{workspace.nia_md_content}} + {{/if}} + + {{#if_native_tools model.supports_native_tools}} + Native tools are supported. + {{else}} + Using XML-based tool calls. + Available tools: + ```json + {{tools_schema}} + ``` + {{/if_native_tools}} + + {{#model_specific model.provider}} + {{#case "Claude"}} + Claude-specific instructions here. + {{/case}} + {{#case "Gemini"}} + Gemini-specific instructions here. + {{/case}} + {{#default}} + Generic model instructions. 
+  {{/default}}
+  {{/model_specific}}
+
+partials:
+  role_header:
+    path: "#nia/prompts/headers/role.md"
+"##;
+
+    let config =
+        ConfigParser::parse_from_string(yaml_content).expect("Failed to parse complex template");
+
+    assert!(!config.template.is_empty());
+    assert!(config.template.contains("{{>role_header}}"));
+    assert!(config.template.contains("{{#if workspace.has_nia_md}}"));
+    assert!(
+        config
+            .template
+            .contains("{{#if_native_tools model.supports_native_tools}}")
+    );
+    assert!(
+        config
+            .template
+            .contains("{{#model_specific model.provider}}")
+    );
+}
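test_resolve_path above pins down the three path scopes the config format supports. The following is a minimal sketch of resolution rules consistent with those assertions; the base directories for each scope are assumptions here, not the actual ConfigParser implementation:

    use std::path::PathBuf;

    // Hypothetical resolver matching the assertions in test_resolve_path;
    // the real implementation may anchor these scopes differently.
    fn resolve_path_sketch(path: &str) -> PathBuf {
        if let Some(rest) = path.strip_prefix("#nia/") {
            // Resources bundled with the CLI (assumed install layout).
            PathBuf::from("nia-cli/resources").join(rest)
        } else if let Some(rest) = path.strip_prefix("#workspace/") {
            // Workspace-scoped files under .nia/ in the project root.
            PathBuf::from(".nia").join(rest)
        } else if let Some(rest) = path.strip_prefix("#user/") {
            // User-scoped files under the home directory (dirs crate).
            dirs::home_dir().unwrap_or_default().join(rest)
        } else {
            // Anything else passes through unchanged.
            PathBuf::from(path)
        }
    }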
assert!(system_prompt.contains("test-project")); +} + +#[test] +#[ignore] // Temporarily disabled due to missing dependencies +fn test_native_tools_vs_xml_tools() { + let temp_dir = TempDir::new().expect("Failed to create temp dir"); + let workspace = WorkspaceDetector::detect_workspace_from_path(temp_dir.path()) + .expect("Failed to detect workspace"); + + // ネイティブツールサポートありのモデル + let native_model = ModelContext { + provider: LlmProvider::Claude, + model_name: "claude-3-sonnet".to_string(), + capabilities: ModelCapabilities { + supports_tools: true, + supports_function_calling: true, + ..Default::default() + }, + supports_native_tools: true, + }; + + // XMLツールのみのモデル + let xml_model = ModelContext { + provider: LlmProvider::Ollama, + model_name: "llama3".to_string(), + capabilities: ModelCapabilities { + supports_tools: false, + supports_function_calling: false, + ..Default::default() + }, + supports_native_tools: false, + }; + + let session = SessionContext::default(); + + // 両方のモデルでプロンプトを生成 + let native_context = PromptContext { + workspace: workspace.clone(), + model: native_model, + session: session.clone(), + variables: HashMap::new(), + }; + + let xml_context = PromptContext { + workspace: workspace.clone(), + model: xml_model, + session: session.clone(), + variables: HashMap::new(), + }; + + let config = create_test_dynamic_config(); + + let mut native_composer = DynamicPromptComposer::from_config(config.clone(), native_context) + .expect("Failed to create native composer"); + + let mut xml_composer = DynamicPromptComposer::from_config(config, xml_context) + .expect("Failed to create xml composer"); + + let messages = vec![Message { + role: Role::User, + content: "Test message".to_string(), + }]; + + let native_result = native_composer + .compose(&messages) + .expect("Failed to compose native prompt"); + + let xml_result = xml_composer + .compose(&messages) + .expect("Failed to compose xml prompt"); + + // 両方のプロンプトが生成されることを確認 + assert!(!native_result.is_empty()); + assert!(!xml_result.is_empty()); + + // ネイティブツール用プロンプトとXMLツール用プロンプトが異なることを確認 + assert_ne!(native_result[0].content, xml_result[0].content); +} + +#[test] +#[ignore] // Temporarily disabled due to missing dependencies +fn test_workspace_detection_without_nia_md() { + let temp_dir = TempDir::new().expect("Failed to create temp dir"); + + // .nia ディレクトリのみ作成(NIA.mdなし) + fs::create_dir(temp_dir.path().join(".nia")).expect("Failed to create .nia dir"); + + let workspace = WorkspaceDetector::detect_workspace_from_path(temp_dir.path()) + .expect("Failed to detect workspace"); + + assert!(!workspace.has_nia_md); + assert!(workspace.nia_md_content.is_none()); + assert_eq!(workspace.project_type, Some(ProjectType::Unknown)); +} + +#[test] +#[ignore] // Temporarily disabled due to missing dependencies +fn test_project_type_detection() { + let temp_dir = TempDir::new().expect("Failed to create temp dir"); + let temp_path = temp_dir.path(); + + // TypeScriptプロジェクト + fs::write(temp_path.join("package.json"), r#"{"name": "test"}"#) + .expect("Failed to write package.json"); + fs::write(temp_path.join("tsconfig.json"), "{}").expect("Failed to write tsconfig.json"); + + let workspace = WorkspaceDetector::detect_workspace_from_path(temp_path) + .expect("Failed to detect workspace"); + + assert_eq!(workspace.project_type, Some(ProjectType::TypeScript)); +} + +#[test] +#[ignore] // Temporarily disabled due to missing dependencies +fn test_tools_schema_integration() { + let temp_dir = TempDir::new().expect("Failed to create temp dir"); + let 
+    let workspace = WorkspaceDetector::detect_workspace_from_path(temp_dir.path())
+        .expect("Failed to detect workspace");
+
+    let model_context = ModelContext {
+        provider: LlmProvider::Gemini,
+        model_name: "gemini-1.5-flash".to_string(),
+        capabilities: ModelCapabilities {
+            supports_tools: false,
+            supports_function_calling: false,
+            ..Default::default()
+        },
+        supports_native_tools: false,
+    };
+
+    let session_context = SessionContext::default();
+    let prompt_context = PromptContext {
+        workspace,
+        model: model_context,
+        session: session_context,
+        variables: HashMap::new(),
+    };
+
+    let config = create_test_dynamic_config();
+    let mut composer = DynamicPromptComposer::from_config(config, prompt_context)
+        .expect("Failed to create composer");
+
+    // Build the tools schema
+    let tools_schema = serde_json::json!([
+        {
+            "name": "file_read",
+            "description": "Read a file",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "path": {"type": "string"}
+                }
+            }
+        }
+    ]);
+
+    let messages = vec![Message {
+        role: Role::User,
+        content: "Read a file for me".to_string(),
+    }];
+
+    // Compose the prompt with tool information
+    let result = composer
+        .compose_with_tools(&messages, &tools_schema)
+        .expect("Failed to compose with tools");
+
+    assert!(!result.is_empty());
+
+    // Since this is an XML-tool model, the tool information should appear in the prompt
+    let system_prompt = &result[0].content;
+    assert!(system_prompt.contains("file_read"));
+}
+
+// Create a dynamic configuration for tests
+fn create_test_dynamic_config() -> DynamicRoleConfig {
+    let mut variables = HashMap::new();
+    variables.insert(
+        "project_name".to_string(),
+        "{{workspace.project_name}}".to_string(),
+    );
+    variables.insert("model_name".to_string(), "{{model.model_name}}".to_string());
+
+    let mut conditions = HashMap::new();
+
+    // Native-tools condition
+    let mut native_vars = HashMap::new();
+    native_vars.insert("tool_format".to_string(), "native".to_string());
+    conditions.insert(
+        "native_tools".to_string(),
+        ConditionConfig {
+            when: "{{model.supports_native_tools}}".to_string(),
+            variables: Some(native_vars),
+            template_override: None,
+        },
+    );
+
+    // XML-tools condition
+    let mut xml_vars = HashMap::new();
+    xml_vars.insert("tool_format".to_string(), "xml".to_string());
+    conditions.insert(
+        "xml_tools".to_string(),
+        ConditionConfig {
+            when: "{{not model.supports_native_tools}}".to_string(),
+            variables: Some(xml_vars),
+            template_override: None,
+        },
+    );
+
+    DynamicRoleConfig {
+        name: "Test Assistant".to_string(),
+        description: "A test configuration".to_string(),
+        version: Some("1.0".to_string()),
+        template: r#"# Test Role
+
+{{#if workspace.has_nia_md}}
+# Project Context
+Project: {{workspace.project_name}}
+{{workspace.nia_md_content}}
+{{/if}}
+
+{{#if model.supports_native_tools}}
+Native tools are supported for {{model.model_name}}.
+{{else}}
+Using XML-based tool calls for {{model.model_name}}.
+{{#if tools_schema}}
+Available tools: {{tools_schema}}
+{{/if}}
+{{/if}}
+
+User request: {{user_input}}
+"#
+        .to_string(),
+        partials: None, // No partials used
+        variables: Some(variables),
+        conditions: Some(conditions),
+    }
+}
diff --git a/worker/src/types.rs b/worker/src/types.rs
new file mode 100644
index 0000000..726ccde
--- /dev/null
+++ b/worker/src/types.rs
@@ -0,0 +1,50 @@
+// Re-export all types from worker-types for backwards compatibility
+pub use worker_types::*;
+
+// Worker-specific error type
+#[derive(Debug, thiserror::Error)]
+pub enum WorkerError {
+    #[error("Tool execution failed: {0}")]
+    ToolExecution(String),
+    #[error("Tool execution error: {0}")]
+    ToolExecutionError(String),
+    #[error("LLM API error: {0}")]
+    LlmApiError(String),
+    #[error("Model not found: {0}")]
+    ModelNotFound(String),
+    #[error("JSON serialization/deserialization error: {0}")]
+    JsonError(#[from] serde_json::Error),
+    #[error("Serialization error: {0}")]
+    Serialization(serde_json::Error),
+    #[error("Network error: {0}")]
+    Network(String),
+    #[error("Configuration error: {0}")]
+    Config(String),
+    #[error("Configuration error: {0}")]
+    ConfigurationError(String),
+    #[error("General error: {0}")]
+    General(#[from] anyhow::Error),
+    #[error("Box error: {0}")]
+    BoxError(Box<dyn std::error::Error + Send + Sync>),
+}
+
+impl From<&str> for WorkerError {
+    fn from(s: &str) -> Self {
+        WorkerError::General(anyhow::anyhow!(s.to_string()))
+    }
+}
+
+impl From<String> for WorkerError {
+    fn from(s: String) -> Self {
+        WorkerError::General(anyhow::anyhow!(s))
+    }
+}
+
+impl From<Box<dyn std::error::Error + Send + Sync>> for WorkerError {
+    fn from(e: Box<dyn std::error::Error + Send + Sync>) -> Self {
+        WorkerError::BoxError(e)
+    }
+}
+
+// Update ToolResult to use WorkerError
+pub type WorkerToolResult = Result<ToolResult, WorkerError>;
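The From impls above let a tool implementation mix error sources behind the ? operator. A small sketch; parse_tool_args and its argument shape are illustrative, not part of the crate:

    use crate::types::WorkerError;

    // Illustrative only: serde_json::Error flows through #[from] into
    // WorkerError::JsonError, and a &str converts into WorkerError::General.
    fn parse_tool_args(raw: &str) -> Result<serde_json::Value, WorkerError> {
        let value: serde_json::Value = serde_json::from_str(raw)?;
        if !value.is_object() {
            return Err("tool arguments must be a JSON object".into());
        }
        Ok(value)
    }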
"claude" => format!("{}/v1/messages", base_url), + "gemini" | "google" => format!("{}/v1beta/models/{{model}}:generateContent", base_url), + "xai" => format!("{}/v1/chat/completions", base_url), + "ollama" => format!("{}/api/chat", base_url), + _ => panic!("Unknown LLM provider: {}", provider), + } + } + + /// Get model-specific endpoint URL for a provider + pub fn get_model_url(provider: &str, model_id: &str) -> String { + let base_url = Self::get_base_url(provider); + match provider.to_lowercase().as_str() { + "openai" => format!("{}/v1/models/{}", base_url, model_id), + "anthropic" | "claude" => format!("{}/v1/models/{}", base_url, model_id), + "gemini" | "google" => format!("{}/v1beta/models/{}", base_url, model_id), + "xai" => format!("{}/v1/models/{}", base_url, model_id), + "ollama" => format!("{}/api/show", base_url), // Ollama uses different pattern + _ => panic!("Unknown LLM provider: {}", provider), + } + } + + /// Get all active URL overrides from environment variables + pub fn get_active_overrides() -> Vec<(String, String)> { + let providers = ["openai", "anthropic", "gemini", "xai", "ollama"]; + let mut overrides = Vec::new(); + + for provider in providers { + let env_var = format!("{}_BASE_URL", provider.to_uppercase()); + if let Ok(url) = env::var(&env_var) { + overrides.push((provider.to_string(), url)); + } + } + + overrides + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::env; + + #[test] + fn test_default_urls() { + // Clean up any existing env vars first + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + env::remove_var("GEMINI_BASE_URL"); + env::remove_var("XAI_BASE_URL"); + env::remove_var("OLLAMA_BASE_URL"); + + assert_eq!(UrlConfig::get_base_url("openai"), "https://api.openai.com"); + assert_eq!( + UrlConfig::get_base_url("anthropic"), + "https://api.anthropic.com" + ); + assert_eq!( + UrlConfig::get_base_url("gemini"), + "https://generativelanguage.googleapis.com" + ); + assert_eq!(UrlConfig::get_base_url("xai"), "https://api.x.ai"); + assert_eq!(UrlConfig::get_base_url("ollama"), "http://localhost:11434"); + } + + #[test] + fn test_env_override() { + // Clean up any existing env vars first + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + + env::set_var("OPENAI_BASE_URL", "https://custom.openai.com"); + env::set_var("ANTHROPIC_BASE_URL", "https://custom.anthropic.com"); + + assert_eq!( + UrlConfig::get_base_url("openai"), + "https://custom.openai.com" + ); + assert_eq!( + UrlConfig::get_base_url("anthropic"), + "https://custom.anthropic.com" + ); + + // Clean up + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + } + + #[test] + fn test_models_url() { + // Clean up any existing env vars first + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + env::remove_var("OLLAMA_BASE_URL"); + + assert_eq!( + UrlConfig::get_models_url("openai"), + "https://api.openai.com/v1/models" + ); + assert_eq!( + UrlConfig::get_models_url("anthropic"), + "https://api.anthropic.com/v1/models" + ); + assert_eq!( + UrlConfig::get_models_url("ollama"), + "http://localhost:11434/api/tags" + ); + } + + #[test] + fn test_completion_url() { + // Clean up any existing env vars first + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + env::remove_var("OLLAMA_BASE_URL"); + + assert_eq!( + UrlConfig::get_completion_url("openai"), + "https://api.openai.com/v1/chat/completions" + ); + assert_eq!( + UrlConfig::get_completion_url("anthropic"), 
+ "https://api.anthropic.com/v1/messages" + ); + assert_eq!( + UrlConfig::get_completion_url("ollama"), + "http://localhost:11434/api/chat" + ); + } + + #[test] + fn test_get_active_overrides() { + // Clean up any existing env vars first + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + env::remove_var("GEMINI_BASE_URL"); + env::remove_var("XAI_BASE_URL"); + env::remove_var("OLLAMA_BASE_URL"); + + // Should return empty when no overrides are set + assert_eq!(UrlConfig::get_active_overrides().len(), 0); + + // Set some overrides + env::set_var("OPENAI_BASE_URL", "https://custom-openai.example.com"); + env::set_var("ANTHROPIC_BASE_URL", "https://custom-anthropic.example.com"); + + let overrides = UrlConfig::get_active_overrides(); + assert_eq!(overrides.len(), 2); + + // Check if both providers are in the overrides + let providers: Vec = overrides.iter().map(|(p, _)| p.clone()).collect(); + assert!(providers.contains(&"openai".to_string())); + assert!(providers.contains(&"anthropic".to_string())); + + // Check URLs + let openai_override = overrides.iter().find(|(p, _)| p == "openai").unwrap(); + assert_eq!(openai_override.1, "https://custom-openai.example.com"); + + let anthropic_override = overrides.iter().find(|(p, _)| p == "anthropic").unwrap(); + assert_eq!(anthropic_override.1, "https://custom-anthropic.example.com"); + + // Clean up + env::remove_var("OPENAI_BASE_URL"); + env::remove_var("ANTHROPIC_BASE_URL"); + } +} diff --git a/worker/src/workspace_detector.rs b/worker/src/workspace_detector.rs new file mode 100644 index 0000000..7c27707 --- /dev/null +++ b/worker/src/workspace_detector.rs @@ -0,0 +1,315 @@ +use crate::prompt_types::*; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; + +/// ワークスペース検出とプロジェクト情報収集 +pub struct WorkspaceDetector; + +impl WorkspaceDetector { + /// 現在のディレクトリからワークスペースを検出し、コンテキストを構築 + pub fn detect_workspace() -> Result { + let current_dir = + std::env::current_dir().map_err(|e| PromptError::WorkspaceDetection(e.to_string()))?; + + Self::detect_workspace_from_path(¤t_dir) + } + + /// 指定されたパスからワークスペースを検出 + pub fn detect_workspace_from_path(start_path: &Path) -> Result { + // 1. プロジェクトルートを決定 + let root_path = Self::find_project_root(start_path)?; + + // 2. .nia/context.md を読み込み + let nia_md_content = Self::read_nia_md(&root_path); + let has_nia_md = nia_md_content.is_some(); + + // 3. プロジェクトタイプを推定 + let project_type = Self::detect_project_type(&root_path); + + // 4. Git情報を取得 + let git_info = Self::get_git_info(&root_path); + + // 5. プロジェクト名を決定 + let project_name = Self::determine_project_name(&root_path, &git_info); + + // 6. 
diff --git a/worker/src/workspace_detector.rs b/worker/src/workspace_detector.rs
new file mode 100644
index 0000000..7c27707
--- /dev/null
+++ b/worker/src/workspace_detector.rs
@@ -0,0 +1,315 @@
+use crate::prompt_types::*;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+/// Workspace detection and project information collection
+pub struct WorkspaceDetector;
+
+impl WorkspaceDetector {
+    /// Detect the workspace from the current directory and build its context
+    pub fn detect_workspace() -> Result<WorkspaceContext, PromptError> {
+        let current_dir =
+            std::env::current_dir().map_err(|e| PromptError::WorkspaceDetection(e.to_string()))?;
+
+        Self::detect_workspace_from_path(&current_dir)
+    }
+
+    /// Detect the workspace from the given path
+    pub fn detect_workspace_from_path(start_path: &Path) -> Result<WorkspaceContext, PromptError> {
+        // 1. Determine the project root
+        let root_path = Self::find_project_root(start_path)?;
+
+        // 2. Read .nia/context.md
+        let nia_md_content = Self::read_nia_md(&root_path);
+        let has_nia_md = nia_md_content.is_some();
+
+        // 3. Infer the project type
+        let project_type = Self::detect_project_type(&root_path);
+
+        // 4. Collect Git information
+        let git_info = Self::get_git_info(&root_path);
+
+        // 5. Determine the project name
+        let project_name = Self::determine_project_name(&root_path, &git_info);
+
+        // 6. Generate system information
+        let system_info = crate::prompt_types::SystemInfo::default();
+
+        Ok(WorkspaceContext {
+            root_path,
+            nia_md_content,
+            project_type,
+            git_info,
+            has_nia_md,
+            project_name,
+            system_info,
+        })
+    }
+
+    /// Find the project root (Git > .nia > current directory, in that order)
+    fn find_project_root(start_path: &Path) -> Result<PathBuf, PromptError> {
+        let mut current = start_path.to_path_buf();
+
+        loop {
+            // Check for a Git repository root
+            if current.join(".git").exists() {
+                return Ok(current);
+            }
+
+            // Check for a .nia directory
+            if current.join(".nia").exists() {
+                return Ok(current);
+            }
+
+            // Move up to the parent directory
+            match current.parent() {
+                Some(parent) => current = parent.to_path_buf(),
+                None => break,
+            }
+        }
+
+        // If nothing is found, return the starting path
+        Ok(start_path.to_path_buf())
+    }
+
+    /// Read the .nia/context.md file
+    fn read_nia_md(root_path: &Path) -> Option<String> {
+        let file_path = root_path.join(".nia/context.md");
+        if let Ok(content) = fs::read_to_string(&file_path) {
+            // Make sure the file size is reasonable (10 MB or less)
+            if content.len() <= 10 * 1024 * 1024 {
+                return Some(content);
+            }
+        }
+        None
+    }
+
+    /// Infer the project type
+    fn detect_project_type(root_path: &Path) -> Option<ProjectType> {
+        // Determine the project type from marker files
+        if root_path.join("Cargo.toml").exists() {
+            return Some(ProjectType::Rust);
+        }
+
+        if root_path.join("package.json").exists() {
+            // Distinguish TypeScript from JavaScript
+            if root_path.join("tsconfig.json").exists()
+                || root_path.join("src").join("index.ts").exists()
+                || Self::check_typescript_files(root_path)
+            {
+                return Some(ProjectType::TypeScript);
+            }
+            return Some(ProjectType::JavaScript);
+        }
+
+        if root_path.join("pyproject.toml").exists()
+            || root_path.join("setup.py").exists()
+            || root_path.join("requirements.txt").exists()
+        {
+            return Some(ProjectType::Python);
+        }
+
+        if root_path.join("go.mod").exists() {
+            return Some(ProjectType::Go);
+        }
+
+        if root_path.join("pom.xml").exists()
+            || root_path.join("build.gradle").exists()
+            || root_path.join("build.gradle.kts").exists()
+        {
+            return Some(ProjectType::Java);
+        }
+
+        if root_path.join("CMakeLists.txt").exists() || root_path.join("Makefile").exists() {
+            return Some(ProjectType::Cpp);
+        }
+
+        Some(ProjectType::Unknown)
+    }
+
+    /// Check for TypeScript files
+    fn check_typescript_files(root_path: &Path) -> bool {
+        // Look for .ts files in the src directory
+        let src_dir = root_path.join("src");
+        if src_dir.exists() {
+            if let Ok(entries) = fs::read_dir(&src_dir) {
+                for entry in entries.flatten() {
+                    if let Some(ext) = entry.path().extension() {
+                        if ext == "ts" || ext == "tsx" {
+                            return true;
+                        }
+                    }
+                }
+            }
+        }
+        false
+    }
+
+    /// Collect Git information
+    fn get_git_info(root_path: &Path) -> Option<GitInfo> {
+        if !root_path.join(".git").exists() {
+            return None;
+        }
+
+        let repo_name = Self::get_git_repo_name(root_path);
+        let current_branch = Self::get_git_current_branch(root_path);
+        let last_commit_summary = Self::get_git_last_commit(root_path);
+        let is_clean = Self::is_git_clean(root_path);
+
+        Some(GitInfo {
+            repo_name,
+            current_branch,
+            last_commit_summary,
+            is_clean,
+        })
+    }
+
+    /// Get the Git repository name
+    fn get_git_repo_name(root_path: &Path) -> Option<String> {
+        // Derive the name from the remote URL
+        let output = Command::new("git")
+            .args(&["remote", "get-url", "origin"])
+            .current_dir(root_path)
+            .output()
+            .ok()?;
+
+        if output.status.success() {
+            let url = String::from_utf8_lossy(&output.stdout).trim().to_string();
+            return Self::extract_repo_name_from_url(&url);
+        }
+
+        // Fallback: use the directory name
+        root_path
+            .file_name()
+            .and_then(|name| name.to_str())
+            .map(|s| s.to_string())
+    }
+
+    /// Extract the repository name from a Git URL
+    fn extract_repo_name_from_url(url: &str) -> Option<String> {
+        // Handle common hosting patterns (GitHub, GitLab, Bitbucket, ...)
+        if let Some(captures) = regex::Regex::new(r"([^/]+/[^/]+?)(?:\.git)?$")
+            .ok()?
+            .captures(url)
+        {
+            return Some(captures[1].to_string());
+        }
+
+        // SSH form: git@github.com:user/repo.git
+        if let Some(captures) = regex::Regex::new(r":([^/]+/[^/]+?)(?:\.git)?$")
+            .ok()?
+            .captures(url)
+        {
+            return Some(captures[1].to_string());
+        }
+
+        None
+    }
+
+    /// Get the current Git branch
+    fn get_git_current_branch(root_path: &Path) -> Option<String> {
+        let output = Command::new("git")
+            .args(&["branch", "--show-current"])
+            .current_dir(root_path)
+            .output()
+            .ok()?;
+
+        if output.status.success() {
+            let branch = String::from_utf8_lossy(&output.stdout).trim().to_string();
+            if !branch.is_empty() {
+                return Some(branch);
+            }
+        }
+
+        None
+    }
+
+    /// Get the summary of the latest commit
+    fn get_git_last_commit(root_path: &Path) -> Option<String> {
+        let output = Command::new("git")
+            .args(&["log", "-1", "--pretty=format:%s"])
+            .current_dir(root_path)
+            .output()
+            .ok()?;
+
+        if output.status.success() {
+            let commit = String::from_utf8_lossy(&output.stdout).trim().to_string();
+            if !commit.is_empty() {
+                return Some(commit);
+            }
+        }
+
+        None
+    }
+
+    /// Check whether the Git working tree is clean
+    fn is_git_clean(root_path: &Path) -> Option<bool> {
+        let output = Command::new("git")
+            .args(&["status", "--porcelain"])
+            .current_dir(root_path)
+            .output()
+            .ok()?;
+
+        if output.status.success() {
+            let status = String::from_utf8_lossy(&output.stdout);
+            return Some(status.trim().is_empty());
+        }
+
+        None
+    }
+
+    /// Determine the project name
+    fn determine_project_name(root_path: &Path, git_info: &Option<GitInfo>) -> Option<String> {
+        // 1. Use the Git repository name
+        if let Some(git) = git_info {
+            if let Some(repo_name) = &git.repo_name {
+                return Some(repo_name.clone());
+            }
+        }
+
+        // 2. Use the name field from Cargo.toml
+        if let Some(cargo_name) = Self::get_cargo_project_name(root_path) {
+            return Some(cargo_name);
+        }
+
+        // 3. Use the name field from package.json
+        if let Some(npm_name) = Self::get_npm_project_name(root_path) {
+            return Some(npm_name);
+        }
+
+        // 4. Fall back to the directory name
+        root_path
+            .file_name()
+            .and_then(|name| name.to_str())
+            .map(|s| s.to_string())
+    }
+
+    /// Get the project name from Cargo.toml
+    fn get_cargo_project_name(root_path: &Path) -> Option<String> {
+        let cargo_toml_path = root_path.join("Cargo.toml");
+        let content = fs::read_to_string(&cargo_toml_path).ok()?;
+
+        // Extract the name field with a simple line-based parse
+        for line in content.lines() {
+            if let Some(captures) = regex::Regex::new(r#"name\s*=\s*"([^"]+)""#)
+                .ok()?
+                .captures(line)
+            {
+                return Some(captures[1].to_string());
+            }
+        }
+
+        None
+    }
+
+    /// Get the project name from package.json
+    fn get_npm_project_name(root_path: &Path) -> Option<String> {
+        let package_json_path = root_path.join("package.json");
+        let content = fs::read_to_string(&package_json_path).ok()?;
+
+        // Parse the JSON and read the name field
+        let package_json: serde_json::Value = serde_json::from_str(&content).ok()?;
+        package_json.get("name")?.as_str().map(|s| s.to_string())
+    }
+}
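A typical call site for the detector, sketched under the assumption that PromptError derives Debug (it is constructed with a String payload above); field names are taken from the WorkspaceContext construction in detect_workspace_from_path:

    use crate::workspace_detector::WorkspaceDetector;

    // Illustrative only: detect the workspace and report what was found.
    fn print_workspace_summary() {
        match WorkspaceDetector::detect_workspace() {
            Ok(ws) => {
                println!("root: {}", ws.root_path.display());
                if let Some(name) = &ws.project_name {
                    println!("project: {}", name);
                }
                if let Some(git) = &ws.git_info {
                    println!("branch: {:?}, clean: {:?}", git.current_branch, git.is_clean);
                }
                println!("has .nia/context.md: {}", ws.has_nia_md);
            }
            Err(e) => eprintln!("workspace detection failed: {:?}", e),
        }
    }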