@@ -1,823 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 4
-
-[[package]]
-name = "adler2"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
-
-[[package]]
-name = "anyhow"
-version = "1.0.100"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
-
-[[package]]
-name = "auditable-serde"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5"
-dependencies = [
- "semver",
- "serde",
- "serde_json",
- "topological-sort",
-]
-
-[[package]]
-name = "bitflags"
-version = "2.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
-
-[[package]]
-name = "crc32fast"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
-name = "displaydoc"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "equivalent"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
-
-[[package]]
-name = "flate2"
-version = "1.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
-dependencies = [
- "crc32fast",
- "miniz_oxide",
-]
-
-[[package]]
-name = "foldhash"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
-
-[[package]]
-name = "form_urlencoded"
-version = "1.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
-dependencies = [
- "percent-encoding",
-]
-
-[[package]]
-name = "futures"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
-dependencies = [
- "futures-channel",
- "futures-core",
- "futures-executor",
- "futures-io",
- "futures-sink",
- "futures-task",
- "futures-util",
-]
-
-[[package]]
-name = "futures-channel"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
-dependencies = [
- "futures-core",
- "futures-sink",
-]
-
-[[package]]
-name = "futures-core"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
-
-[[package]]
-name = "futures-executor"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
-dependencies = [
- "futures-core",
- "futures-task",
- "futures-util",
-]
-
-[[package]]
-name = "futures-io"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
-
-[[package]]
-name = "futures-macro"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "futures-sink"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
-
-[[package]]
-name = "futures-task"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
-
-[[package]]
-name = "futures-util"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
-dependencies = [
- "futures-channel",
- "futures-core",
- "futures-io",
- "futures-macro",
- "futures-sink",
- "futures-task",
- "memchr",
- "pin-project-lite",
- "pin-utils",
- "slab",
-]
-
-[[package]]
-name = "google-ai"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "zed_extension_api",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.15.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
-dependencies = [
- "foldhash",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.16.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
-
-[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
-name = "icu_collections"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
-dependencies = [
- "displaydoc",
- "potential_utf",
- "yoke",
- "zerofrom",
- "zerovec",
-]
-
-[[package]]
-name = "icu_locale_core"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
-dependencies = [
- "displaydoc",
- "litemap",
- "tinystr",
- "writeable",
- "zerovec",
-]
-
-[[package]]
-name = "icu_normalizer"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
-dependencies = [
- "icu_collections",
- "icu_normalizer_data",
- "icu_properties",
- "icu_provider",
- "smallvec",
- "zerovec",
-]
-
-[[package]]
-name = "icu_normalizer_data"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
-
-[[package]]
-name = "icu_properties"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
-dependencies = [
- "icu_collections",
- "icu_locale_core",
- "icu_properties_data",
- "icu_provider",
- "zerotrie",
- "zerovec",
-]
-
-[[package]]
-name = "icu_properties_data"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
-
-[[package]]
-name = "icu_provider"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
-dependencies = [
- "displaydoc",
- "icu_locale_core",
- "writeable",
- "yoke",
- "zerofrom",
- "zerotrie",
- "zerovec",
-]
-
-[[package]]
-name = "id-arena"
-version = "2.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
-
-[[package]]
-name = "idna"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
-dependencies = [
- "idna_adapter",
- "smallvec",
- "utf8_iter",
-]
-
-[[package]]
-name = "idna_adapter"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
-dependencies = [
- "icu_normalizer",
- "icu_properties",
-]
-
-[[package]]
-name = "indexmap"
-version = "2.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
-dependencies = [
- "equivalent",
- "hashbrown 0.16.1",
- "serde",
- "serde_core",
-]
-
-[[package]]
-name = "itoa"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
-
-[[package]]
-name = "leb128fmt"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
-
-[[package]]
-name = "litemap"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
-
-[[package]]
-name = "log"
-version = "0.4.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
-
-[[package]]
-name = "memchr"
-version = "2.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
-
-[[package]]
-name = "miniz_oxide"
-version = "0.8.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
-dependencies = [
- "adler2",
- "simd-adler32",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.21.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
-
-[[package]]
-name = "percent-encoding"
-version = "2.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
-
-[[package]]
-name = "pin-project-lite"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
-
-[[package]]
-name = "pin-utils"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
-
-[[package]]
-name = "potential_utf"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
-dependencies = [
- "zerovec",
-]
-
-[[package]]
-name = "prettyplease"
-version = "0.2.37"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
-dependencies = [
- "proc-macro2",
- "syn",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.103"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.42"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "ryu"
-version = "1.0.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
-
-[[package]]
-name = "semver"
-version = "1.0.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
-dependencies = [
- "serde",
- "serde_core",
-]
-
-[[package]]
-name = "serde"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
-dependencies = [
- "serde_core",
- "serde_derive",
-]
-
-[[package]]
-name = "serde_core"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
-dependencies = [
- "serde_derive",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.145"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
-dependencies = [
- "itoa",
- "memchr",
- "ryu",
- "serde",
- "serde_core",
-]
-
-[[package]]
-name = "simd-adler32"
-version = "0.3.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
-
-[[package]]
-name = "slab"
-version = "0.4.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
-
-[[package]]
-name = "smallvec"
-version = "1.15.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
-
-[[package]]
-name = "spdx"
-version = "0.10.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3"
-dependencies = [
- "smallvec",
-]
-
-[[package]]
-name = "stable_deref_trait"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
-
-[[package]]
-name = "syn"
-version = "2.0.111"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "synstructure"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tinystr"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
-dependencies = [
- "displaydoc",
- "zerovec",
-]
-
-[[package]]
-name = "topological-sort"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d"
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.22"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
-
-[[package]]
-name = "unicode-xid"
-version = "0.2.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
-
-[[package]]
-name = "url"
-version = "2.5.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
-dependencies = [
- "form_urlencoded",
- "idna",
- "percent-encoding",
- "serde",
-]
-
-[[package]]
-name = "utf8_iter"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
-
-[[package]]
-name = "wasm-encoder"
-version = "0.227.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822"
-dependencies = [
- "leb128fmt",
- "wasmparser",
-]
-
-[[package]]
-name = "wasm-metadata"
-version = "0.227.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d"
-dependencies = [
- "anyhow",
- "auditable-serde",
- "flate2",
- "indexmap",
- "serde",
- "serde_derive",
- "serde_json",
- "spdx",
- "url",
- "wasm-encoder",
- "wasmparser",
-]
-
-[[package]]
-name = "wasmparser"
-version = "0.227.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2"
-dependencies = [
- "bitflags",
- "hashbrown 0.15.5",
- "indexmap",
- "semver",
-]
-
-[[package]]
-name = "wit-bindgen"
-version = "0.41.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de"
-dependencies = [
- "wit-bindgen-rt",
- "wit-bindgen-rust-macro",
-]
-
-[[package]]
-name = "wit-bindgen-core"
-version = "0.41.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b"
-dependencies = [
- "anyhow",
- "heck",
- "wit-parser",
-]
-
-[[package]]
-name = "wit-bindgen-rt"
-version = "0.41.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621"
-dependencies = [
- "bitflags",
- "futures",
- "once_cell",
-]
-
-[[package]]
-name = "wit-bindgen-rust"
-version = "0.41.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce"
-dependencies = [
- "anyhow",
- "heck",
- "indexmap",
- "prettyplease",
- "syn",
- "wasm-metadata",
- "wit-bindgen-core",
- "wit-component",
-]
-
-[[package]]
-name = "wit-bindgen-rust-macro"
-version = "0.41.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799"
-dependencies = [
- "anyhow",
- "prettyplease",
- "proc-macro2",
- "quote",
- "syn",
- "wit-bindgen-core",
- "wit-bindgen-rust",
-]
-
-[[package]]
-name = "wit-component"
-version = "0.227.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676"
-dependencies = [
- "anyhow",
- "bitflags",
- "indexmap",
- "log",
- "serde",
- "serde_derive",
- "serde_json",
- "wasm-encoder",
- "wasm-metadata",
- "wasmparser",
- "wit-parser",
-]
-
-[[package]]
-name = "wit-parser"
-version = "0.227.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11"
-dependencies = [
- "anyhow",
- "id-arena",
- "indexmap",
- "log",
- "semver",
- "serde",
- "serde_derive",
- "serde_json",
- "unicode-xid",
- "wasmparser",
-]
-
-[[package]]
-name = "writeable"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
-
-[[package]]
-name = "yoke"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
-dependencies = [
- "stable_deref_trait",
- "yoke-derive",
- "zerofrom",
-]
-
-[[package]]
-name = "yoke-derive"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
-
-[[package]]
-name = "zed_extension_api"
-version = "0.8.0"
-dependencies = [
- "serde",
- "serde_json",
- "wit-bindgen",
-]
-
-[[package]]
-name = "zerofrom"
-version = "0.1.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
-dependencies = [
- "zerofrom-derive",
-]
-
-[[package]]
-name = "zerofrom-derive"
-version = "0.1.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
-
-[[package]]
-name = "zerotrie"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
-dependencies = [
- "displaydoc",
- "yoke",
- "zerofrom",
-]
-
-[[package]]
-name = "zerovec"
-version = "0.11.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
-dependencies = [
- "yoke",
- "zerofrom",
- "zerovec-derive",
-]
-
-[[package]]
-name = "zerovec-derive"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
@@ -1,1128 +0,0 @@
-use std::collections::HashMap;
-use std::sync::atomic::{AtomicU64, Ordering};
-
-use serde::{Deserialize, Deserializer, Serialize, Serializer};
-use zed_extension_api::{
-    self as zed, http_client::HttpMethod, http_client::HttpRequest,
-    llm_get_env_var, llm_get_provider_settings,
-    LlmCacheConfiguration, LlmCompletionEvent, LlmCompletionRequest, LlmCustomModelConfig,
-    LlmMessageContent, LlmMessageRole, LlmModelCapabilities, LlmModelInfo, LlmProviderInfo,
-    LlmStopReason, LlmThinkingContent, LlmTokenUsage, LlmToolInputFormat, LlmToolUse,
-};
-
-static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
-
-pub const DEFAULT_API_URL: &str = "https://generativelanguage.googleapis.com";
-
-fn get_api_url() -> String {
- llm_get_provider_settings(PROVIDER_ID)
- .and_then(|s| s.api_url)
- .unwrap_or_else(|| DEFAULT_API_URL.to_string())
-}
-
-fn get_custom_models() -> Vec<LlmCustomModelConfig> {
- llm_get_provider_settings(PROVIDER_ID)
- .map(|s| s.available_models)
- .unwrap_or_default()
-}
-
-fn stream_generate_content(
- model_id: &str,
- request: &LlmCompletionRequest,
- streams: &mut HashMap<String, StreamState>,
- next_stream_id: &mut u64,
-) -> Result<String, String> {
- let api_key = get_api_key().ok_or_else(|| "API key not configured".to_string())?;
-
- let generate_content_request = build_generate_content_request(model_id, request)?;
- validate_generate_content_request(&generate_content_request)?;
-
- let api_url = get_api_url();
- let uri = format!(
- "{}/v1beta/models/{}:streamGenerateContent?alt=sse&key={}",
- api_url, model_id, api_key
- );
-
- let body = serde_json::to_vec(&generate_content_request)
- .map_err(|e| format!("Failed to serialize request: {}", e))?;
-
- let http_request = HttpRequest::builder()
- .method(HttpMethod::Post)
- .url(&uri)
- .header("Content-Type", "application/json")
- .body(body)
- .build()?;
-
- let response_stream = http_request.fetch_stream()?;
-
- let stream_id = format!("stream-{}", *next_stream_id);
- *next_stream_id += 1;
-
- streams.insert(
- stream_id.clone(),
- StreamState {
- response_stream,
- buffer: String::new(),
- usage: None,
- pending_events: Vec::new(),
- wants_to_use_tool: false,
- },
- );
-
- Ok(stream_id)
-}
-
-fn count_tokens(model_id: &str, request: &LlmCompletionRequest) -> Result<u64, String> {
- let api_key = get_api_key().ok_or_else(|| "API key not configured".to_string())?;
-
- let generate_content_request = build_generate_content_request(model_id, request)?;
- validate_generate_content_request(&generate_content_request)?;
- let count_request = CountTokensRequest {
- generate_content_request,
- };
-
- let api_url = get_api_url();
- let uri = format!(
- "{}/v1beta/models/{}:countTokens?key={}",
- api_url, model_id, api_key
- );
-
- let body = serde_json::to_vec(&count_request)
- .map_err(|e| format!("Failed to serialize request: {}", e))?;
-
- let http_request = HttpRequest::builder()
- .method(HttpMethod::Post)
- .url(&uri)
- .header("Content-Type", "application/json")
- .body(body)
- .build()?;
-
- let response = http_request.fetch()?;
- let response_body: CountTokensResponse = serde_json::from_slice(&response.body)
- .map_err(|e| format!("Failed to parse response: {}", e))?;
-
- Ok(response_body.total_tokens)
-}
-
-fn validate_generate_content_request(request: &GenerateContentRequest) -> Result<(), String> {
- if request.model.is_empty() {
- return Err("Model must be specified".to_string());
- }
-
- if request.contents.is_empty() {
- return Err("Request must contain at least one content item".to_string());
- }
-
- if let Some(user_content) = request
- .contents
- .iter()
- .find(|content| content.role == Role::User)
- {
- if user_content.parts.is_empty() {
- return Err("User content must contain at least one part".to_string());
- }
- }
-
- Ok(())
-}
-
-// Extension implementation
-
-const PROVIDER_ID: &str = "google";
-const PROVIDER_NAME: &str = "Google AI";
-
-struct GoogleAiExtension {
- streams: HashMap<String, StreamState>,
- next_stream_id: u64,
-}
-
-struct StreamState {
- response_stream: zed::http_client::HttpResponseStream,
- buffer: String,
- usage: Option<UsageMetadata>,
- pending_events: Vec<LlmCompletionEvent>,
- wants_to_use_tool: bool,
-}
-
-impl zed::Extension for GoogleAiExtension {
- fn new() -> Self {
- Self {
- streams: HashMap::new(),
- next_stream_id: 0,
- }
- }
-
- fn llm_providers(&self) -> Vec<LlmProviderInfo> {
- vec![LlmProviderInfo {
- id: PROVIDER_ID.to_string(),
- name: PROVIDER_NAME.to_string(),
- icon: Some("icons/google-ai.svg".to_string()),
- }]
- }
-
- fn llm_provider_models(&self, provider_id: &str) -> Result<Vec<LlmModelInfo>, String> {
- if provider_id != PROVIDER_ID {
- return Err(format!("Unknown provider: {}", provider_id));
- }
- Ok(get_models())
- }
-
- fn llm_provider_settings_markdown(&self, provider_id: &str) -> Option<String> {
- if provider_id != PROVIDER_ID {
- return None;
- }
-
- Some(
- r#"## Google AI Setup
-
-To use Google AI models in Zed, you need a Gemini API key.
-
-1. Go to [Google AI Studio](https://aistudio.google.com/apikey)
-2. Create or select a project
-3. Generate an API key
-4. Set the `GEMINI_API_KEY` or `GOOGLE_AI_API_KEY` environment variable
-
-You can set this in your shell profile or use a `.envrc` file with [direnv](https://direnv.net/).
-"#
- .to_string(),
- )
- }
-
- fn llm_provider_is_authenticated(&self, provider_id: &str) -> bool {
- if provider_id != PROVIDER_ID {
- return false;
- }
- get_api_key().is_some()
- }
-
- fn llm_provider_reset_credentials(&mut self, provider_id: &str) -> Result<(), String> {
- if provider_id != PROVIDER_ID {
- return Err(format!("Unknown provider: {}", provider_id));
- }
- Ok(())
- }
-
- fn llm_count_tokens(
- &self,
- provider_id: &str,
- model_id: &str,
- request: &LlmCompletionRequest,
- ) -> Result<u64, String> {
- if provider_id != PROVIDER_ID {
- return Err(format!("Unknown provider: {}", provider_id));
- }
- count_tokens(model_id, request)
- }
-
- fn llm_stream_completion_start(
- &mut self,
- provider_id: &str,
- model_id: &str,
- request: &LlmCompletionRequest,
- ) -> Result<String, String> {
- if provider_id != PROVIDER_ID {
- return Err(format!("Unknown provider: {}", provider_id));
- }
- stream_generate_content(model_id, request, &mut self.streams, &mut self.next_stream_id)
- }
-
- fn llm_stream_completion_next(
- &mut self,
- stream_id: &str,
- ) -> Result<Option<LlmCompletionEvent>, String> {
- stream_generate_content_next(stream_id, &mut self.streams)
- }
-
- fn llm_stream_completion_close(&mut self, stream_id: &str) {
- self.streams.remove(stream_id);
- }
-
- fn llm_cache_configuration(
- &self,
- provider_id: &str,
- _model_id: &str,
- ) -> Option<LlmCacheConfiguration> {
- if provider_id != PROVIDER_ID {
- return None;
- }
-
- Some(LlmCacheConfiguration {
- max_cache_anchors: 1,
- should_cache_tool_definitions: false,
- min_total_token_count: 32768,
- })
- }
-}
-
-zed::register_extension!(GoogleAiExtension);
-
-// Helper functions
-
-fn get_api_key() -> Option<String> {
- llm_get_env_var("GEMINI_API_KEY").or_else(|| llm_get_env_var("GOOGLE_AI_API_KEY"))
-}
-
-fn get_default_models() -> Vec<LlmModelInfo> {
- vec![
- LlmModelInfo {
- id: "gemini-2.5-flash-lite".to_string(),
- name: "Gemini 2.5 Flash-Lite".to_string(),
- max_token_count: 1_048_576,
- max_output_tokens: Some(65_536),
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: true,
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: false,
- is_default_fast: true,
- },
- LlmModelInfo {
- id: "gemini-2.5-flash".to_string(),
- name: "Gemini 2.5 Flash".to_string(),
- max_token_count: 1_048_576,
- max_output_tokens: Some(65_536),
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: true,
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: true,
- is_default_fast: false,
- },
- LlmModelInfo {
- id: "gemini-2.5-pro".to_string(),
- name: "Gemini 2.5 Pro".to_string(),
- max_token_count: 1_048_576,
- max_output_tokens: Some(65_536),
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: true,
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: false,
- is_default_fast: false,
- },
- LlmModelInfo {
- id: "gemini-3-pro-preview".to_string(),
- name: "Gemini 3 Pro".to_string(),
- max_token_count: 1_048_576,
- max_output_tokens: Some(65_536),
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: true,
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: false,
- is_default_fast: false,
- },
- LlmModelInfo {
- id: "gemini-3-flash-preview".to_string(),
- name: "Gemini 3 Flash".to_string(),
- max_token_count: 1_048_576,
- max_output_tokens: Some(65_536),
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: false,
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: false,
- is_default_fast: false,
- },
- ]
-}
-
-/// Model aliases for backward compatibility with old model names.
-/// Maps old names to canonical model IDs.
-fn get_model_aliases() -> Vec<(&'static str, &'static str)> {
- vec![
- // Gemini 2.5 Flash-Lite aliases
- ("gemini-2.5-flash-lite-preview-06-17", "gemini-2.5-flash-lite"),
- ("gemini-2.0-flash-lite-preview", "gemini-2.5-flash-lite"),
- // Gemini 2.5 Flash aliases
- ("gemini-2.0-flash-thinking-exp", "gemini-2.5-flash"),
- ("gemini-2.5-flash-preview-04-17", "gemini-2.5-flash"),
- ("gemini-2.5-flash-preview-05-20", "gemini-2.5-flash"),
- ("gemini-2.5-flash-preview-latest", "gemini-2.5-flash"),
- ("gemini-2.0-flash", "gemini-2.5-flash"),
- // Gemini 2.5 Pro aliases
- ("gemini-2.0-pro-exp", "gemini-2.5-pro"),
- ("gemini-2.5-pro-preview-latest", "gemini-2.5-pro"),
- ("gemini-2.5-pro-exp-03-25", "gemini-2.5-pro"),
- ("gemini-2.5-pro-preview-03-25", "gemini-2.5-pro"),
- ("gemini-2.5-pro-preview-05-06", "gemini-2.5-pro"),
- ("gemini-2.5-pro-preview-06-05", "gemini-2.5-pro"),
- ]
-}
-
-fn get_models() -> Vec<LlmModelInfo> {
- let mut models: HashMap<String, LlmModelInfo> = HashMap::new();
-
- // Add default models
- for model in get_default_models() {
- models.insert(model.id.clone(), model);
- }
-
- // Add aliases as separate model entries (pointing to the same underlying model)
- for (alias, canonical_id) in get_model_aliases() {
- if let Some(canonical_model) = models.get(canonical_id) {
- let mut alias_model = canonical_model.clone();
- alias_model.id = alias.to_string();
- alias_model.is_default = false;
- alias_model.is_default_fast = false;
- models.insert(alias.to_string(), alias_model);
- }
- }
-
- // Add/override with custom models from settings
- for custom_model in get_custom_models() {
- let model = LlmModelInfo {
- id: custom_model.name.clone(),
- name: custom_model.display_name.unwrap_or(custom_model.name.clone()),
- max_token_count: custom_model.max_tokens,
- max_output_tokens: custom_model.max_output_tokens,
- capabilities: LlmModelCapabilities {
- supports_images: true,
- supports_tools: true,
- supports_tool_choice_auto: true,
- supports_tool_choice_any: true,
- supports_tool_choice_none: true,
- supports_thinking: custom_model.thinking_budget.is_some(),
- tool_input_format: LlmToolInputFormat::JsonSchemaSubset,
- },
- is_default: false,
- is_default_fast: false,
- };
- models.insert(custom_model.name, model);
- }
-
- models.into_values().collect()
-}
-
-/// Get the thinking budget for a specific model from custom settings.
-fn get_model_thinking_budget(model_id: &str) -> Option<u32> {
- get_custom_models()
- .into_iter()
- .find(|m| m.name == model_id)
- .and_then(|m| m.thinking_budget)
-}
-
-fn stream_generate_content_next(
- stream_id: &str,
- streams: &mut HashMap<String, StreamState>,
-) -> Result<Option<LlmCompletionEvent>, String> {
- let state = streams
- .get_mut(stream_id)
- .ok_or_else(|| format!("Unknown stream: {}", stream_id))?;
-
- loop {
- // Return any pending events first
- if let Some(event) = state.pending_events.pop() {
- return Ok(Some(event));
- }
-
- if let Some(newline_pos) = state.buffer.find('\n') {
- let line = state.buffer[..newline_pos].to_string();
- state.buffer = state.buffer[newline_pos + 1..].to_string();
-
- if let Some(data) = line.strip_prefix("data: ") {
- if data.trim().is_empty() {
- continue;
- }
-
- let response: GenerateContentResponse = match serde_json::from_str(data) {
- Ok(response) => response,
- Err(parse_error) => {
- // Try to parse as an API error response
- if let Ok(api_error) = serde_json::from_str::<ApiErrorResponse>(data) {
- let error_msg = api_error
- .error
- .message
- .unwrap_or_else(|| "Unknown API error".to_string());
- let status = api_error.error.status.unwrap_or_default();
- let code = api_error.error.code.unwrap_or(0);
- return Err(format!(
- "Google AI API error ({}): {} [status: {}]",
- code, error_msg, status
- ));
- }
- // If it's not an error response, return the parse error
- return Err(format!(
- "Failed to parse SSE data: {} - {}",
- parse_error, data
- ));
- }
- };
-
- // Handle prompt feedback (blocked prompts)
- if let Some(ref prompt_feedback) = response.prompt_feedback {
-                    if prompt_feedback.block_reason.is_some() {
-                        // Any block reason ("SAFETY", "OTHER", "BLOCKLIST",
-                        // "PROHIBITED_CONTENT", "IMAGE_SAFETY", ...) is
-                        // treated as a refusal: the prompt was rejected
-                        // before any content was produced, so the specific
-                        // reason does not need to be inspected further.
- return Ok(Some(LlmCompletionEvent::Stop(LlmStopReason::Refusal)));
- }
- }
-
- // Send usage updates immediately when received
- if let Some(ref usage) = response.usage_metadata {
- let cached_tokens = usage.cached_content_token_count.unwrap_or(0);
- let prompt_tokens = usage.prompt_token_count.unwrap_or(0);
- let input_tokens = prompt_tokens.saturating_sub(cached_tokens);
- state.pending_events.push(LlmCompletionEvent::Usage(LlmTokenUsage {
- input_tokens,
- output_tokens: usage.candidates_token_count.unwrap_or(0),
- cache_creation_input_tokens: None,
- cache_read_input_tokens: Some(cached_tokens).filter(|&c| c > 0),
- }));
- state.usage = Some(usage.clone());
- }
-
- if let Some(candidates) = response.candidates {
- for candidate in candidates {
- for part in candidate.content.parts {
- match part {
- Part::TextPart(text_part) => {
- return Ok(Some(LlmCompletionEvent::Text(text_part.text)));
- }
- Part::ThoughtPart(thought_part) => {
- return Ok(Some(LlmCompletionEvent::Thinking(
- LlmThinkingContent {
- text: "(Encrypted thought)".to_string(),
- signature: Some(thought_part.thought_signature),
- },
- )));
- }
- Part::FunctionCallPart(fc_part) => {
- state.wants_to_use_tool = true;
- // Normalize empty string signatures to None
- let thought_signature =
- fc_part.thought_signature.filter(|s| !s.is_empty());
-                                    // Generate a unique tool use ID, as the built-in (hardcoded) implementation does
- let next_tool_id = TOOL_CALL_COUNTER.fetch_add(1, Ordering::SeqCst);
- let tool_use_id = format!("{}-{}", fc_part.function_call.name, next_tool_id);
- return Ok(Some(LlmCompletionEvent::ToolUse(LlmToolUse {
- id: tool_use_id,
- name: fc_part.function_call.name,
- input: serde_json::to_string(&fc_part.function_call.args)
- .unwrap_or_default(),
- is_input_complete: true,
- thought_signature,
- })));
- }
- _ => {}
- }
- }
-
- if let Some(finish_reason) = candidate.finish_reason {
- // Even when Gemini wants to use a Tool, the API
- // responds with `finish_reason: STOP`, so we check
- // wants_to_use_tool to override
- let stop_reason = if state.wants_to_use_tool {
- LlmStopReason::ToolUse
- } else {
- match finish_reason.as_str() {
- "STOP" => LlmStopReason::EndTurn,
- "MAX_TOKENS" => LlmStopReason::MaxTokens,
- "TOOL_USE" | "FUNCTION_CALL" => LlmStopReason::ToolUse,
- "SAFETY" | "RECITATION" | "OTHER" => LlmStopReason::Refusal,
- _ => LlmStopReason::EndTurn,
- }
- };
-
- return Ok(Some(LlmCompletionEvent::Stop(stop_reason)));
- }
- }
- }
- }
-
- continue;
- }
-
- // Check if the buffer contains a non-SSE error response (no "data: " prefix)
- // This can happen when Google returns an immediate error without streaming
- if !state.buffer.is_empty()
- && !state.buffer.contains("data: ")
- && state.buffer.contains("\"error\"")
- {
- // Try to parse the entire buffer as an error response
- if let Ok(api_error) = serde_json::from_str::<ApiErrorResponse>(&state.buffer) {
- let error_msg = api_error
- .error
- .message
- .unwrap_or_else(|| "Unknown API error".to_string());
- let status = api_error.error.status.unwrap_or_default();
- let code = api_error.error.code.unwrap_or(0);
- streams.remove(stream_id);
- return Err(format!(
- "Google AI API error ({}): {} [status: {}]",
- code, error_msg, status
- ));
- }
- }
-
- match state.response_stream.next_chunk() {
- Ok(Some(chunk)) => {
- let chunk_str = String::from_utf8_lossy(&chunk);
- state.buffer.push_str(&chunk_str);
- }
- Ok(None) => {
- streams.remove(stream_id);
- return Ok(None);
- }
- Err(e) => {
- streams.remove(stream_id);
- return Err(e);
- }
- }
- }
-}
-
-fn build_generate_content_request(
- model_id: &str,
- request: &LlmCompletionRequest,
-) -> Result<GenerateContentRequest, String> {
- let mut contents: Vec<Content> = Vec::new();
- let mut system_instruction: Option<SystemInstruction> = None;
-
- for message in &request.messages {
- match message.role {
- LlmMessageRole::System => {
- let parts = convert_content_to_parts(&message.content)?;
- system_instruction = Some(SystemInstruction { parts });
- }
- LlmMessageRole::User | LlmMessageRole::Assistant => {
- let role = match message.role {
- LlmMessageRole::User => Role::User,
- LlmMessageRole::Assistant => Role::Model,
- _ => continue,
- };
- let parts = convert_content_to_parts(&message.content)?;
- contents.push(Content { parts, role });
- }
- }
- }
-
- let tools = if !request.tools.is_empty() {
- Some(vec![Tool {
- function_declarations: request
- .tools
- .iter()
- .map(|t| FunctionDeclaration {
- name: t.name.clone(),
- description: t.description.clone(),
- parameters: serde_json::from_str(&t.input_schema).unwrap_or_default(),
- })
- .collect(),
- }])
- } else {
- None
- };
-
- let tool_config = request.tool_choice.as_ref().map(|choice| {
- let mode = match choice {
- zed::LlmToolChoice::Auto => FunctionCallingMode::Auto,
- zed::LlmToolChoice::Any => FunctionCallingMode::Any,
- zed::LlmToolChoice::None => FunctionCallingMode::None,
- };
- ToolConfig {
- function_calling_config: FunctionCallingConfig {
- mode,
- allowed_function_names: None,
- },
- }
- });
-
- let generation_config = Some(GenerationConfig {
- candidate_count: Some(1),
- stop_sequences: if request.stop_sequences.is_empty() {
- None
- } else {
- Some(request.stop_sequences.clone())
- },
- max_output_tokens: request.max_tokens.map(|t| t as usize),
- temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
- top_p: None,
- top_k: None,
- thinking_config: if request.thinking_allowed {
- // Check if this model has a custom thinking budget configured
- get_model_thinking_budget(model_id).map(|thinking_budget| ThinkingConfig {
- thinking_budget,
- })
- } else {
- None
- },
- });
-
- Ok(GenerateContentRequest {
- model: ModelName {
- model_id: model_id.to_string(),
- },
- contents,
- system_instruction,
- generation_config,
- safety_settings: None,
- tools,
- tool_config,
- })
-}
-
-fn convert_content_to_parts(content: &[LlmMessageContent]) -> Result<Vec<Part>, String> {
- let mut parts = Vec::new();
-
- for item in content {
- match item {
- LlmMessageContent::Text(text) => {
- parts.push(Part::TextPart(TextPart { text: text.clone() }));
- }
- LlmMessageContent::Image(image) => {
- parts.push(Part::InlineDataPart(InlineDataPart {
- inline_data: GenerativeContentBlob {
- mime_type: "image/png".to_string(),
- data: image.source.clone(),
- },
- }));
- }
- LlmMessageContent::ToolUse(tool_use) => {
- // Normalize empty string signatures to None
- let thought_signature = tool_use
- .thought_signature
- .clone()
- .filter(|s| !s.is_empty());
- parts.push(Part::FunctionCallPart(FunctionCallPart {
- function_call: FunctionCall {
- name: tool_use.name.clone(),
- args: serde_json::from_str(&tool_use.input).unwrap_or_default(),
- },
- thought_signature,
- }));
- }
- LlmMessageContent::ToolResult(tool_result) => {
- match &tool_result.content {
- zed::LlmToolResultContent::Text(text) => {
- parts.push(Part::FunctionResponsePart(FunctionResponsePart {
- function_response: FunctionResponse {
- name: tool_result.tool_name.clone(),
- response: serde_json::json!({ "output": text }),
- },
- }));
- }
- zed::LlmToolResultContent::Image(image) => {
- // Send both the function response and the image inline
- parts.push(Part::FunctionResponsePart(FunctionResponsePart {
- function_response: FunctionResponse {
- name: tool_result.tool_name.clone(),
- response: serde_json::json!({ "output": "Tool responded with an image" }),
- },
- }));
- parts.push(Part::InlineDataPart(InlineDataPart {
- inline_data: GenerativeContentBlob {
- mime_type: "image/png".to_string(),
- data: image.source.clone(),
- },
- }));
- }
- }
- }
- LlmMessageContent::Thinking(thinking) => {
- if let Some(signature) = &thinking.signature {
- parts.push(Part::ThoughtPart(ThoughtPart {
- thought: true,
- thought_signature: signature.clone(),
- }));
- }
- }
- LlmMessageContent::RedactedThinking(_) => {}
- }
- }
-
- Ok(parts)
-}
-
-// Data structures for Google AI API
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GenerateContentRequest {
- #[serde(default, skip_serializing_if = "ModelName::is_empty")]
- pub model: ModelName,
- pub contents: Vec<Content>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub system_instruction: Option<SystemInstruction>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub generation_config: Option<GenerationConfig>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub safety_settings: Option<Vec<SafetySetting>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tools: Option<Vec<Tool>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tool_config: Option<ToolConfig>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GenerateContentResponse {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub candidates: Option<Vec<GenerateContentCandidate>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub prompt_feedback: Option<PromptFeedback>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub usage_metadata: Option<UsageMetadata>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GenerateContentCandidate {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub index: Option<usize>,
- pub content: Content,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub finish_reason: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub finish_message: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub safety_ratings: Option<Vec<SafetyRating>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub citation_metadata: Option<CitationMetadata>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Content {
- #[serde(default)]
- pub parts: Vec<Part>,
- pub role: Role,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SystemInstruction {
- pub parts: Vec<Part>,
-}
-
-#[derive(Debug, PartialEq, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub enum Role {
- User,
- Model,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum Part {
- TextPart(TextPart),
- InlineDataPart(InlineDataPart),
- FunctionCallPart(FunctionCallPart),
- FunctionResponsePart(FunctionResponsePart),
- ThoughtPart(ThoughtPart),
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextPart {
- pub text: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlineDataPart {
- pub inline_data: GenerativeContentBlob,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GenerativeContentBlob {
- pub mime_type: String,
- pub data: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FunctionCallPart {
- pub function_call: FunctionCall,
- /// Thought signature returned by the model for function calls.
- /// Only present on the first function call in parallel call scenarios.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub thought_signature: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FunctionResponsePart {
- pub function_response: FunctionResponse,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ThoughtPart {
- pub thought: bool,
- pub thought_signature: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CitationSource {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub start_index: Option<usize>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub end_index: Option<usize>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub uri: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub license: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CitationMetadata {
- pub citation_sources: Vec<CitationSource>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct PromptFeedback {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub block_reason: Option<String>,
- pub safety_ratings: Option<Vec<SafetyRating>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub block_reason_message: Option<String>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Default)]
-#[serde(rename_all = "camelCase")]
-pub struct UsageMetadata {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub prompt_token_count: Option<u64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cached_content_token_count: Option<u64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub candidates_token_count: Option<u64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tool_use_prompt_token_count: Option<u64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub thoughts_token_count: Option<u64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub total_token_count: Option<u64>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ThinkingConfig {
- pub thinking_budget: u32,
-}
-
-#[derive(Debug, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GenerationConfig {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub candidate_count: Option<usize>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub stop_sequences: Option<Vec<String>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub max_output_tokens: Option<usize>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub temperature: Option<f64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub top_p: Option<f64>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub top_k: Option<usize>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub thinking_config: Option<ThinkingConfig>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SafetySetting {
- pub category: HarmCategory,
- pub threshold: HarmBlockThreshold,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub enum HarmCategory {
- #[serde(rename = "HARM_CATEGORY_UNSPECIFIED")]
- Unspecified,
- #[serde(rename = "HARM_CATEGORY_DEROGATORY")]
- Derogatory,
- #[serde(rename = "HARM_CATEGORY_TOXICITY")]
- Toxicity,
- #[serde(rename = "HARM_CATEGORY_VIOLENCE")]
- Violence,
- #[serde(rename = "HARM_CATEGORY_SEXUAL")]
- Sexual,
- #[serde(rename = "HARM_CATEGORY_MEDICAL")]
- Medical,
- #[serde(rename = "HARM_CATEGORY_DANGEROUS")]
- Dangerous,
- #[serde(rename = "HARM_CATEGORY_HARASSMENT")]
- Harassment,
- #[serde(rename = "HARM_CATEGORY_HATE_SPEECH")]
- HateSpeech,
- #[serde(rename = "HARM_CATEGORY_SEXUALLY_EXPLICIT")]
- SexuallyExplicit,
- #[serde(rename = "HARM_CATEGORY_DANGEROUS_CONTENT")]
- DangerousContent,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
-pub enum HarmBlockThreshold {
- #[serde(rename = "HARM_BLOCK_THRESHOLD_UNSPECIFIED")]
- Unspecified,
- BlockLowAndAbove,
- BlockMediumAndAbove,
- BlockOnlyHigh,
- BlockNone,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
-pub enum HarmProbability {
- #[serde(rename = "HARM_PROBABILITY_UNSPECIFIED")]
- Unspecified,
- Negligible,
- Low,
- Medium,
- High,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SafetyRating {
- pub category: HarmCategory,
- pub probability: HarmProbability,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CountTokensRequest {
- pub generate_content_request: GenerateContentRequest,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CountTokensResponse {
- pub total_tokens: u64,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct FunctionCall {
- pub name: String,
- pub args: serde_json::Value,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct FunctionResponse {
- pub name: String,
- pub response: serde_json::Value,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Tool {
- pub function_declarations: Vec<FunctionDeclaration>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ToolConfig {
- pub function_calling_config: FunctionCallingConfig,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FunctionCallingConfig {
- pub mode: FunctionCallingMode,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub allowed_function_names: Option<Vec<String>>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "lowercase")]
-pub enum FunctionCallingMode {
- Auto,
- Any,
- None,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct FunctionDeclaration {
- pub name: String,
- pub description: String,
- pub parameters: serde_json::Value,
-}
-
-#[derive(Debug, Default)]
-pub struct ModelName {
- pub model_id: String,
-}
-
-impl ModelName {
- pub fn is_empty(&self) -> bool {
- self.model_id.is_empty()
- }
-}
-
-const MODEL_NAME_PREFIX: &str = "models/";
-
-/// Google API error response structure
-#[derive(Debug, Deserialize)]
-pub struct ApiErrorResponse {
- pub error: ApiError,
-}
-
-#[derive(Debug, Deserialize)]
-pub struct ApiError {
- pub code: Option<u16>,
- pub message: Option<String>,
- pub status: Option<String>,
-}
-
-impl Serialize for ModelName {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: Serializer,
- {
- serializer.serialize_str(&format!("{MODEL_NAME_PREFIX}{}", &self.model_id))
- }
-}
-
-impl<'de> Deserialize<'de> for ModelName {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: Deserializer<'de>,
- {
- let string = String::deserialize(deserializer)?;
- if let Some(id) = string.strip_prefix(MODEL_NAME_PREFIX) {
- Ok(Self {
- model_id: id.to_string(),
- })
- } else {
- Err(serde::de::Error::custom(format!(
- "Expected model name to begin with {}, got: {}",
- MODEL_NAME_PREFIX, string
- )))
- }
- }
-}