Detailed changes
@@ -28,7 +28,15 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- target: x86_64-apple-darwin
+ target: aarch64-apple-darwin
+ profile: minimal
+ default: true
+
+ - name: Install Rust wasm32-wasi target
+ uses: actions-rs/toolchain@v1
+ with:
+ toolchain: stable
+ target: wasm32-wasi
profile: minimal
- name: Install Node
@@ -58,18 +66,26 @@ jobs:
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
steps:
+ - name: Install Rust aarch64-apple-darwin target
+ uses: actions-rs/toolchain@v1
+ with:
+ toolchain: stable
+ target: aarch64-apple-darwin
+ profile: minimal
+ default: true
+
- name: Install Rust x86_64-apple-darwin target
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: x86_64-apple-darwin
profile: minimal
-
- - name: Install Rust aarch64-apple-darwin target
+
+ - name: Install Rust wasm32-wasi target
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- target: aarch64-apple-darwin
+ target: wasm32-wasi
profile: minimal
- name: Install Node
@@ -1,6 +1,7 @@
-/target
+**/target
/zed.xcworkspace
.DS_Store
+/plugins/bin
/script/node_modules
/styles/node_modules
/crates/collab/.env.toml
@@ -45,7 +45,7 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
- "getrandom 0.2.6",
+ "getrandom 0.2.7",
"once_cell",
"version_check",
]
@@ -98,6 +98,12 @@ dependencies = [
"winapi 0.3.9",
]
+[[package]]
+name = "ambient-authority"
+version = "0.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec8ad6edb4840b78c5c3d88de606b22252d552b55f3a4699fbb10fc070ec3049"
+
[[package]]
name = "ansi_term"
version = "0.12.1"
@@ -109,9 +115,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.57"
+version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
+checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
[[package]]
name = "arrayref"
@@ -376,7 +382,7 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
"winapi 0.3.9",
]
@@ -420,9 +426,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "axum"
-version = "0.5.6"
+version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab2504b827a8bef941ba3dd64bdffe9cf56ca182908a147edd6189c95fbcae7d"
+checksum = "c2cc6e8e8c993cb61a005fab8c1e5093a29199b7253b05a6883999312935c1ff"
dependencies = [
"async-trait",
"axum-core",
@@ -455,9 +461,9 @@ dependencies = [
[[package]]
name = "axum-core"
-version = "0.2.4"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da31c0ed7b4690e2c78fe4b880d21cd7db04a346ebc658b4270251b695437f17"
+checksum = "cf4d047478b986f14a13edad31a009e2e05cb241f9805d0d75e4cba4e129ad4d"
dependencies = [
"async-trait",
"bytes",
@@ -469,17 +475,19 @@ dependencies = [
[[package]]
name = "axum-extra"
-version = "0.3.3"
+version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75330529f6b27544cedc6089108602a056d016df6aa4f2cb24408d840392ef2d"
+checksum = "277c75e6c814b061ae4947d02335d9659db9771b9950cca670002ae986372f44"
dependencies = [
"axum",
"bytes",
+ "futures-util",
"http",
"mime",
"pin-project-lite 0.2.9",
"serde",
"serde_json",
+ "tokio",
"tower",
"tower-http",
"tower-layer",
@@ -515,9 +523,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "base64ct"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dea908e7347a8c64e378c17e30ef880ad73e3b4498346b055c2c00ea342f3179"
+checksum = "3bdca834647821e0b13d9539a8634eb62d3501b6b6c2cec1722786ee6671b851"
[[package]]
name = "bincode"
@@ -639,9 +647,9 @@ checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3"
[[package]]
name = "bytemuck"
-version = "1.9.1"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc"
+checksum = "c53dfa917ec274df8ed3c572698f381a24eef2efba9492d797301b72b6db408a"
[[package]]
name = "byteorder"
@@ -672,6 +680,72 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c"
+[[package]]
+name = "cap-fs-ext"
+version = "0.24.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e54b86398b5852ddd45784b1d9b196b98beb39171821bad4b8b44534a1e87927"
+dependencies = [
+ "cap-primitives",
+ "cap-std",
+ "io-lifetimes",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "cap-primitives"
+version = "0.24.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb8fca3e81fae1d91a36e9784ca22a39ef623702b5f7904d89dc31f10184a178"
+dependencies = [
+ "ambient-authority",
+ "errno",
+ "fs-set-times",
+ "io-extras",
+ "io-lifetimes",
+ "ipnet",
+ "maybe-owned",
+ "rustix",
+ "winapi 0.3.9",
+ "winapi-util",
+ "winx",
+]
+
+[[package]]
+name = "cap-rand"
+version = "0.24.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3b27294116983d706f4c8168f6d10c84f9f5daed0c28bc7d0296cf16bcf971"
+dependencies = [
+ "ambient-authority",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "cap-std"
+version = "0.24.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2247568946095c7765ad2b441a56caffc08027734c634a6d5edda648f04e32eb"
+dependencies = [
+ "cap-primitives",
+ "io-extras",
+ "io-lifetimes",
+ "ipnet",
+ "rustix",
+]
+
+[[package]]
+name = "cap-time-ext"
+version = "0.24.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c50472b6ebc302af0401fa3fb939694cd8ff00e0d4c9182001e434fc822ab83a"
+dependencies = [
+ "cap-primitives",
+ "once_cell",
+ "rustix",
+ "winx",
+]
+
[[package]]
name = "castaway"
version = "0.1.2"
@@ -719,7 +793,7 @@ dependencies = [
"postage",
"settings",
"theme",
- "time 0.3.10",
+ "time 0.3.11",
"util",
"workspace",
]
@@ -733,7 +807,7 @@ dependencies = [
"libc",
"num-integer",
"num-traits",
- "time 0.1.43",
+ "time 0.1.44",
"winapi 0.3.9",
]
@@ -780,16 +854,16 @@ dependencies = [
[[package]]
name = "clap"
-version = "3.1.18"
+version = "3.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b"
+checksum = "190814073e85d238f31ff738fcb0bf6910cedeb73376c87cd69291028966fd83"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"clap_lex",
"indexmap",
- "lazy_static",
+ "once_cell",
"strsim 0.10.0",
"termcolor",
"textwrap 0.15.0",
@@ -797,9 +871,9 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "3.1.18"
+version = "3.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
+checksum = "759bf187376e1afa7b85b959e6a664a3e7a95203415dba952ad19139e798f902"
dependencies = [
"heck 0.4.0",
"proc-macro-error",
@@ -810,9 +884,9 @@ dependencies = [
[[package]]
name = "clap_lex"
-version = "0.2.0"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213"
+checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
@@ -822,7 +896,7 @@ name = "cli"
version = "0.1.0"
dependencies = [
"anyhow",
- "clap 3.1.18",
+ "clap 3.2.8",
"core-foundation",
"core-services",
"dirs 3.0.2",
@@ -852,7 +926,7 @@ dependencies = [
"smol",
"sum_tree",
"thiserror",
- "time 0.3.10",
+ "time 0.3.11",
"tiny_http",
"url",
"util",
@@ -913,7 +987,7 @@ dependencies = [
"axum",
"axum-extra",
"base64 0.13.0",
- "clap 3.1.18",
+ "clap 3.2.8",
"client",
"collections",
"ctor",
@@ -942,7 +1016,7 @@ dependencies = [
"sha-1 0.9.8",
"sqlx",
"theme",
- "time 0.3.10",
+ "time 0.3.11",
"tokio",
"tokio-tungstenite",
"toml",
@@ -1089,6 +1163,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "cpp_demangle"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eeaa953eaad386a53111e47172c2fedba671e5684c8dd601a5f474f4f118710f"
+dependencies = [
+ "cfg-if 1.0.0",
+]
+
[[package]]
name = "cpufeatures"
version = "0.2.2"
@@ -1098,6 +1181,102 @@ dependencies = [
"libc",
]
+[[package]]
+name = "cranelift-bforest"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7901fbba05decc537080b07cb3f1cadf53be7b7602ca8255786288a8692ae29a"
+dependencies = [
+ "cranelift-entity",
+]
+
+[[package]]
+name = "cranelift-codegen"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37ba1b45d243a4a28e12d26cd5f2507da74e77c45927d40de8b6ffbf088b46b5"
+dependencies = [
+ "cranelift-bforest",
+ "cranelift-codegen-meta",
+ "cranelift-codegen-shared",
+ "cranelift-entity",
+ "cranelift-isle",
+ "gimli",
+ "log",
+ "regalloc2",
+ "smallvec",
+ "target-lexicon",
+]
+
+[[package]]
+name = "cranelift-codegen-meta"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54cc30032171bf230ce22b99c07c3a1de1221cb5375bd6dbe6dbe77d0eed743c"
+dependencies = [
+ "cranelift-codegen-shared",
+]
+
+[[package]]
+name = "cranelift-codegen-shared"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a23f2672426d2bb4c9c3ef53e023076cfc4d8922f0eeaebaf372c92fae8b5c69"
+
+[[package]]
+name = "cranelift-entity"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "886c59a5e0de1f06dbb7da80db149c75de10d5e2caca07cdd9fef8a5918a6336"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cranelift-frontend"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ace74eeca11c439a9d4ed1a5cb9df31a54cd0f7fbddf82c8ce4ea8e9ad2a8fe0"
+dependencies = [
+ "cranelift-codegen",
+ "log",
+ "smallvec",
+ "target-lexicon",
+]
+
+[[package]]
+name = "cranelift-isle"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db1ae52a5cc2cad0d86fdd3dcb16b7217d2f1e65ab4f5814aa4f014ad335fa43"
+
+[[package]]
+name = "cranelift-native"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dadcfb7852900780d37102bce5698bcd401736403f07b52e714ff7a180e0e22f"
+dependencies = [
+ "cranelift-codegen",
+ "libc",
+ "target-lexicon",
+]
+
+[[package]]
+name = "cranelift-wasm"
+version = "0.85.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c84e3410960389110b88f97776f39f6d2c8becdaa4cd59e390e6b76d9d0e7190"
+dependencies = [
+ "cranelift-codegen",
+ "cranelift-entity",
+ "cranelift-frontend",
+ "itertools",
+ "log",
+ "smallvec",
+ "wasmparser",
+ "wasmtime-types",
+]
+
[[package]]
name = "crc"
version = "3.0.0"
@@ -1134,12 +1313,12 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
-version = "0.5.4"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
+checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c"
dependencies = [
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
@@ -1150,20 +1329,20 @@ checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-epoch",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
name = "crossbeam-epoch"
-version = "0.9.8"
+version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c"
+checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d"
dependencies = [
"autocfg 1.1.0",
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
- "lazy_static",
+ "crossbeam-utils 0.8.10",
"memoffset",
+ "once_cell",
"scopeguard",
]
@@ -1174,7 +1353,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
dependencies = [
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
@@ -1190,19 +1369,19 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
-version = "0.8.8"
+version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
dependencies = [
"cfg-if 1.0.0",
- "lazy_static",
+ "once_cell",
]
[[package]]
name = "crypto-common"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
+checksum = "5999502d32b9c48d492abe66392408144895020ec4709e549e840799f3bb74c0"
dependencies = [
"generic-array",
"typenum",
@@ -1334,6 +1513,16 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "directories-next"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "339ee130d97a610ea5a5872d2bbb130fdf68884ff09d3028b81bec8a1ac23bbc"
+dependencies = [
+ "cfg-if 1.0.0",
+ "dirs-sys-next",
+]
+
[[package]]
name = "dirs"
version = "3.0.2"
@@ -1404,9 +1593,9 @@ dependencies = [
[[package]]
name = "dyn-clone"
-version = "1.0.5"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21e50f3adc76d6a43f5ed73b698a87d0760ca74617f60f7c3b879003536fdd28"
+checksum = "140206b78fb2bc3edbcfc9b5ccbd0b30699cfe8d348b8b31b330e47df5291a5a"
[[package]]
name = "easy-parallel"
@@ -1456,9 +1645,9 @@ dependencies = [
[[package]]
name = "either"
-version = "1.6.1"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
[[package]]
name = "encoding_rs"
@@ -1493,13 +1682,34 @@ dependencies = [
[[package]]
name = "erased-serde"
-version = "0.3.20"
+version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad132dd8d0d0b546348d7d86cb3191aad14b34e5f979781fc005c80d4ac67ffd"
+checksum = "81d013529d5574a60caeda29e179e695125448e5de52e3874f7b4c1d7360e18e"
dependencies = [
"serde",
]
+[[package]]
+name = "errno"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
+dependencies = [
+ "errno-dragonfly",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "errno-dragonfly"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+dependencies = [
+ "cc",
+ "libc",
+]
+
[[package]]
name = "etagere"
version = "0.2.7"
@@ -1535,6 +1745,12 @@ dependencies = [
"pkg-config",
]
+[[package]]
+name = "fallible-iterator"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
+
[[package]]
name = "fastrand"
version = "1.7.0"
@@ -1544,6 +1760,16 @@ dependencies = [
"instant",
]
+[[package]]
+name = "file-per-thread-logger"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21e16290574b39ee41c71aeb90ae960c504ebaf1e2a1c87bd52aa56ed6e1a02f"
+dependencies = [
+ "env_logger",
+ "log",
+]
+
[[package]]
name = "file_finder"
version = "0.1.0"
@@ -1566,9 +1792,9 @@ dependencies = [
[[package]]
name = "fixedbitset"
-version = "0.4.1"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e"
+checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
@@ -1679,6 +1905,17 @@ dependencies = [
"pkg-config",
]
+[[package]]
+name = "fs-set-times"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7df62ee66ee2d532ea8d567b5a3f0d03ecd64636b98bad5be1e93dcc918b92aa"
+dependencies = [
+ "io-lifetimes",
+ "rustix",
+ "winapi 0.3.9",
+]
+
[[package]]
name = "fsevent"
version = "2.0.2"
@@ -1843,6 +2080,15 @@ dependencies = [
"util",
]
+[[package]]
+name = "fxhash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
+dependencies = [
+ "byteorder",
+]
+
[[package]]
name = "generic-array"
version = "0.14.5"
@@ -1866,20 +2112,20 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.6"
+version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
dependencies = [
"cfg-if 1.0.0",
"libc",
- "wasi 0.10.2+wasi-snapshot-preview1",
+ "wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
name = "gif"
-version = "0.11.3"
+version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b"
+checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06"
dependencies = [
"color_quant",
"weezl",
@@ -1890,6 +2136,11 @@ name = "gimli"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
+dependencies = [
+ "fallible-iterator",
+ "indexmap",
+ "stable_deref_trait",
+]
[[package]]
name = "glob"
@@ -1899,9 +2150,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "globset"
-version = "0.4.8"
+version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd"
+checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a"
dependencies = [
"aho-corasick",
"bstr",
@@ -1968,7 +2219,7 @@ dependencies = [
"smallvec",
"smol",
"sum_tree",
- "time 0.3.10",
+ "time 0.3.11",
"tiny-skia",
"tree-sitter",
"usvg",
@@ -2009,6 +2260,9 @@ name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+dependencies = [
+ "ahash",
+]
[[package]]
name = "hashbrown"
@@ -2080,6 +2334,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "hermit-abi"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d37fb7dc756218a0559bfc21e4381f03cbb696cdaf959e7e95e927496f0564cd"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "hex"
version = "0.4.3"
@@ -2235,7 +2498,7 @@ version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d"
dependencies = [
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
"globset",
"lazy_static",
"log",
@@ -2268,12 +2531,13 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.8.2"
+version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [
"autocfg 1.1.0",
- "hashbrown 0.11.2",
+ "hashbrown 0.12.1",
+ "serde",
]
[[package]]
@@ -2291,6 +2555,26 @@ dependencies = [
"cfg-if 1.0.0",
]
+[[package]]
+name = "io-extras"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0c937cc9891c12eaa8c63ad347e4a288364b1328b924886970b47a14ab8f8f8"
+dependencies = [
+ "io-lifetimes",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "io-lifetimes"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec58677acfea8a15352d42fc87d11d63596ade9239e0a7c9352914417515dbe6"
+dependencies = [
+ "libc",
+ "winapi 0.3.9",
+]
+
[[package]]
name = "iovec"
version = "0.1.4"
@@ -2325,6 +2609,18 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b"
+[[package]]
+name = "is-terminal"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c89a757e762896bdbdfadf2860d0f8b0cea5e363d8cf3e7bdfeb63d1d976352"
+dependencies = [
+ "hermit-abi 0.2.3",
+ "io-lifetimes",
+ "rustix",
+ "winapi 0.3.9",
+]
+
[[package]]
name = "isahc"
version = "1.7.2"
@@ -2333,7 +2629,7 @@ checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9"
dependencies = [
"async-channel",
"castaway",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
"curl",
"curl-sys",
"encoding_rs",
@@ -2367,6 +2663,15 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
+[[package]]
+name = "ittapi-rs"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f712648a1ad72fbfb7adc2772c331e8d90f022f8cf30cbabefba2878dd3172b0"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "jobserver"
version = "0.1.24"
@@ -2400,9 +2705,9 @@ dependencies = [
[[package]]
name = "js-sys"
-version = "0.3.57"
+version = "0.3.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397"
+checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27"
dependencies = [
"wasm-bindgen",
]
@@ -2487,6 +2792,12 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
+[[package]]
+name = "leb128"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
+
[[package]]
name = "libc"
version = "0.2.126"
@@ -2521,9 +2832,8 @@ dependencies = [
[[package]]
name = "librocksdb-sys"
-version = "0.6.1+6.28.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81bc587013734dadb7cf23468e531aa120788b87243648be42e2d3a072186291"
+version = "0.7.1+7.3.1"
+source = "git+https://github.com/rust-rocksdb/rust-rocksdb?rev=39dc822dde743b2a26eb160b660e8fbdab079d49#39dc822dde743b2a26eb160b660e8fbdab079d49"
dependencies = [
"bindgen",
"bzip2-sys",
@@ -2557,9 +2867,15 @@ dependencies = [
[[package]]
name = "linked-hash-map"
-version = "0.5.4"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
+checksum = "5284f00d480e1c39af34e72f8ad60b94f47007e3481cd3b731c1d67190ddc7b7"
[[package]]
name = "lipsum"
@@ -2627,6 +2943,15 @@ dependencies = [
"url",
]
+[[package]]
+name = "mach"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "malloc_buf"
version = "0.0.6"
@@ -2657,6 +2982,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"
+[[package]]
+name = "maybe-owned"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4"
+
[[package]]
name = "maybe-uninit"
version = "2.0.0"
@@ -2678,6 +3009,15 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+[[package]]
+name = "memfd"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6627dc657574b49d6ad27105ed671822be56e0d2547d413bfbf3e8d8fa92e7a"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "memmap2"
version = "0.2.3"
@@ -2778,9 +3118,9 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.8.3"
+version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799"
+checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
dependencies = [
"libc",
"log",
@@ -2846,8 +3186,14 @@ dependencies = [
]
[[package]]
-name = "multimap"
-version = "0.8.3"
+name = "more-asserts"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389"
+
+[[package]]
+name = "multimap"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
@@ -2989,7 +3335,7 @@ version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
]
@@ -3027,14 +3373,17 @@ version = "0.28.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
dependencies = [
+ "crc32fast",
+ "hashbrown 0.11.2",
+ "indexmap",
"memchr",
]
[[package]]
name = "once_cell"
-version = "1.12.0"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "opaque-debug"
@@ -3293,18 +3642,18 @@ checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
[[package]]
name = "pin-project"
-version = "1.0.10"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e"
+checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
-version = "1.0.10"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb"
+checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74"
dependencies = [
"proc-macro2",
"quote",
@@ -3345,10 +3694,45 @@ dependencies = [
"indexmap",
"line-wrap",
"serde",
- "time 0.3.10",
+ "time 0.3.11",
"xml-rs",
]
+[[package]]
+name = "plugin"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "plugin_macros",
+ "serde",
+]
+
+[[package]]
+name = "plugin_macros"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+]
+
+[[package]]
+name = "plugin_runtime"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "bincode",
+ "pollster",
+ "serde",
+ "serde_json",
+ "smol",
+ "wasi-common",
+ "wasmtime",
+ "wasmtime-wasi",
+]
+
[[package]]
name = "png"
version = "0.16.8"
@@ -13,6 +13,8 @@ cocoa-foundation = { git = "https://github.com/servo/core-foundation-rs", rev =
core-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-foundation-sys = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
core-graphics = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
+# TODO - Remove when a new version of RustRocksDB is released
+rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "39dc822dde743b2a26eb160b660e8fbdab079d49" }
[profile.dev]
split-debuginfo = "unpacked"
@@ -1471,7 +1471,7 @@ async fn test_collaborating_with_diagnostics(
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
// Share a project as client A
@@ -1706,16 +1706,18 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- completion_provider: Some(lsp::CompletionOptions {
- trigger_characters: Some(vec![".".to_string()]),
+ let mut fake_language_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string()]),
+ ..Default::default()
+ }),
..Default::default()
- }),
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -1959,7 +1961,7 @@ async fn test_formatting_buffer(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
// Here we insert a fake tree with a directory that exists on disk. This is needed
@@ -2045,7 +2047,7 @@ async fn test_definition(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -2154,7 +2156,7 @@ async fn test_references(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -2334,7 +2336,7 @@ async fn test_document_highlights(cx_a: &mut TestAppContext, cx_b: &mut TestAppC
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
@@ -2431,7 +2433,7 @@ async fn test_lsp_hover(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
@@ -2519,7 +2521,7 @@ async fn test_project_symbols(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -2622,7 +2624,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -2693,7 +2695,7 @@ async fn test_collaborating_with_code_actions(
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -2898,16 +2900,18 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
- prepare_provider: Some(true),
- work_done_progress_options: Default::default(),
- })),
+ let mut fake_language_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
+ prepare_provider: Some(true),
+ work_done_progress_options: Default::default(),
+ })),
+ ..Default::default()
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -3082,10 +3086,12 @@ async fn test_language_server_statuses(
},
Some(tree_sitter_rust::language()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- name: "the-language-server",
- ..Default::default()
- });
+ let mut fake_language_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-language-server",
+ ..Default::default()
+ }))
+ .await;
client_a.language_registry.add(Arc::new(language));
client_a
@@ -4608,119 +4614,124 @@ async fn test_random_collaboration(
},
None,
);
- let _fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- name: "the-fake-language-server",
- capabilities: lsp::LanguageServer::full_capabilities(),
- initializer: Some(Box::new({
- let rng = rng.clone();
- let fs = fs.clone();
- let project = host_project.downgrade();
- move |fake_server: &mut FakeLanguageServer| {
- fake_server.handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
- Ok(Some(lsp::CompletionResponse::Array(vec![
- lsp::CompletionItem {
- text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
- range: lsp::Range::new(
- lsp::Position::new(0, 0),
- lsp::Position::new(0, 0),
- ),
- new_text: "the-new-text".to_string(),
- })),
- ..Default::default()
+ let _fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-fake-language-server",
+ capabilities: lsp::LanguageServer::full_capabilities(),
+ initializer: Some(Box::new({
+ let rng = rng.clone();
+ let fs = fs.clone();
+ let project = host_project.downgrade();
+ move |fake_server: &mut FakeLanguageServer| {
+ fake_server.handle_request::<lsp::request::Completion, _, _>(
+ |_, _| async move {
+ Ok(Some(lsp::CompletionResponse::Array(vec![
+ lsp::CompletionItem {
+ text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 0),
+ lsp::Position::new(0, 0),
+ ),
+ new_text: "the-new-text".to_string(),
+ })),
+ ..Default::default()
+ },
+ ])))
},
- ])))
- });
-
- fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
- |_, _| async move {
- Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
- lsp::CodeAction {
- title: "the-code-action".to_string(),
- ..Default::default()
- },
- )]))
- },
- );
+ );
- fake_server.handle_request::<lsp::request::PrepareRenameRequest, _, _>(
- |params, _| async move {
- Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
- params.position,
- params.position,
- ))))
- },
- );
+ fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
+ |_, _| async move {
+ Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
+ lsp::CodeAction {
+ title: "the-code-action".to_string(),
+ ..Default::default()
+ },
+ )]))
+ },
+ );
+
+ fake_server.handle_request::<lsp::request::PrepareRenameRequest, _, _>(
+ |params, _| async move {
+ Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
+ params.position,
+ params.position,
+ ))))
+ },
+ );
- fake_server.handle_request::<lsp::request::GotoDefinition, _, _>({
- let fs = fs.clone();
- let rng = rng.clone();
- move |_, _| {
+ fake_server.handle_request::<lsp::request::GotoDefinition, _, _>({
let fs = fs.clone();
let rng = rng.clone();
- async move {
- let files = fs.files().await;
- let mut rng = rng.lock();
- let count = rng.gen_range::<usize, _>(1..3);
- let files = (0..count)
- .map(|_| files.choose(&mut *rng).unwrap())
- .collect::<Vec<_>>();
- log::info!("LSP: Returning definitions in files {:?}", &files);
- Ok(Some(lsp::GotoDefinitionResponse::Array(
- files
- .into_iter()
- .map(|file| lsp::Location {
- uri: lsp::Url::from_file_path(file).unwrap(),
- range: Default::default(),
- })
- .collect(),
- )))
+ move |_, _| {
+ let fs = fs.clone();
+ let rng = rng.clone();
+ async move {
+ let files = fs.files().await;
+ let mut rng = rng.lock();
+ let count = rng.gen_range::<usize, _>(1..3);
+ let files = (0..count)
+ .map(|_| files.choose(&mut *rng).unwrap())
+ .collect::<Vec<_>>();
+ log::info!("LSP: Returning definitions in files {:?}", &files);
+ Ok(Some(lsp::GotoDefinitionResponse::Array(
+ files
+ .into_iter()
+ .map(|file| lsp::Location {
+ uri: lsp::Url::from_file_path(file).unwrap(),
+ range: Default::default(),
+ })
+ .collect(),
+ )))
+ }
}
- }
- });
-
- fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>({
- let rng = rng.clone();
- let project = project.clone();
- move |params, mut cx| {
- let highlights = if let Some(project) = project.upgrade(&cx) {
- project.update(&mut cx, |project, cx| {
- let path = params
- .text_document_position_params
- .text_document
- .uri
- .to_file_path()
- .unwrap();
- let (worktree, relative_path) =
- project.find_local_worktree(&path, cx)?;
- let project_path =
- ProjectPath::from((worktree.read(cx).id(), relative_path));
- let buffer = project.get_open_buffer(&project_path, cx)?.read(cx);
-
- let mut highlights = Vec::new();
- let highlight_count = rng.lock().gen_range(1..=5);
- let mut prev_end = 0;
- for _ in 0..highlight_count {
- let range =
- buffer.random_byte_range(prev_end, &mut *rng.lock());
-
- highlights.push(lsp::DocumentHighlight {
- range: range_to_lsp(range.to_point_utf16(buffer)),
- kind: Some(lsp::DocumentHighlightKind::READ),
- });
- prev_end = range.end;
- }
- Some(highlights)
- })
- } else {
- None
- };
- async move { Ok(highlights) }
- }
- });
- }
- })),
- ..Default::default()
- });
+ });
+
+ fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>({
+ let rng = rng.clone();
+ let project = project.clone();
+ move |params, mut cx| {
+ let highlights = if let Some(project) = project.upgrade(&cx) {
+ project.update(&mut cx, |project, cx| {
+ let path = params
+ .text_document_position_params
+ .text_document
+ .uri
+ .to_file_path()
+ .unwrap();
+ let (worktree, relative_path) =
+ project.find_local_worktree(&path, cx)?;
+ let project_path =
+ ProjectPath::from((worktree.read(cx).id(), relative_path));
+ let buffer =
+ project.get_open_buffer(&project_path, cx)?.read(cx);
+
+ let mut highlights = Vec::new();
+ let highlight_count = rng.lock().gen_range(1..=5);
+ let mut prev_end = 0;
+ for _ in 0..highlight_count {
+ let range =
+ buffer.random_byte_range(prev_end, &mut *rng.lock());
+
+ highlights.push(lsp::DocumentHighlight {
+ range: range_to_lsp(range.to_point_utf16(buffer)),
+ kind: Some(lsp::DocumentHighlightKind::READ),
+ });
+ prev_end = range.end;
+ }
+ Some(highlights)
+ })
+ } else {
+ None
+ };
+ async move { Ok(highlights) }
+ }
+ });
+ }
+ })),
+ ..Default::default()
+ }))
+ .await;
host_language_registry.add(Arc::new(language));
let op_start_signal = futures::channel::mpsc::unbounded();
@@ -9302,13 +9302,15 @@ mod tests {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- document_formatting_provider: Some(lsp::OneOf::Left(true)),
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ document_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
let fs = FakeFs::new(cx.background().clone());
fs.insert_file("/file.rs", Default::default()).await;
@@ -9414,13 +9416,15 @@ mod tests {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
let fs = FakeFs::new(cx.background().clone());
fs.insert_file("/file.rs", Default::default()).await;
@@ -9526,16 +9530,18 @@ mod tests {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- completion_provider: Some(lsp::CompletionOptions {
- trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
+ ..Default::default()
+ }),
..Default::default()
- }),
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
let text = "
one
@@ -449,10 +449,12 @@ impl<'a> EditorLspTestContext<'a> {
.unwrap_or(&"txt".to_string())
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities,
- ..Default::default()
- });
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities,
+ ..Default::default()
+ }))
+ .await;
let project = Project::test(params.fs.clone(), [], cx).await;
project.update(cx, |project, _| project.languages().add(Arc::new(language)));
@@ -7,6 +7,7 @@ pub mod proto;
mod tests;
use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
use client::http::HttpClient;
use collections::HashMap;
use futures::{
@@ -17,6 +18,7 @@ use gpui::{MutableAppContext, Task};
use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use parking_lot::{Mutex, RwLock};
+use postage::watch;
use regex::Regex;
use serde::{de, Deserialize, Deserializer};
use serde_json::Value;
@@ -29,7 +31,7 @@ use std::{
str,
sync::Arc,
};
-use theme::SyntaxTheme;
+use theme::{SyntaxTheme, Theme};
use tree_sitter::{self, Query};
use util::ResultExt;
@@ -43,7 +45,7 @@ pub use outline::{Outline, OutlineItem};
pub use tree_sitter::{Parser, Tree};
thread_local! {
- static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
+ static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
lazy_static! {
@@ -63,48 +65,141 @@ pub trait ToLspPosition {
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct LanguageServerName(pub Arc<str>);
+/// Represents a Language Server, with certain cached sync properties.
+/// Uses [`LspAdapter`] under the hood, but calls all 'static' methods
+/// once at startup, and caches the results.
+pub struct CachedLspAdapter {
+ pub name: LanguageServerName,
+ pub server_args: Vec<String>,
+ pub initialization_options: Option<Value>,
+ pub disk_based_diagnostic_sources: Vec<String>,
+ pub disk_based_diagnostics_progress_token: Option<String>,
+ pub id_for_language: Option<String>,
+ pub adapter: Box<dyn LspAdapter>,
+}
+
+impl CachedLspAdapter {
+ pub async fn new<T: LspAdapter>(adapter: T) -> Arc<Self> {
+ let adapter = Box::new(adapter);
+ let name = adapter.name().await;
+ let server_args = adapter.server_args().await;
+ let initialization_options = adapter.initialization_options().await;
+ let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
+ let disk_based_diagnostics_progress_token =
+ adapter.disk_based_diagnostics_progress_token().await;
+ let id_for_language = adapter.id_for_language(name.0.as_ref()).await;
+
+ Arc::new(CachedLspAdapter {
+ name,
+ server_args,
+ initialization_options,
+ disk_based_diagnostic_sources,
+ disk_based_diagnostics_progress_token,
+ id_for_language,
+ adapter,
+ })
+ }
+
+ pub async fn fetch_latest_server_version(
+ &self,
+ http: Arc<dyn HttpClient>,
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ self.adapter.fetch_latest_server_version(http).await
+ }
+
+ pub async fn fetch_server_binary(
+ &self,
+ version: Box<dyn 'static + Send + Any>,
+ http: Arc<dyn HttpClient>,
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
+ self.adapter
+ .fetch_server_binary(version, http, container_dir)
+ .await
+ }
+
+ pub async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ self.adapter.cached_server_binary(container_dir).await
+ }
+
+ pub async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
+ self.adapter.process_diagnostics(params).await
+ }
+
+ pub async fn label_for_completion(
+ &self,
+ completion_item: &lsp::CompletionItem,
+ language: &Language,
+ ) -> Option<CodeLabel> {
+ self.adapter
+ .label_for_completion(completion_item, language)
+ .await
+ }
+
+ pub async fn label_for_symbol(
+ &self,
+ name: &str,
+ kind: lsp::SymbolKind,
+ language: &Language,
+ ) -> Option<CodeLabel> {
+ self.adapter.label_for_symbol(name, kind, language).await
+ }
+}
+
+#[async_trait]
pub trait LspAdapter: 'static + Send + Sync {
- fn name(&self) -> LanguageServerName;
- fn fetch_latest_server_version(
+ async fn name(&self) -> LanguageServerName;
+
+ async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>>;
- fn fetch_server_binary(
+ ) -> Result<Box<dyn 'static + Send + Any>>;
+
+ async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>>;
- fn cached_server_binary(&self, container_dir: Arc<Path>)
- -> BoxFuture<'static, Option<PathBuf>>;
+ container_dir: PathBuf,
+ ) -> Result<PathBuf>;
+
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf>;
- fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+ async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
- fn label_for_completion(&self, _: &lsp::CompletionItem, _: &Language) -> Option<CodeLabel> {
+ async fn label_for_completion(
+ &self,
+ _: &lsp::CompletionItem,
+ _: &Language,
+ ) -> Option<CodeLabel> {
None
}
- fn label_for_symbol(&self, _: &str, _: lsp::SymbolKind, _: &Language) -> Option<CodeLabel> {
+ async fn label_for_symbol(
+ &self,
+ _: &str,
+ _: lsp::SymbolKind,
+ _: &Language,
+ ) -> Option<CodeLabel> {
None
}
- fn server_args(&self) -> &[&str] {
- &[]
+ async fn server_args(&self) -> Vec<String> {
+ Vec::new()
}
- fn initialization_options(&self) -> Option<Value> {
+ async fn initialization_options(&self) -> Option<Value> {
None
}
- fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
+ async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
Default::default()
}
- fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
+ async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
None
}
- fn id_for_language(&self, _name: &str) -> Option<String> {
+ async fn id_for_language(&self, _name: &str) -> Option<String> {
None
}
}
@@ -165,8 +260,8 @@ pub struct FakeLspAdapter {
pub name: &'static str,
pub capabilities: lsp::ServerCapabilities,
pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
- pub disk_based_diagnostics_progress_token: Option<&'static str>,
- pub disk_based_diagnostics_sources: &'static [&'static str],
+ pub disk_based_diagnostics_progress_token: Option<String>,
+ pub disk_based_diagnostics_sources: Vec<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -180,7 +275,7 @@ pub struct BracketPair {
pub struct Language {
pub(crate) config: LanguageConfig,
pub(crate) grammar: Option<Arc<Grammar>>,
- pub(crate) adapter: Option<Arc<dyn LspAdapter>>,
+ pub(crate) adapter: Option<Arc<CachedLspAdapter>>,
#[cfg(any(test, feature = "test-support"))]
fake_adapter: Option<(
@@ -219,6 +314,8 @@ pub struct LanguageRegistry {
Shared<BoxFuture<'static, Result<PathBuf, Arc<anyhow::Error>>>>,
>,
>,
+ subscription: RwLock<(watch::Sender<()>, watch::Receiver<()>)>,
+ theme: RwLock<Option<Arc<Theme>>>,
}
impl LanguageRegistry {
@@ -231,6 +328,8 @@ impl LanguageRegistry {
lsp_binary_statuses_rx,
login_shell_env_loaded: login_shell_env_loaded.shared(),
lsp_binary_paths: Default::default(),
+ subscription: RwLock::new(watch::channel()),
+ theme: Default::default(),
}
}
@@ -240,12 +339,21 @@ impl LanguageRegistry {
}
pub fn add(&self, language: Arc<Language>) {
+ if let Some(theme) = self.theme.read().clone() {
+ language.set_theme(&theme.editor.syntax);
+ }
self.languages.write().push(language.clone());
+ *self.subscription.write().0.borrow_mut() = ();
}
- pub fn set_theme(&self, theme: &SyntaxTheme) {
+ pub fn subscribe(&self) -> watch::Receiver<()> {
+ self.subscription.read().1.clone()
+ }
+
+ pub fn set_theme(&self, theme: Arc<Theme>) {
+ *self.theme.write() = Some(theme.clone());
for language in self.languages.read().iter() {
- language.set_theme(theme);
+ language.set_theme(&theme.editor.syntax);
}
}
@@ -345,7 +453,7 @@ impl LanguageRegistry {
let server_binary_path = this
.lsp_binary_paths
.lock()
- .entry(adapter.name())
+ .entry(adapter.name.clone())
.or_insert_with(|| {
get_server_binary_path(
adapter.clone(),
@@ -362,11 +470,11 @@ impl LanguageRegistry {
.map_err(|e| anyhow!(e));
let server_binary_path = server_binary_path.await?;
- let server_args = adapter.server_args();
+ let server_args = &adapter.server_args;
let server = lsp::LanguageServer::new(
server_id,
&server_binary_path,
- server_args,
+ &server_args,
&root_path,
cx,
)?;
@@ -382,13 +490,13 @@ impl LanguageRegistry {
}
async fn get_server_binary_path(
- adapter: Arc<dyn LspAdapter>,
+ adapter: Arc<CachedLspAdapter>,
language: Arc<Language>,
http_client: Arc<dyn HttpClient>,
download_dir: Arc<Path>,
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
) -> Result<PathBuf> {
- let container_dir: Arc<Path> = download_dir.join(adapter.name().0.as_ref()).into();
+ let container_dir = download_dir.join(adapter.name.0.as_ref());
if !container_dir.exists() {
smol::fs::create_dir_all(&container_dir)
.await
@@ -424,7 +532,7 @@ async fn get_server_binary_path(
}
async fn fetch_latest_server_binary_path(
- adapter: Arc<dyn LspAdapter>,
+ adapter: Arc<CachedLspAdapter>,
language: Arc<Language>,
http_client: Arc<dyn HttpClient>,
container_dir: &Path,
@@ -444,7 +552,7 @@ async fn fetch_latest_server_binary_path(
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
.await?;
let path = adapter
- .fetch_server_binary(version_info, http_client, container_dir.clone())
+ .fetch_server_binary(version_info, http_client, container_dir.to_path_buf())
.await?;
lsp_binary_statuses_tx
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded))
@@ -473,7 +581,7 @@ impl Language {
}
}
- pub fn lsp_adapter(&self) -> Option<Arc<dyn LspAdapter>> {
+ pub fn lsp_adapter(&self) -> Option<Arc<CachedLspAdapter>> {
self.adapter.clone()
}
@@ -505,19 +613,19 @@ impl Language {
Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
}
- pub fn with_lsp_adapter(mut self, lsp_adapter: Arc<dyn LspAdapter>) -> Self {
+ pub fn with_lsp_adapter(mut self, lsp_adapter: Arc<CachedLspAdapter>) -> Self {
self.adapter = Some(lsp_adapter);
self
}
#[cfg(any(test, feature = "test-support"))]
- pub fn set_fake_lsp_adapter(
+ pub async fn set_fake_lsp_adapter(
&mut self,
- fake_lsp_adapter: FakeLspAdapter,
+ fake_lsp_adapter: Arc<FakeLspAdapter>,
) -> mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
let (servers_tx, servers_rx) = mpsc::unbounded();
- let adapter = Arc::new(fake_lsp_adapter);
- self.fake_adapter = Some((servers_tx, adapter.clone()));
+ self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone()));
+ let adapter = CachedLspAdapter::new(fake_lsp_adapter).await;
self.adapter = Some(adapter);
servers_rx
}
@@ -530,32 +638,42 @@ impl Language {
self.config.line_comment.as_deref()
}
- pub fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
- self.adapter.as_ref().map_or(&[] as &[_], |adapter| {
- adapter.disk_based_diagnostic_sources()
- })
+ pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
+ match self.adapter.as_ref() {
+ Some(adapter) => &adapter.disk_based_diagnostic_sources,
+ None => &[],
+ }
}
- pub fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
- self.adapter
- .as_ref()
- .and_then(|adapter| adapter.disk_based_diagnostics_progress_token())
+ pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> {
+ if let Some(adapter) = self.adapter.as_ref() {
+ adapter.disk_based_diagnostics_progress_token.as_deref()
+ } else {
+ None
+ }
}
- pub fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
+ pub async fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
if let Some(processor) = self.adapter.as_ref() {
- processor.process_diagnostics(diagnostics);
+ processor.process_diagnostics(diagnostics).await;
}
}
- pub fn label_for_completion(&self, completion: &lsp::CompletionItem) -> Option<CodeLabel> {
+ pub async fn label_for_completion(
+ &self,
+ completion: &lsp::CompletionItem,
+ ) -> Option<CodeLabel> {
self.adapter
.as_ref()?
.label_for_completion(completion, self)
+ .await
}
- pub fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> {
- self.adapter.as_ref()?.label_for_symbol(name, kind, self)
+ pub async fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> {
+ self.adapter
+ .as_ref()?
+ .label_for_symbol(name, kind, self)
+ .await
}
pub fn highlight_text<'a>(
@@ -664,45 +782,46 @@ impl Default for FakeLspAdapter {
capabilities: lsp::LanguageServer::full_capabilities(),
initializer: None,
disk_based_diagnostics_progress_token: None,
- disk_based_diagnostics_sources: &[],
+ disk_based_diagnostics_sources: Vec::new(),
}
}
}
#[cfg(any(test, feature = "test-support"))]
-impl LspAdapter for FakeLspAdapter {
- fn name(&self) -> LanguageServerName {
+#[async_trait]
+impl LspAdapter for Arc<FakeLspAdapter> {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName(self.name.into())
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
_: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
+ ) -> Result<Box<dyn 'static + Send + Any>> {
unreachable!();
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
_: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
- _: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
+ _: PathBuf,
+ ) -> Result<PathBuf> {
unreachable!();
}
- fn cached_server_binary(&self, _: Arc<Path>) -> BoxFuture<'static, Option<PathBuf>> {
+ async fn cached_server_binary(&self, _: PathBuf) -> Option<PathBuf> {
unreachable!();
}
- fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+ async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
- fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
- self.disk_based_diagnostics_sources
+ async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
+ self.disk_based_diagnostics_sources.clone()
}
- fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
- self.disk_based_diagnostics_progress_token
+ async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
+ self.disk_based_diagnostics_progress_token.clone()
}
}
@@ -397,9 +397,9 @@ pub fn serialize_completion(completion: &Completion) -> proto::Completion {
}
}
-pub fn deserialize_completion(
+pub async fn deserialize_completion(
completion: proto::Completion,
- language: Option<&Arc<Language>>,
+ language: Option<Arc<Language>>,
) -> Result<Completion> {
let old_start = completion
.old_start
@@ -410,15 +410,18 @@ pub fn deserialize_completion(
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid old end"))?;
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
+ let label = match language {
+ Some(l) => l.label_for_completion(&lsp_completion).await,
+ None => None,
+ };
+
Ok(Completion {
old_range: old_start..old_end,
new_text: completion.new_text,
- label: language
- .and_then(|l| l.label_for_completion(&lsp_completion))
- .unwrap_or(CodeLabel::plain(
- lsp_completion.label.clone(),
- lsp_completion.filter_text.as_deref(),
- )),
+ label: label.unwrap_or(CodeLabel::plain(
+ lsp_completion.label.clone(),
+ lsp_completion.filter_text.as_deref(),
+ )),
lsp_completion,
})
}
@@ -101,10 +101,10 @@ struct Error {
}
impl LanguageServer {
- pub fn new(
+ pub fn new<T: AsRef<std::ffi::OsStr>>(
server_id: usize,
binary_path: &Path,
- args: &[&str],
+ args: &[T],
root_path: &Path,
cx: AsyncAppContext,
) -> Result<Self> {
@@ -258,6 +258,9 @@ impl LanguageServer {
}
}
+ /// Initializes a language server.
+ /// Note that `options` is used directly to construct [`InitializeParams`],
+ /// which is why it is owned.
pub async fn initialize(mut self, options: Option<Value>) -> Result<Arc<Self>> {
let root_uri = Url::from_file_path(&self.root_path).unwrap();
#[allow(deprecated)]
@@ -0,0 +1,9 @@
+[package]
+name = "plugin"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+serde = "1.0"
+bincode = "1.3"
+plugin_macros = { path = "../plugin_macros" }
@@ -0,0 +1,61 @@
+pub use bincode;
+pub use serde;
+
+/// This is the buffer that is used on the Wasm side.
+/// Note that it mirrors the functionality of
+/// the `WasiBuffer` found in `plugin_runtime/src/plugin.rs`,
+/// but has a few different methods.
+pub struct __Buffer {
+ pub ptr: u32, // *const u8,
+ pub len: u32, // usize,
+}
+
+impl __Buffer {
+ pub fn into_u64(self) -> u64 {
+ ((self.ptr as u64) << 32) | (self.len as u64)
+ }
+
+ pub fn from_u64(packed: u64) -> Self {
+ __Buffer {
+ ptr: (packed >> 32) as u32,
+ len: packed as u32,
+ }
+ }
+}
+
+/// Allocates a buffer with an exact size.
+/// We don't return the size because it has to be passed in anyway.
+#[no_mangle]
+pub extern "C" fn __alloc_buffer(len: u32) -> u32 {
+ let vec = vec![0; len as usize];
+ let buffer = unsafe { __Buffer::from_vec(vec) };
+ return buffer.ptr;
+}
+
+/// Frees a given buffer, requires the size.
+#[no_mangle]
+pub extern "C" fn __free_buffer(buffer: u64) {
+ let vec = unsafe { __Buffer::from_u64(buffer).to_vec() };
+ std::mem::drop(vec);
+}
+
+impl __Buffer {
+ #[inline(always)]
+ pub unsafe fn to_vec(&self) -> Vec<u8> {
+ core::slice::from_raw_parts(self.ptr as *const u8, self.len as usize).to_vec()
+ }
+
+ #[inline(always)]
+ pub unsafe fn from_vec(mut vec: Vec<u8>) -> __Buffer {
+ vec.shrink_to(0);
+ let ptr = vec.as_ptr() as u32;
+ let len = vec.len() as u32;
+ std::mem::forget(vec);
+ __Buffer { ptr, len }
+ }
+}
+
+pub mod prelude {
+ pub use super::{__Buffer, __alloc_buffer};
+ pub use plugin_macros::{export, import};
+}
@@ -0,0 +1,14 @@
+[package]
+name = "plugin_macros"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+syn = { version = "1.0", features = ["full", "extra-traits"] }
+quote = "1.0"
+proc-macro2 = "1.0"
+serde = "1.0"
+bincode = "1.3"
@@ -0,0 +1,168 @@
+use core::panic;
+
+use proc_macro::TokenStream;
+use quote::{format_ident, quote};
+use syn::{parse_macro_input, Block, FnArg, ForeignItemFn, Ident, ItemFn, Pat, Type, Visibility};
+
+/// Attribute macro to be used guest-side within a plugin.
+/// ```ignore
+/// #[export]
+/// pub fn say_hello() -> String {
+/// "Hello from Wasm".into()
+/// }
+/// ```
+/// This macro makes a function defined guest-side available host-side.
+/// Note that all arguments and return types must be `serde`.
+#[proc_macro_attribute]
+pub fn export(args: TokenStream, function: TokenStream) -> TokenStream {
+ if !args.is_empty() {
+ panic!("The export attribute does not take any arguments");
+ }
+
+ let inner_fn = parse_macro_input!(function as ItemFn);
+
+ if !inner_fn.sig.generics.params.is_empty() {
+ panic!("Exported functions can not take generic parameters");
+ }
+
+ if let Visibility::Public(_) = inner_fn.vis {
+ } else {
+ panic!("The export attribute only works for public functions");
+ }
+
+ let inner_fn_name = format_ident!("{}", inner_fn.sig.ident);
+ let outer_fn_name = format_ident!("__{}", inner_fn_name);
+
+ let variadic = inner_fn.sig.inputs.len();
+ let i = (0..variadic).map(syn::Index::from);
+ let t: Vec<Type> = inner_fn
+ .sig
+ .inputs
+ .iter()
+ .map(|x| match x {
+ FnArg::Receiver(_) => {
+ panic!("All arguments must have specified types, no `self` allowed")
+ }
+ FnArg::Typed(item) => *item.ty.clone(),
+ })
+ .collect();
+
+ // this is cursed...
+ let (args, ty) = if variadic != 1 {
+ (
+ quote! {
+ #( data.#i ),*
+ },
+ quote! {
+ ( #( #t ),* )
+ },
+ )
+ } else {
+ let ty = &t[0];
+ (quote! { data }, quote! { #ty })
+ };
+
+ TokenStream::from(quote! {
+ #[no_mangle]
+ #inner_fn
+
+ #[no_mangle]
+ pub extern "C" fn #outer_fn_name(packed_buffer: u64) -> u64 {
+ // setup
+ let data = unsafe { ::plugin::__Buffer::from_u64(packed_buffer).to_vec() };
+
+ // operation
+ let data: #ty = match ::plugin::bincode::deserialize(&data) {
+ Ok(d) => d,
+ Err(e) => panic!("Data passed to function not deserializable."),
+ };
+ let result = #inner_fn_name(#args);
+ let new_data: Result<Vec<u8>, _> = ::plugin::bincode::serialize(&result);
+ let new_data = new_data.unwrap();
+
+ // teardown
+ let new_buffer = unsafe { ::plugin::__Buffer::from_vec(new_data) }.into_u64();
+ return new_buffer;
+ }
+ })
+}
+
+/// Attribute macro to be used guest-side within a plugin.
+/// ```ignore
+/// #[import]
+/// pub fn operating_system_name() -> String;
+/// ```
+/// This macro makes a function defined host-side available guest-side.
+/// Note that all arguments and return types must be `serde`.
+/// All that's provided is a signature, as the function is implemented host-side.
+#[proc_macro_attribute]
+pub fn import(args: TokenStream, function: TokenStream) -> TokenStream {
+ if !args.is_empty() {
+ panic!("The import attribute does not take any arguments");
+ }
+
+ let fn_declare = parse_macro_input!(function as ForeignItemFn);
+
+ if !fn_declare.sig.generics.params.is_empty() {
+ panic!("Exported functions can not take generic parameters");
+ }
+
+ // let inner_fn_name = format_ident!("{}", fn_declare.sig.ident);
+ let extern_fn_name = format_ident!("__{}", fn_declare.sig.ident);
+
+ let (args, tys): (Vec<Ident>, Vec<Type>) = fn_declare
+ .sig
+ .inputs
+ .clone()
+ .into_iter()
+ .map(|x| match x {
+ FnArg::Receiver(_) => {
+ panic!("All arguments must have specified types, no `self` allowed")
+ }
+ FnArg::Typed(t) => {
+ if let Pat::Ident(i) = *t.pat {
+ (i.ident, *t.ty)
+ } else {
+ panic!("All function arguments must be identifiers");
+ }
+ }
+ })
+ .unzip();
+
+ let body = TokenStream::from(quote! {
+ {
+ // setup
+ let data: (#( #tys ),*) = (#( #args ),*);
+ let data = ::plugin::bincode::serialize(&data).unwrap();
+ let buffer = unsafe { ::plugin::__Buffer::from_vec(data) };
+
+ // operation
+ let new_buffer = unsafe { #extern_fn_name(buffer.into_u64()) };
+ let new_data = unsafe { ::plugin::__Buffer::from_u64(new_buffer).to_vec() };
+
+ // teardown
+ match ::plugin::bincode::deserialize(&new_data) {
+ Ok(d) => d,
+ Err(e) => panic!("Data returned from function not deserializable."),
+ }
+ }
+ });
+
+ let block = parse_macro_input!(body as Block);
+
+ let inner_fn = ItemFn {
+ attrs: fn_declare.attrs,
+ vis: fn_declare.vis,
+ sig: fn_declare.sig,
+ block: Box::new(block),
+ };
+
+ TokenStream::from(quote! {
+ extern "C" {
+ fn #extern_fn_name(buffer: u64) -> u64;
+ }
+
+ #[no_mangle]
+ #inner_fn
+ })
+}
@@ -0,0 +1,18 @@
+[package]
+name = "plugin_runtime"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+wasmtime = "0.38"
+wasmtime-wasi = "0.38"
+wasi-common = "0.38"
+anyhow = { version = "1.0", features = ["std"] }
+serde = "1.0"
+serde_json = "1.0"
+bincode = "1.3"
+pollster = "0.2.5"
+smol = "1.2.5"
+
+[build-dependencies]
+wasmtime = "0.38"
@@ -0,0 +1,58 @@
+# Zed's Plugin Runner
+Wasm plugins can be run through `wasmtime`, with support for sandboxed system integration through WASI. There are three `plugin` crates that implement different things:
+
+1. `plugin_runtime` loads and runs compiled `Wasm` plugins, and handles setting up system bindings.
+
+2. `plugin` is the crate that Rust Wasm plugins should depend on. It re-exports some required crates (e.g. `serde`, `bincode`) and provides some necessary macros for generating bindings that `plugin_runtime` can hook into.
+
+3. `plugin_macros` implements the proc macros required by `plugin`, like the `#[bind]` attribute macro.
+
+## ABI
+The interface between the host Rust runtime ('Runtime') and plugins implemented in Wasm ('Plugin') is pretty simple.
+
+`Buffer` is a pair of 4-byte (`u32`) fields, encoded as a single `u64`.
+
+```
+struct Buffer {
+ ptr: u32,
+ len: u32,
+}
+```
+
+All functions that Plugin exports must have the following properties:
+
+- Have the signature `fn(ptr: u64) -> u64`, where both the argument and return types are a `Buffer`:
+
+ - The input `Buffer` will contain the input arguments serialized to `bincode`.
+ - The output `Buffer` will contain the output arguments serialized to `bincode`.
+
+- Have a name starting with two underscores.
+
+Additionally, Plugin must export an:
+
+- `__alloc_buffer` function that, given a `u32` length, returns a `u32` pointer to a buffer of that length.
+- `__free_buffer` function that, given a buffer encoded as a `u64`, frees the buffer at the given location, and does not return anything.
+
+Note that all of these requirements are automatically fulfilled for any Rust Wasm plugin that uses the `plugin` crate, and imports the `prelude`.
+
+Here's an example Rust Wasm plugin that doubles the value of every float in a `Vec<f64>` passed into it:
+
+```rust
+use plugin::prelude::*;
+
+#[export]
+pub fn double(mut x: Vec<f64>) -> Vec<f64> {
+ x.into_iter().map(|x| x * 2.0).collect()
+}
+```
+
+All the serialization code is automatically generated by `#[export]`.
+
+You can specify functions that must be defined host-side by using the `#[import]` attribute. This attribute must be attached to a function signature:
+
+```rust
+#[import]
+fn run(command: String) -> Vec<u8>;
+```
+
+The `#[import]` macro will generate a function body that performs the proper serialization/deserialization needed to call out to the host rust runtime. Note that the same ABI is used for both `#[import]` and `#[export]`.
@@ -0,0 +1,79 @@
+use std::{io::Write, path::Path};
+use wasmtime::{Config, Engine};
+
+fn main() {
+ let base = Path::new("../../plugins");
+
+ println!("cargo:rerun-if-changed={}", base.display());
+
+ let _ = std::fs::remove_dir_all(base.join("bin"));
+ let _ =
+ std::fs::create_dir_all(base.join("bin")).expect("Could not make plugins bin directory");
+
+ let (profile_flags, profile_target) = match std::env::var("PROFILE").unwrap().as_str() {
+ "debug" => (&[][..], "debug"),
+ "release" => (&["--release"][..], "release"),
+ unknown => panic!("unknown profile `{}`", unknown),
+ };
+
+ let build_successful = std::process::Command::new("cargo")
+ .args([
+ "build",
+ "--target",
+ "wasm32-wasi",
+ "--manifest-path",
+ base.join("Cargo.toml").to_str().unwrap(),
+ ])
+ .args(profile_flags)
+ .status()
+ .expect("Could not build plugins")
+ .success();
+ assert!(build_successful);
+
+ let binaries = std::fs::read_dir(base.join("target/wasm32-wasi").join(profile_target))
+ .expect("Could not find compiled plugins in target");
+
+ let engine = create_default_engine();
+
+ for file in binaries {
+ let is_wasm = || {
+ let path = file.ok()?.path();
+ if path.extension()? == "wasm" {
+ Some(path)
+ } else {
+ None
+ }
+ };
+
+ if let Some(path) = is_wasm() {
+ let out_path = base.join("bin").join(path.file_name().unwrap());
+ std::fs::copy(&path, &out_path).expect("Could not copy compiled plugin to bin");
+ precompile(&out_path, &engine);
+ }
+ }
+}
+
+/// Creates a default engine for compiling Wasm.
+/// N.B.: this must create the same `Engine` as
+/// the `create_default_engine` function
+/// in `plugin_runtime/src/plugin.rs`.
+fn create_default_engine() -> Engine {
+ let mut config = Config::default();
+ config.async_support(true);
+ // config.epoch_interruption(true);
+ Engine::new(&config).expect("Could not create engine")
+}
+
+fn precompile(path: &Path, engine: &Engine) {
+ let bytes = std::fs::read(path).expect("Could not read wasm module");
+ let compiled = engine
+ .precompile_module(&bytes)
+ .expect("Could not precompile module");
+ let out_path = path.parent().unwrap().join(&format!(
+ "{}.pre",
+ path.file_name().unwrap().to_string_lossy()
+ ));
+ let mut out_file = std::fs::File::create(out_path)
+ .expect("Could not create output file for precompiled module");
+ out_file.write_all(&compiled).unwrap();
+}
@@ -0,0 +1,93 @@
+pub mod plugin;
+pub use plugin::*;
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use pollster::FutureExt as _;
+
+ #[test]
+ pub fn test_plugin() {
+ pub struct TestPlugin {
+ noop: WasiFn<(), ()>,
+ constant: WasiFn<(), u32>,
+ identity: WasiFn<u32, u32>,
+ add: WasiFn<(u32, u32), u32>,
+ swap: WasiFn<(u32, u32), (u32, u32)>,
+ sort: WasiFn<Vec<u32>, Vec<u32>>,
+ print: WasiFn<String, ()>,
+ and_back: WasiFn<u32, u32>,
+ imports: WasiFn<u32, u32>,
+ half_async: WasiFn<u32, u32>,
+ echo_async: WasiFn<String, String>,
+ }
+
+ async {
+ let mut runtime = PluginBuilder::new_with_default_ctx()
+ .unwrap()
+ .host_function("mystery_number", |input: u32| input + 7)
+ .unwrap()
+ .host_function("import_noop", |_: ()| ())
+ .unwrap()
+ .host_function("import_identity", |input: u32| input)
+ .unwrap()
+ .host_function("import_swap", |(a, b): (u32, u32)| (b, a))
+ .unwrap()
+ .host_function_async("import_half", |a: u32| async move { a / 2 })
+ .unwrap()
+ .host_function_async("command_async", |command: String| async move {
+ let mut args = command.split(' ');
+ let command = args.next().unwrap();
+ smol::process::Command::new(command)
+ .args(args)
+ .output()
+ .await
+ .ok()
+ .map(|output| output.stdout)
+ })
+ .unwrap()
+ .init(
+ false,
+ include_bytes!("../../../plugins/bin/test_plugin.wasm"),
+ )
+ .await
+ .unwrap();
+
+ let plugin = TestPlugin {
+ noop: runtime.function("noop").unwrap(),
+ constant: runtime.function("constant").unwrap(),
+ identity: runtime.function("identity").unwrap(),
+ add: runtime.function("add").unwrap(),
+ swap: runtime.function("swap").unwrap(),
+ sort: runtime.function("sort").unwrap(),
+ print: runtime.function("print").unwrap(),
+ and_back: runtime.function("and_back").unwrap(),
+ imports: runtime.function("imports").unwrap(),
+ half_async: runtime.function("half_async").unwrap(),
+ echo_async: runtime.function("echo_async").unwrap(),
+ };
+
+ let unsorted = vec![1, 3, 4, 2, 5];
+ let sorted = vec![1, 2, 3, 4, 5];
+
+ assert_eq!(runtime.call(&plugin.noop, ()).await.unwrap(), ());
+ assert_eq!(runtime.call(&plugin.constant, ()).await.unwrap(), 27);
+ assert_eq!(runtime.call(&plugin.identity, 58).await.unwrap(), 58);
+ assert_eq!(runtime.call(&plugin.add, (3, 4)).await.unwrap(), 7);
+ assert_eq!(runtime.call(&plugin.swap, (1, 2)).await.unwrap(), (2, 1));
+ assert_eq!(runtime.call(&plugin.sort, unsorted).await.unwrap(), sorted);
+ assert_eq!(runtime.call(&plugin.print, "Hi!".into()).await.unwrap(), ());
+ assert_eq!(runtime.call(&plugin.and_back, 1).await.unwrap(), 8);
+ assert_eq!(runtime.call(&plugin.imports, 1).await.unwrap(), 8);
+ assert_eq!(runtime.call(&plugin.half_async, 4).await.unwrap(), 2);
+ assert_eq!(
+ runtime
+ .call(&plugin.echo_async, "eko".into())
+ .await
+ .unwrap(),
+ "eko\n"
+ );
+ }
+ .block_on()
+ }
+}
@@ -0,0 +1,564 @@
+use std::future::Future;
+
+use std::{fs::File, marker::PhantomData, path::Path};
+
+use anyhow::{anyhow, Error};
+use serde::{de::DeserializeOwned, Serialize};
+
+use wasi_common::{dir, file};
+use wasmtime::Memory;
+use wasmtime::{
+ AsContext, AsContextMut, Caller, Config, Engine, Extern, Instance, Linker, Module, Store, Trap,
+ TypedFunc,
+};
+use wasmtime_wasi::{Dir, WasiCtx, WasiCtxBuilder};
+
+/// Represents a resource currently managed by the plugin, like a file descriptor.
+pub struct PluginResource(u32);
+
+/// This is the buffer that is used Host side.
+/// Note that it mirrors the functionality of
+/// the `__Buffer` found in the `plugin/src/lib.rs` prelude.
+struct WasiBuffer {
+ ptr: u32,
+ len: u32,
+}
+
+impl WasiBuffer {
+ pub fn into_u64(self) -> u64 {
+ ((self.ptr as u64) << 32) | (self.len as u64)
+ }
+
+ pub fn from_u64(packed: u64) -> Self {
+ WasiBuffer {
+ ptr: (packed >> 32) as u32,
+ len: packed as u32,
+ }
+ }
+}
+
+/// Represents a typed WebAssembly function.
+pub struct WasiFn<A: Serialize, R: DeserializeOwned> {
+ function: TypedFunc<u64, u64>,
+ _function_type: PhantomData<fn(A) -> R>,
+}
+
+impl<A: Serialize, R: DeserializeOwned> Copy for WasiFn<A, R> {}
+
+impl<A: Serialize, R: DeserializeOwned> Clone for WasiFn<A, R> {
+ fn clone(&self) -> Self {
+ Self {
+ function: self.function,
+ _function_type: PhantomData,
+ }
+ }
+}
+
+/// This struct is used to build a new [`Plugin`], using the builder pattern.
+/// Create a new default plugin with `PluginBuilder::new_with_default_ctx`,
+/// and add host-side exported functions using `host_function` and `host_function_async`.
+/// Finalize the plugin by calling [`init`].
+pub struct PluginBuilder {
+ wasi_ctx: WasiCtx,
+ engine: Engine,
+ linker: Linker<WasiCtxAlloc>,
+}
+
+/// Creates a default engine for compiling Wasm.
+/// N.B.: this must create the same `Engine` as
+/// the `create_default_engine` function
+/// in `plugin_runtime/build.rs`.
+pub fn create_default_engine() -> Result<Engine, Error> {
+ let mut config = Config::default();
+ config.async_support(true);
+ // config.epoch_interruption(true);
+ Engine::new(&config)
+}
+
+impl PluginBuilder {
+ /// Create a new [`PluginBuilder`] with the given WASI context.
+    /// Using the default context is a safe bet, see [`new_with_default_ctx`].
+ pub fn new(wasi_ctx: WasiCtx) -> Result<Self, Error> {
+ let engine = create_default_engine()?;
+ let linker = Linker::new(&engine);
+
+ Ok(PluginBuilder {
+ // host_functions: HashMap::new(),
+ wasi_ctx,
+ engine,
+ linker,
+ })
+ }
+
+ /// Create a new `PluginBuilder` that inherits the
+ /// host processes' access to `stdout` and `stderr`.
+ pub fn new_with_default_ctx() -> Result<Self, Error> {
+ let wasi_ctx = WasiCtxBuilder::new()
+ .inherit_stdout()
+ .inherit_stderr()
+ .build();
+ Self::new(wasi_ctx)
+ }
+
+ /// Add an `async` host function. See [`host_function`] for details.
+ pub fn host_function_async<F, A, R, Fut>(
+ mut self,
+ name: &str,
+ function: F,
+ ) -> Result<Self, Error>
+ where
+ F: Fn(A) -> Fut + Send + Sync + 'static,
+ Fut: Future<Output = R> + Send + 'static,
+ A: DeserializeOwned + Send + 'static,
+ R: Serialize + Send + Sync + 'static,
+ {
+ self.linker.func_wrap1_async(
+ "env",
+ &format!("__{}", name),
+ move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
+                // TODO: use try block once available
+ let result: Result<(WasiBuffer, Memory, _), Trap> = (|| {
+ // grab a handle to the memory
+ let mut plugin_memory = match caller.get_export("memory") {
+ Some(Extern::Memory(mem)) => mem,
+ _ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
+ };
+
+ let buffer = WasiBuffer::from_u64(packed_buffer);
+
+ // get the args passed from Guest
+ let args =
+ Plugin::buffer_to_bytes(&mut plugin_memory, caller.as_context(), &buffer)?;
+
+ let args: A = Plugin::deserialize_to_type(&args)?;
+
+ // Call the Host-side function
+ let result = function(args);
+
+ Ok((buffer, plugin_memory, result))
+ })();
+
+ Box::new(async move {
+ let (buffer, mut plugin_memory, future) = result?;
+
+ let result: R = future.await;
+ let result: Result<Vec<u8>, Error> = Plugin::serialize_to_bytes(result)
+ .map_err(|_| {
+ Trap::new("Could not serialize value returned from function").into()
+ });
+ let result = result?;
+
+ Plugin::buffer_to_free(caller.data().free_buffer(), &mut caller, buffer)
+ .await?;
+
+ let buffer = Plugin::bytes_to_buffer(
+ caller.data().alloc_buffer(),
+ &mut plugin_memory,
+ &mut caller,
+ result,
+ )
+ .await?;
+
+ Ok(buffer.into_u64())
+ })
+ },
+ )?;
+ Ok(self)
+ }
+
+ /// Add a new host function to the given `PluginBuilder`.
+ /// A host function is a function defined host-side, in Rust,
+ /// that is accessible guest-side, in WebAssembly.
+ /// You can specify host-side functions to import using
+    /// the `#[import]` macro attribute:
+    /// ```ignore
+    /// #[import]
+ /// fn total(counts: Vec<f64>) -> f64;
+ /// ```
+ /// When loading a plugin, you need to provide all host functions the plugin imports:
+ /// ```ignore
+    /// let plugin = PluginBuilder::new_with_default_ctx()
+ /// .host_function("total", |counts| counts.iter().fold(0.0, |tot, n| tot + n))
+ /// // and so on...
+ /// ```
+ /// And that's a wrap!
+ pub fn host_function<A, R>(
+ mut self,
+ name: &str,
+ function: impl Fn(A) -> R + Send + Sync + 'static,
+ ) -> Result<Self, Error>
+ where
+ A: DeserializeOwned + Send,
+ R: Serialize + Send + Sync,
+ {
+ self.linker.func_wrap1_async(
+ "env",
+ &format!("__{}", name),
+ move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
+                // TODO: use try block once available
+ let result: Result<(WasiBuffer, Memory, Vec<u8>), Trap> = (|| {
+ // grab a handle to the memory
+ let mut plugin_memory = match caller.get_export("memory") {
+ Some(Extern::Memory(mem)) => mem,
+ _ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
+ };
+
+ let buffer = WasiBuffer::from_u64(packed_buffer);
+
+ // get the args passed from Guest
+ let args = Plugin::buffer_to_type(&mut plugin_memory, &mut caller, &buffer)?;
+
+ // Call the Host-side function
+ let result: R = function(args);
+
+ // Serialize the result back to guest
+ let result = Plugin::serialize_to_bytes(result).map_err(|_| {
+ Trap::new("Could not serialize value returned from function")
+ })?;
+
+ Ok((buffer, plugin_memory, result))
+ })();
+
+ Box::new(async move {
+ let (buffer, mut plugin_memory, result) = result?;
+
+ Plugin::buffer_to_free(caller.data().free_buffer(), &mut caller, buffer)
+ .await?;
+
+ let buffer = Plugin::bytes_to_buffer(
+ caller.data().alloc_buffer(),
+ &mut plugin_memory,
+ &mut caller,
+ result,
+ )
+ .await?;
+
+ Ok(buffer.into_u64())
+ })
+ },
+ )?;
+ Ok(self)
+ }
+
+ /// Initializes a [`Plugin`] from a given compiled Wasm module.
+ /// Both binary (`.wasm`) and text (`.wat`) module formats are supported.
+ pub async fn init<T: AsRef<[u8]>>(self, precompiled: bool, module: T) -> Result<Plugin, Error> {
+ Plugin::init(precompiled, module.as_ref().to_vec(), self).await
+ }
+}
+
+#[derive(Copy, Clone)]
+struct WasiAlloc {
+ alloc_buffer: TypedFunc<u32, u32>,
+ free_buffer: TypedFunc<u64, ()>,
+}
+
+struct WasiCtxAlloc {
+ wasi_ctx: WasiCtx,
+ alloc: Option<WasiAlloc>,
+}
+
+impl WasiCtxAlloc {
+ fn alloc_buffer(&self) -> TypedFunc<u32, u32> {
+ self.alloc
+ .expect("allocator has been not initialized, cannot allocate buffer!")
+ .alloc_buffer
+ }
+
+ fn free_buffer(&self) -> TypedFunc<u64, ()> {
+ self.alloc
+ .expect("allocator has been not initialized, cannot free buffer!")
+ .free_buffer
+ }
+
+ fn init_alloc(&mut self, alloc: WasiAlloc) {
+ self.alloc = Some(alloc)
+ }
+}
+
+/// Represents a WebAssembly plugin, with access to the WebAssembly System Interface.
+/// Build a new plugin using [`PluginBuilder`].
+pub struct Plugin {
+ store: Store<WasiCtxAlloc>,
+ instance: Instance,
+}
+
+impl Plugin {
+ /// Dumps the *entirety* of Wasm linear memory to `stdout`.
+ /// Don't call this unless you're debugging a memory issue!
+ pub fn dump_memory(data: &[u8]) {
+ for (i, byte) in data.iter().enumerate() {
+ if i % 32 == 0 {
+ println!();
+ }
+ if i % 4 == 0 {
+ print!("|");
+ }
+ if *byte == 0 {
+ print!("__")
+ } else {
+ print!("{:02x}", byte);
+ }
+ }
+ println!();
+ }
+
+ async fn init(
+ precompiled: bool,
+ module: Vec<u8>,
+ plugin: PluginBuilder,
+ ) -> Result<Self, Error> {
+ // initialize the WebAssembly System Interface context
+ let engine = plugin.engine;
+ let mut linker = plugin.linker;
+ wasmtime_wasi::add_to_linker(&mut linker, |s| &mut s.wasi_ctx)?;
+
+ // create a store, note that we can't initialize the allocator,
+ // because we can't grab the functions until initialized.
+ let mut store: Store<WasiCtxAlloc> = Store::new(
+ &engine,
+ WasiCtxAlloc {
+ wasi_ctx: plugin.wasi_ctx,
+ alloc: None,
+ },
+ );
+ // store.epoch_deadline_async_yield_and_update(todo!());
+ let module = if precompiled {
+ unsafe { Module::deserialize(&engine, module)? }
+ } else {
+ Module::new(&engine, module)?
+ };
+
+ // load the provided module into the asynchronous runtime
+ linker.module_async(&mut store, "", &module).await?;
+ let instance = linker.instantiate_async(&mut store, &module).await?;
+
+ // now that the module is initialized,
+ // we can initialize the store's allocator
+ let alloc_buffer = instance.get_typed_func(&mut store, "__alloc_buffer")?;
+ let free_buffer = instance.get_typed_func(&mut store, "__free_buffer")?;
+ store.data_mut().init_alloc(WasiAlloc {
+ alloc_buffer,
+ free_buffer,
+ });
+
+ Ok(Plugin { store, instance })
+ }
+
+    /// Attaches a file or directory at the given system path to the runtime.
+ /// Note that the resource must be freed by calling `remove_resource` afterwards.
+ pub fn attach_path<T: AsRef<Path>>(&mut self, path: T) -> Result<PluginResource, Error> {
+ // grab the WASI context
+ let ctx = self.store.data_mut();
+
+ // open the file we want, and convert it into the right type
+ // this is a footgun and a half
+ let file = File::open(&path).unwrap();
+ let dir = Dir::from_std_file(file);
+ let dir = Box::new(wasmtime_wasi::dir::Dir::from_cap_std(dir));
+
+ // grab an empty file descriptor, specify capabilities
+ let fd = ctx.wasi_ctx.table().push(Box::new(()))?;
+ let caps = dir::DirCaps::all();
+ let file_caps = file::FileCaps::all();
+
+ // insert the directory at the given fd,
+ // return a handle to the resource
+ ctx.wasi_ctx
+ .insert_dir(fd, dir, caps, file_caps, path.as_ref().to_path_buf());
+ Ok(PluginResource(fd))
+ }
+
+    /// Removes a resource from the runtime, returning an error if it did not exist.
+ /// Currently the only resource we support is adding scoped paths (e.g. folders and files)
+ /// to plugins using [`attach_path`].
+ pub fn remove_resource(&mut self, resource: PluginResource) -> Result<(), Error> {
+ self.store
+ .data_mut()
+ .wasi_ctx
+ .table()
+ .delete(resource.0)
+ .ok_or_else(|| anyhow!("Resource did not exist, but a valid handle was passed in"))?;
+ Ok(())
+ }
+
+ // So this call function is kinda a dance, I figured it'd be a good idea to document it.
+ // the high level is we take a serde type, serialize it to a byte array,
+ // (we're doing this using bincode for now)
+ // then toss that byte array into webassembly.
+ // webassembly grabs that byte array, does some magic,
+ // and serializes the result into yet another byte array.
+ // we then grab *that* result byte array and deserialize it into a result.
+ //
+ // phew...
+ //
+    // now the problem is, webassembly doesn't support buffers.
+ // only really like i32s, that's it (yeah, it's sad. Not even unsigned!)
+ // (ok, I'm exaggerating a bit).
+ //
+ // the Wasm function that this calls must have a very specific signature:
+ //
+ // fn(pointer to byte array: i32, length of byte array: i32)
+ // -> pointer to (
+ // pointer to byte_array: i32,
+ // length of byte array: i32,
+ // ): i32
+ //
+ // This pair `(pointer to byte array, length of byte array)` is called a `Buffer`
+ // and can be found in the cargo_test plugin.
+ //
+ // so on the wasm side, we grab the two parameters to the function,
+ // stuff them into a `Buffer`,
+ // and then pray to the `unsafe` Rust gods above that a valid byte array pops out.
+ //
+ // On the flip side, when returning from a wasm function,
+ // we convert whatever serialized result we get into byte array,
+ // which we stuff into a Buffer and allocate on the heap,
+ // which pointer to we then return.
+ // Note the double indirection!
+ //
+ // So when returning from a function, we actually leak memory *twice*:
+ //
+ // 1) once when we leak the byte array
+ // 2) again when we leak the allocated `Buffer`
+ //
+ // This isn't a problem because Wasm stops executing after the function returns,
+ // so the heap is still valid for our inspection when we want to pull things out.
+
+ /// Serializes a given type to bytes.
+ fn serialize_to_bytes<A: Serialize>(item: A) -> Result<Vec<u8>, Error> {
+ // serialize the argument using bincode
+ let bytes = bincode::serialize(&item)?;
+ Ok(bytes)
+ }
+
+ /// Deserializes a given type from bytes.
+ fn deserialize_to_type<R: DeserializeOwned>(bytes: &[u8]) -> Result<R, Error> {
+        // deserialize the argument using bincode
+ let bytes = bincode::deserialize(bytes)?;
+ Ok(bytes)
+ }
+
+ // fn deserialize<R: DeserializeOwned>(
+ // plugin_memory: &mut Memory,
+ // mut store: impl AsContextMut<Data = WasiCtxAlloc>,
+ // buffer: WasiBuffer,
+ // ) -> Result<R, Error> {
+ // let buffer_start = buffer.ptr as usize;
+ // let buffer_end = buffer_start + buffer.len as usize;
+
+ // // read the buffer at this point into a byte array
+ // // deserialize the byte array into the provided serde type
+ // let item = &plugin_memory.data(store.as_context())[buffer_start..buffer_end];
+ // let item = bincode::deserialize(bytes)?;
+ // Ok(item)
+ // }
+
+ /// Takes an item, allocates a buffer, serializes the argument to that buffer,
+ /// and returns a (ptr, len) pair to that buffer.
+ async fn bytes_to_buffer(
+ alloc_buffer: TypedFunc<u32, u32>,
+ plugin_memory: &mut Memory,
+ mut store: impl AsContextMut<Data = WasiCtxAlloc>,
+ item: Vec<u8>,
+ ) -> Result<WasiBuffer, Error> {
+ // allocate a buffer and write the argument to that buffer
+ let len = item.len() as u32;
+ let ptr = alloc_buffer.call_async(&mut store, len).await?;
+ plugin_memory.write(&mut store, ptr as usize, &item)?;
+ Ok(WasiBuffer { ptr, len })
+ }
+
+ /// Takes a `(ptr, len)` pair and returns the corresponding deserialized buffer.
+ fn buffer_to_type<R: DeserializeOwned>(
+ plugin_memory: &Memory,
+ store: impl AsContext<Data = WasiCtxAlloc>,
+ buffer: &WasiBuffer,
+ ) -> Result<R, Error> {
+ let buffer_start = buffer.ptr as usize;
+ let buffer_end = buffer_start + buffer.len as usize;
+
+ // read the buffer at this point into a byte array
+ // deserialize the byte array into the provided serde type
+ let result = &plugin_memory.data(store.as_context())[buffer_start..buffer_end];
+ let result = bincode::deserialize(result)?;
+
+ Ok(result)
+ }
+
+    /// Takes a `(ptr, len)` pair and returns the corresponding slice of raw bytes.
+ fn buffer_to_bytes<'a>(
+ plugin_memory: &'a Memory,
+ store: wasmtime::StoreContext<'a, WasiCtxAlloc>,
+ buffer: &'a WasiBuffer,
+ ) -> Result<&'a [u8], Error> {
+ let buffer_start = buffer.ptr as usize;
+ let buffer_end = buffer_start + buffer.len as usize;
+
+ // read the buffer at this point into a byte array
+ // deserialize the byte array into the provided serde type
+ let result = &plugin_memory.data(store)[buffer_start..buffer_end];
+ Ok(result)
+ }
+
+ async fn buffer_to_free(
+ free_buffer: TypedFunc<u64, ()>,
+ mut store: impl AsContextMut<Data = WasiCtxAlloc>,
+ buffer: WasiBuffer,
+ ) -> Result<(), Error> {
+ // deallocate the argument buffer
+ Ok(free_buffer
+ .call_async(&mut store, buffer.into_u64())
+ .await?)
+ }
+
+ /// Retrieves the handle to a function of a given type.
+ pub fn function<A: Serialize, R: DeserializeOwned, T: AsRef<str>>(
+ &mut self,
+ name: T,
+ ) -> Result<WasiFn<A, R>, Error> {
+ let fun_name = format!("__{}", name.as_ref());
+ let fun = self
+ .instance
+ .get_typed_func::<u64, u64, _>(&mut self.store, &fun_name)?;
+ Ok(WasiFn {
+ function: fun,
+ _function_type: PhantomData,
+ })
+ }
+
+ /// Asynchronously calls a function defined Guest-side.
+ pub async fn call<A: Serialize, R: DeserializeOwned>(
+ &mut self,
+ handle: &WasiFn<A, R>,
+ arg: A,
+ ) -> Result<R, Error> {
+ let mut plugin_memory = self
+ .instance
+ .get_memory(&mut self.store, "memory")
+ .ok_or_else(|| anyhow!("Could not grab slice of plugin memory"))?;
+
+ // write the argument to linear memory
+        // this returns a (ptr, length) pair
+ let arg_buffer = Self::bytes_to_buffer(
+ self.store.data().alloc_buffer(),
+ &mut plugin_memory,
+ &mut self.store,
+ Self::serialize_to_bytes(arg)?,
+ )
+ .await?;
+
+ // call the function, passing in the buffer and its length
+        // this returns a ptr to a (ptr, length) pair
+ let result_buffer = handle
+ .function
+ .call_async(&mut self.store, arg_buffer.into_u64())
+ .await?;
+
+ Self::buffer_to_type(
+ &mut plugin_memory,
+ &mut self.store,
+ &WasiBuffer::from_u64(result_buffer),
+ )
+ }
+}
@@ -389,7 +389,7 @@ impl LspCommand for GetDefinition {
this.open_local_buffer_via_lsp(
target_uri,
language_server.server_id(),
- lsp_adapter.name(),
+ lsp_adapter.name.clone(),
cx,
)
})
@@ -610,7 +610,7 @@ impl LspCommand for GetReferences {
this.open_local_buffer_via_lsp(
lsp_location.uri,
language_server.server_id(),
- lsp_adapter.name(),
+ lsp_adapter.name.clone(),
cx,
)
})
@@ -23,9 +23,9 @@ use language::{
deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
serialize_version,
},
- range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
- Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
- Language, LanguageRegistry, LanguageServerName, LineEnding, LocalFile, LspAdapter,
+ range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
+ CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,
+ File as _, Language, LanguageRegistry, LanguageServerName, LineEnding, LocalFile,
OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16,
Transaction,
};
@@ -124,6 +124,7 @@ pub struct Project {
buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
nonce: u128,
initialized_persistent_state: bool,
+ _maintain_buffer_languages: Task<()>,
}
#[derive(Error, Debug)]
@@ -199,7 +200,7 @@ pub enum Event {
pub enum LanguageServerState {
Starting(Task<Option<Arc<LanguageServer>>>),
Running {
- adapter: Arc<dyn LspAdapter>,
+ adapter: Arc<CachedLspAdapter>,
server: Arc<LanguageServer>,
},
}
@@ -472,6 +473,7 @@ impl Project {
opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
client_subscriptions: Vec::new(),
_subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
+ _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
active_entry: None,
languages,
client,
@@ -549,6 +551,7 @@ impl Project {
loading_local_worktrees: Default::default(),
active_entry: None,
collaborators: Default::default(),
+ _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
languages,
user_store: user_store.clone(),
project_store,
@@ -733,9 +736,9 @@ impl Project {
for language in self.languages.to_vec() {
if let Some(lsp_adapter) = language.lsp_adapter() {
if !settings.enable_language_server(Some(&language.name())) {
- let lsp_name = lsp_adapter.name();
+ let lsp_name = &lsp_adapter.name;
for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
- if lsp_name == *started_lsp_name {
+ if lsp_name == started_lsp_name {
language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
}
}
@@ -1628,6 +1631,7 @@ impl Project {
})
}
+ /// LanguageServerName is owned, because it is inserted into a map
fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
@@ -1817,10 +1821,10 @@ impl Project {
if let Some(language) = buffer.language() {
let worktree_id = file.worktree_id(cx);
if let Some(adapter) = language.lsp_adapter() {
- language_id = adapter.id_for_language(language.name().as_ref());
+ language_id = adapter.id_for_language.clone();
language_server = self
.language_server_ids
- .get(&(worktree_id, adapter.name()))
+ .get(&(worktree_id, adapter.name.clone()))
.and_then(|id| self.language_servers.get(&id))
.and_then(|server_state| {
if let LanguageServerState::Running { server, .. } = server_state {
@@ -1984,10 +1988,7 @@ impl Project {
// that don't support a disk-based progress token.
let (lsp_adapter, language_server) =
self.language_server_for_buffer(buffer.read(cx), cx)?;
- if lsp_adapter
- .disk_based_diagnostics_progress_token()
- .is_none()
- {
+ if lsp_adapter.disk_based_diagnostics_progress_token.is_none() {
let server_id = language_server.server_id();
self.disk_based_diagnostics_finished(server_id, cx);
self.broadcast_language_server_update(
@@ -2007,7 +2008,7 @@ impl Project {
fn language_servers_for_worktree(
&self,
worktree_id: WorktreeId,
- ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
+ ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
self.language_server_ids
.iter()
.filter_map(move |((language_server_worktree_id, _), id)| {
@@ -2022,6 +2023,34 @@ impl Project {
})
}
+ fn maintain_buffer_languages(
+ languages: &LanguageRegistry,
+ cx: &mut ModelContext<Project>,
+ ) -> Task<()> {
+ let mut subscription = languages.subscribe();
+ cx.spawn_weak(|project, mut cx| async move {
+ while let Some(()) = subscription.next().await {
+ if let Some(project) = project.upgrade(&cx) {
+ project.update(&mut cx, |project, cx| {
+ let mut buffers_without_language = Vec::new();
+ for buffer in project.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ if buffer.read(cx).language().is_none() {
+ buffers_without_language.push(buffer);
+ }
+ }
+ }
+
+ for buffer in buffers_without_language {
+ project.assign_language_to_buffer(&buffer, cx);
+ project.register_buffer_with_language_server(&buffer, cx);
+ }
+ });
+ }
+ }
+ })
+ }
+
fn assign_language_to_buffer(
&mut self,
buffer: &ModelHandle<Buffer>,
@@ -2062,7 +2091,7 @@ impl Project {
} else {
return;
};
- let key = (worktree_id, adapter.name());
+ let key = (worktree_id, adapter.name.clone());
self.language_server_ids
.entry(key.clone())
@@ -2080,25 +2109,33 @@ impl Project {
LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
let language_server = language_server?.await.log_err()?;
let language_server = language_server
- .initialize(adapter.initialization_options())
+ .initialize(adapter.initialization_options.clone())
.await
.log_err()?;
let this = this.upgrade(&cx)?;
- let disk_based_diagnostics_progress_token =
- adapter.disk_based_diagnostics_progress_token();
language_server
.on_notification::<lsp::notification::PublishDiagnostics, _>({
let this = this.downgrade();
let adapter = adapter.clone();
- move |params, mut cx| {
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, cx| {
- this.on_lsp_diagnostics_published(
- server_id, params, &adapter, cx,
- );
- });
- }
+ move |mut params, cx| {
+ let this = this.clone();
+ let adapter = adapter.clone();
+ cx.spawn(|mut cx| async move {
+ adapter.process_diagnostics(&mut params).await;
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.update_diagnostics(
+ server_id,
+ params,
+ &adapter.disk_based_diagnostic_sources,
+ cx,
+ )
+ .log_err();
+ });
+ }
+ })
+ .detach();
}
})
.detach();
@@ -2178,6 +2215,9 @@ impl Project {
})
.detach();
+ let disk_based_diagnostics_progress_token =
+ adapter.disk_based_diagnostics_progress_token.clone();
+
language_server
.on_notification::<lsp::notification::Progress, _>({
let this = this.downgrade();
@@ -2187,7 +2227,7 @@ impl Project {
this.on_lsp_progress(
params,
server_id,
- disk_based_diagnostics_progress_token,
+ disk_based_diagnostics_progress_token.clone(),
cx,
);
});
@@ -2261,7 +2301,7 @@ impl Project {
continue;
};
if file.worktree.read(cx).id() != key.0
- || language.lsp_adapter().map(|a| a.name())
+ || language.lsp_adapter().map(|a| a.name.clone())
!= Some(key.1.clone())
{
continue;
@@ -2274,14 +2314,15 @@ impl Project {
.or_insert_with(|| vec![(0, buffer.text_snapshot())]);
let (version, initial_snapshot) = versions.last().unwrap();
let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
- let language_id =
- adapter.id_for_language(language.name().as_ref());
language_server
.notify::<lsp::notification::DidOpenTextDocument>(
lsp::DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
uri,
- language_id.unwrap_or_default(),
+ adapter
+ .id_for_language
+ .clone()
+ .unwrap_or_default(),
*version,
initial_snapshot.text(),
),
@@ -2407,7 +2448,7 @@ impl Project {
return;
};
- let server_name = adapter.name();
+ let server_name = adapter.name.clone();
let stop = self.stop_language_server(worktree_id, server_name.clone(), cx);
cx.spawn_weak(|this, mut cx| async move {
let (original_root_path, orphaned_worktrees) = stop.await;
@@ -2440,28 +2481,11 @@ impl Project {
.detach();
}
- fn on_lsp_diagnostics_published(
- &mut self,
- server_id: usize,
- mut params: lsp::PublishDiagnosticsParams,
- adapter: &Arc<dyn LspAdapter>,
- cx: &mut ModelContext<Self>,
- ) {
- adapter.process_diagnostics(&mut params);
- self.update_diagnostics(
- server_id,
- params,
- adapter.disk_based_diagnostic_sources(),
- cx,
- )
- .log_err();
- }
-
fn on_lsp_progress(
&mut self,
progress: lsp::ProgressParams,
server_id: usize,
- disk_based_diagnostics_progress_token: Option<&str>,
+ disk_based_diagnostics_progress_token: Option<String>,
cx: &mut ModelContext<Self>,
) {
let token = match progress.token {
@@ -2485,9 +2509,12 @@ impl Project {
return;
}
+ let is_disk_based_diagnostics_progress =
+ Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_ref().map(|x| &**x);
+
match progress {
lsp::WorkDoneProgress::Begin(report) => {
- if Some(token.as_str()) == disk_based_diagnostics_progress_token {
+ if is_disk_based_diagnostics_progress {
language_server_status.has_pending_diagnostic_updates = true;
self.disk_based_diagnostics_started(server_id, cx);
self.broadcast_language_server_update(
@@ -2518,7 +2545,7 @@ impl Project {
}
}
lsp::WorkDoneProgress::Report(report) => {
- if Some(token.as_str()) != disk_based_diagnostics_progress_token {
+ if !is_disk_based_diagnostics_progress {
self.on_lsp_work_progress(
server_id,
token.clone(),
@@ -2544,7 +2571,7 @@ impl Project {
lsp::WorkDoneProgress::End(_) => {
language_server_status.progress_tokens.remove(&token);
- if Some(token.as_str()) == disk_based_diagnostics_progress_token {
+ if is_disk_based_diagnostics_progress {
language_server_status.has_pending_diagnostic_updates = false;
self.disk_based_diagnostics_finished(server_id, cx);
self.broadcast_language_server_update(
@@ -2622,7 +2649,7 @@ impl Project {
this: WeakModelHandle<Self>,
params: lsp::ApplyWorkspaceEditParams,
server_id: usize,
- adapter: Arc<dyn LspAdapter>,
+ adapter: Arc<CachedLspAdapter>,
language_server: Arc<LanguageServer>,
mut cx: AsyncAppContext,
) -> Result<lsp::ApplyWorkspaceEditResponse> {
@@ -2693,7 +2720,7 @@ impl Project {
&mut self,
language_server_id: usize,
params: lsp::PublishDiagnosticsParams,
- disk_based_sources: &[&str],
+ disk_based_sources: &[String],
cx: &mut ModelContext<Self>,
) -> Result<()> {
let abs_path = params
@@ -2735,9 +2762,8 @@ impl Project {
);
} else {
let group_id = post_inc(&mut self.next_diagnostic_group_id);
- let is_disk_based = source.map_or(false, |source| {
- disk_based_sources.contains(&source.as_str())
- });
+ let is_disk_based =
+ source.map_or(false, |source| disk_based_sources.contains(&source));
sources_by_group_id.insert(group_id, source);
primary_diagnostic_group_ids
@@ -3241,7 +3267,6 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<DocumentHighlight>>> {
let position = position.to_point_utf16(buffer.read(cx));
-
self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
}
@@ -3288,7 +3313,7 @@ impl Project {
} else {
return Ok(Default::default());
};
- this.read_with(&cx, |this, cx| {
+ let symbols = this.read_with(&cx, |this, cx| {
let mut symbols = Vec::new();
for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
@@ -3304,30 +3329,38 @@ impl Project {
path = relativize_path(&worktree_abs_path, &abs_path);
}
- let label = this
- .languages
- .select_language(&path)
- .and_then(|language| {
- language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
- })
- .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
let signature = this.symbol_signature(worktree_id, &path);
+ let language = this.languages.select_language(&path);
+ let language_server_name = adapter.name.clone();
- Some(Symbol {
- source_worktree_id,
- worktree_id,
- language_server_name: adapter.name(),
- name: lsp_symbol.name,
- kind: lsp_symbol.kind,
- label,
- path,
- range: range_from_lsp(lsp_symbol.location.range),
- signature,
+ Some(async move {
+ let label = if let Some(language) = language {
+ language
+ .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
+ .await
+ } else {
+ None
+ };
+
+ Symbol {
+ source_worktree_id,
+ worktree_id,
+ language_server_name,
+ label: label.unwrap_or_else(|| {
+ CodeLabel::plain(lsp_symbol.name.clone(), None)
+ }),
+ kind: lsp_symbol.kind,
+ name: lsp_symbol.name,
+ path,
+ range: range_from_lsp(lsp_symbol.location.range),
+ signature,
+ }
})
}));
}
- Ok(symbols)
- })
+ symbols
+ });
+ Ok(futures::future::join_all(symbols).await)
})
} else if let Some(project_id) = self.remote_id() {
let request = self.client.request(proto::GetProjectSymbols {
@@ -3338,14 +3371,18 @@ impl Project {
let response = request.await?;
let mut symbols = Vec::new();
if let Some(this) = this.upgrade(&cx) {
- this.read_with(&cx, |this, _| {
- symbols.extend(
- response
- .symbols
- .into_iter()
- .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
- );
- })
+ let new_symbols = this.read_with(&cx, |this, _| {
+ response
+ .symbols
+ .into_iter()
+ .map(|symbol| this.deserialize_symbol(symbol))
+ .collect::<Vec<_>>()
+ });
+ symbols = futures::future::join_all(new_symbols)
+ .await
+ .into_iter()
+ .filter_map(|symbol| symbol.log_err())
+ .collect::<Vec<_>>();
}
Ok(symbols)
})
@@ -3475,92 +3512,95 @@ impl Project {
Default::default()
};
- source_buffer_handle.read_with(&cx, |this, _| {
+ let completions = source_buffer_handle.read_with(&cx, |this, _| {
let snapshot = this.snapshot();
let clipped_position = this.clip_point_utf16(position, Bias::Left);
let mut range_for_token = None;
- Ok(completions
- .into_iter()
- .filter_map(|lsp_completion| {
- // For now, we can only handle additional edits if they are returned
- // when resolving the completion, not if they are present initially.
- if lsp_completion
- .additional_text_edits
- .as_ref()
- .map_or(false, |edits| !edits.is_empty())
- {
- return None;
- }
+ completions.into_iter().filter_map(move |lsp_completion| {
+ // For now, we can only handle additional edits if they are returned
+ // when resolving the completion, not if they are present initially.
+ if lsp_completion
+ .additional_text_edits
+ .as_ref()
+ .map_or(false, |edits| !edits.is_empty())
+ {
+ return None;
+ }
- let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref()
- {
- // If the language server provides a range to overwrite, then
- // check that the range is valid.
- Some(lsp::CompletionTextEdit::Edit(edit)) => {
- let range = range_from_lsp(edit.range);
- let start = snapshot.clip_point_utf16(range.start, Bias::Left);
- let end = snapshot.clip_point_utf16(range.end, Bias::Left);
- if start != range.start || end != range.end {
- log::info!("completion out of expected range");
- return None;
- }
- (
- snapshot.anchor_before(start)..snapshot.anchor_after(end),
- edit.new_text.clone(),
- )
- }
- // If the language server does not provide a range, then infer
- // the range based on the syntax tree.
- None => {
- if position != clipped_position {
- log::info!("completion out of expected range");
- return None;
- }
- let Range { start, end } = range_for_token
- .get_or_insert_with(|| {
- let offset = position.to_offset(&snapshot);
- let (range, kind) = snapshot.surrounding_word(offset);
- if kind == Some(CharKind::Word) {
- range
- } else {
- offset..offset
- }
- })
- .clone();
- let text = lsp_completion
- .insert_text
- .as_ref()
- .unwrap_or(&lsp_completion.label)
- .clone();
- (
- snapshot.anchor_before(start)..snapshot.anchor_after(end),
- text.clone(),
- )
+ let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
+ // If the language server provides a range to overwrite, then
+ // check that the range is valid.
+ Some(lsp::CompletionTextEdit::Edit(edit)) => {
+ let range = range_from_lsp(edit.range);
+ let start = snapshot.clip_point_utf16(range.start, Bias::Left);
+ let end = snapshot.clip_point_utf16(range.end, Bias::Left);
+ if start != range.start || end != range.end {
+ log::info!("completion out of expected range");
+ return None;
}
- Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
- log::info!("unsupported insert/replace completion");
+ (
+ snapshot.anchor_before(start)..snapshot.anchor_after(end),
+ edit.new_text.clone(),
+ )
+ }
+ // If the language server does not provide a range, then infer
+ // the range based on the syntax tree.
+ None => {
+ if position != clipped_position {
+ log::info!("completion out of expected range");
return None;
}
- };
+ let Range { start, end } = range_for_token
+ .get_or_insert_with(|| {
+ let offset = position.to_offset(&snapshot);
+ let (range, kind) = snapshot.surrounding_word(offset);
+ if kind == Some(CharKind::Word) {
+ range
+ } else {
+ offset..offset
+ }
+ })
+ .clone();
+ let text = lsp_completion
+ .insert_text
+ .as_ref()
+ .unwrap_or(&lsp_completion.label)
+ .clone();
+ (
+ snapshot.anchor_before(start)..snapshot.anchor_after(end),
+ text.clone(),
+ )
+ }
+ Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
+ log::info!("unsupported insert/replace completion");
+ return None;
+ }
+ };
- LineEnding::normalize(&mut new_text);
- Some(Completion {
+ LineEnding::normalize(&mut new_text);
+ let language = language.clone();
+ Some(async move {
+ let label = if let Some(language) = language {
+ language.label_for_completion(&lsp_completion).await
+ } else {
+ None
+ };
+ Completion {
old_range,
new_text,
- label: language
- .as_ref()
- .and_then(|l| l.label_for_completion(&lsp_completion))
- .unwrap_or_else(|| {
- CodeLabel::plain(
- lsp_completion.label.clone(),
- lsp_completion.filter_text.as_deref(),
- )
- }),
+ label: label.unwrap_or_else(|| {
+ CodeLabel::plain(
+ lsp_completion.label.clone(),
+ lsp_completion.filter_text.as_deref(),
+ )
+ }),
lsp_completion,
- })
+ }
})
- .collect())
- })
+ })
+ });
+
+ Ok(futures::future::join_all(completions).await)
})
} else if let Some(project_id) = self.remote_id() {
let rpc = self.client.clone();
@@ -3579,13 +3619,10 @@ impl Project {
})
.await;
- response
- .completions
- .into_iter()
- .map(|completion| {
- language::proto::deserialize_completion(completion, language.as_ref())
- })
- .collect()
+ let completions = response.completions.into_iter().map(|completion| {
+ language::proto::deserialize_completion(completion, language.clone())
+ });
+ futures::future::try_join_all(completions).await
})
} else {
Task::ready(Ok(Default::default()))
@@ -3881,7 +3918,7 @@ impl Project {
this: ModelHandle<Self>,
edit: lsp::WorkspaceEdit,
push_to_history: bool,
- lsp_adapter: Arc<dyn LspAdapter>,
+ lsp_adapter: Arc<CachedLspAdapter>,
language_server: Arc<LanguageServer>,
cx: &mut AsyncAppContext,
) -> Result<ProjectTransaction> {
@@ -3959,7 +3996,7 @@ impl Project {
this.open_local_buffer_via_lsp(
op.text_document.uri,
language_server.server_id(),
- lsp_adapter.name(),
+ lsp_adapter.name.clone(),
cx,
)
})
@@ -5190,7 +5227,7 @@ impl Project {
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
- let apply_additional_edits = this.update(&mut cx, |this, cx| {
+ let (buffer, completion) = this.update(&mut cx, |this, cx| {
let buffer = this
.opened_buffers
.get(&envelope.payload.buffer_id)
@@ -5202,13 +5239,17 @@ impl Project {
.payload
.completion
.ok_or_else(|| anyhow!("invalid completion"))?,
- language,
- )?;
- Ok::<_, anyhow::Error>(
- this.apply_additional_edits_for_completion(buffer, completion, false, cx),
- )
+ language.cloned(),
+ );
+ Ok::<_, anyhow::Error>((buffer, completion))
})?;
+ let completion = completion.await?;
+
+ let apply_additional_edits = this.update(&mut cx, |this, cx| {
+ this.apply_additional_edits_for_completion(buffer, completion, false, cx)
+ });
+
Ok(proto::ApplyCompletionAdditionalEditsResponse {
transaction: apply_additional_edits
.await?
@@ -5390,8 +5431,10 @@ impl Project {
.payload
.symbol
.ok_or_else(|| anyhow!("invalid symbol"))?;
+ let symbol = this
+ .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
+ .await?;
let symbol = this.read_with(&cx, |this, _| {
- let symbol = this.deserialize_symbol(symbol)?;
let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
if signature == symbol.signature {
Ok(symbol)
@@ -5596,34 +5639,52 @@ impl Project {
})
}
- fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
- let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
- let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
- let start = serialized_symbol
- .start
- .ok_or_else(|| anyhow!("invalid start"))?;
- let end = serialized_symbol
- .end
- .ok_or_else(|| anyhow!("invalid end"))?;
- let kind = unsafe { mem::transmute(serialized_symbol.kind) };
- let path = PathBuf::from(serialized_symbol.path);
- let language = self.languages.select_language(&path);
- Ok(Symbol {
- source_worktree_id,
- worktree_id,
- language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
- label: language
- .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
- .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
- name: serialized_symbol.name,
- path,
- range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
- kind,
- signature: serialized_symbol
- .signature
- .try_into()
- .map_err(|_| anyhow!("invalid signature"))?,
- })
+ fn deserialize_symbol(
+ &self,
+ serialized_symbol: proto::Symbol,
+ ) -> impl Future<Output = Result<Symbol>> {
+ let languages = self.languages.clone();
+ async move {
+ let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
+ let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
+ let start = serialized_symbol
+ .start
+ .ok_or_else(|| anyhow!("invalid start"))?;
+ let end = serialized_symbol
+ .end
+ .ok_or_else(|| anyhow!("invalid end"))?;
+ let kind = unsafe { mem::transmute(serialized_symbol.kind) };
+ let path = PathBuf::from(serialized_symbol.path);
+ let language = languages.select_language(&path);
+ Ok(Symbol {
+ source_worktree_id,
+ worktree_id,
+ language_server_name: LanguageServerName(
+ serialized_symbol.language_server_name.into(),
+ ),
+ label: {
+ match language {
+ Some(language) => {
+ language
+ .label_for_symbol(&serialized_symbol.name, kind)
+ .await
+ }
+ None => None,
+ }
+ .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
+ },
+
+ name: serialized_symbol.name,
+ path,
+ range: PointUtf16::new(start.row, start.column)
+ ..PointUtf16::new(end.row, end.column),
+ kind,
+ signature: serialized_symbol
+ .signature
+ .try_into()
+ .map_err(|_| anyhow!("invalid signature"))?,
+ })
+ }
}
async fn handle_buffer_saved(
@@ -5830,10 +5891,11 @@ impl Project {
&self,
buffer: &Buffer,
cx: &AppContext,
- ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
+ ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
+ let name = language.lsp_adapter()?.name.clone();
let worktree_id = file.worktree_id(cx);
- let key = (worktree_id, language.lsp_adapter()?.name());
+ let key = (worktree_id, name);
if let Some(server_id) = self.language_server_ids.get(&key) {
if let Some(LanguageServerState::Running { adapter, server }) =
@@ -49,7 +49,10 @@ async fn test_symlinks(cx: &mut gpui::TestAppContext) {
}
#[gpui::test]
-async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
+async fn test_managing_language_servers(
+ deterministic: Arc<Deterministic>,
+ cx: &mut gpui::TestAppContext,
+) {
cx.foreground().forbid_parking();
let mut rust_language = Language::new(
@@ -68,28 +71,32 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
},
None,
);
- let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
- name: "the-rust-language-server",
- capabilities: lsp::ServerCapabilities {
- completion_provider: Some(lsp::CompletionOptions {
- trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
+ let mut fake_rust_servers = rust_language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-rust-language-server",
+ capabilities: lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
+ ..Default::default()
+ }),
..Default::default()
- }),
+ },
..Default::default()
- },
- ..Default::default()
- });
- let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
- name: "the-json-language-server",
- capabilities: lsp::ServerCapabilities {
- completion_provider: Some(lsp::CompletionOptions {
- trigger_characters: Some(vec![":".to_string()]),
+ }))
+ .await;
+ let mut fake_json_servers = json_language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-json-language-server",
+ capabilities: lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![":".to_string()]),
+ ..Default::default()
+ }),
..Default::default()
- }),
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -104,10 +111,6 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await;
let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
- project.update(cx, |project, _| {
- project.languages.add(Arc::new(rust_language));
- project.languages.add(Arc::new(json_language));
- });
// Open a buffer without an associated language server.
let toml_buffer = project
@@ -117,13 +120,27 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.unwrap();
- // Open a buffer with an associated language server.
+ // Open a buffer with an associated language server before the language for it has been loaded.
let rust_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/the-root/test.rs", cx)
})
.await
.unwrap();
+ rust_buffer.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.language().map(|l| l.name()), None);
+ });
+
+ // Now we add the languages to the project, and ensure they get assigned to all
+ // the relevant open buffers.
+ project.update(cx, |project, _| {
+ project.languages.add(Arc::new(json_language));
+ project.languages.add(Arc::new(rust_language));
+ });
+ deterministic.run_until_parked();
+ rust_buffer.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
+ });
// A server is started up, and it is notified about Rust files.
let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
@@ -593,11 +610,13 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- disk_based_diagnostics_progress_token: Some(progress_token),
- disk_based_diagnostics_sources: &["disk"],
- ..Default::default()
- });
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ disk_based_diagnostics_progress_token: Some(progress_token.into()),
+ disk_based_diagnostics_sources: vec!["disk".into()],
+ ..Default::default()
+ }))
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -716,11 +735,13 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
},
None,
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- disk_based_diagnostics_sources: &["disk"],
- disk_based_diagnostics_progress_token: Some(progress_token),
- ..Default::default()
- });
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ disk_based_diagnostics_sources: vec!["disk".into()],
+ disk_based_diagnostics_progress_token: Some(progress_token.into()),
+ ..Default::default()
+ }))
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
@@ -795,10 +816,12 @@ async fn test_toggling_enable_language_server(
},
None,
);
- let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
- name: "rust-lsp",
- ..Default::default()
- });
+ let mut fake_rust_servers = rust
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "rust-lsp",
+ ..Default::default()
+ }))
+ .await;
let mut js = Language::new(
LanguageConfig {
name: Arc::from("JavaScript"),
@@ -807,10 +830,12 @@ async fn test_toggling_enable_language_server(
},
None,
);
- let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
- name: "js-lsp",
- ..Default::default()
- });
+ let mut fake_js_servers = js
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "js-lsp",
+ ..Default::default()
+ }))
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
@@ -916,10 +941,12 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- disk_based_diagnostics_sources: &["disk"],
- ..Default::default()
- });
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ disk_based_diagnostics_sources: vec!["disk".into()],
+ ..Default::default()
+ }))
+ .await;
let text = "
fn a() { A }
@@ -1258,7 +1285,7 @@ async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
let text = "
fn a() {
@@ -1637,7 +1664,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -1736,7 +1763,7 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_typescript::language_typescript()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -1820,7 +1847,7 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_typescript::language_typescript()),
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -1873,7 +1900,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
},
None,
);
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+ let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -2801,16 +2828,18 @@ async fn test_rename(cx: &mut gpui::TestAppContext) {
},
Some(tree_sitter_rust::language()),
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
- capabilities: lsp::ServerCapabilities {
- rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
- prepare_provider: Some(true),
- work_done_progress_options: Default::default(),
- })),
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
+ prepare_provider: Some(true),
+ work_done_progress_options: Default::default(),
+ })),
+ ..Default::default()
+ },
..Default::default()
- },
- ..Default::default()
- });
+ }))
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -290,7 +290,9 @@ mod tests {
},
None,
);
- let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter::default());
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::<FakeLspAdapter>::default())
+ .await;
let fs = FakeFs::new(cx.background());
fs.insert_tree("/dir", json!({ "test.rs": "" })).await;
@@ -28,7 +28,7 @@ rsa = "0.4"
serde = { version = "1.0", features = ["derive", "rc"] }
smol-timeout = "0.6"
tracing = { version = "0.1.34", features = ["log"] }
-zstd = "0.9"
+zstd = "0.11"
[build-dependencies]
prost-build = "0.9"
@@ -39,6 +39,7 @@ journal = { path = "../journal" }
language = { path = "../language" }
lsp = { path = "../lsp" }
outline = { path = "../outline" }
+plugin_runtime = { path = "../plugin_runtime" }
project = { path = "../project" }
project_panel = { path = "../project_panel" }
project_symbols = { path = "../project_symbols" }
@@ -1,12 +1,13 @@
-use gpui::Task;
+use gpui::executor::Background;
pub use language::*;
use rust_embed::RustEmbed;
use std::{borrow::Cow, str, sync::Arc};
+use util::ResultExt;
mod c;
mod go;
mod installation;
-mod json;
+mod language_plugin;
mod python;
mod rust;
mod typescript;
@@ -16,28 +17,30 @@ mod typescript;
#[exclude = "*.rs"]
struct LanguageDir;
-pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegistry {
- let languages = LanguageRegistry::new(login_shell_env_loaded);
+pub async fn init(languages: Arc<LanguageRegistry>, executor: Arc<Background>) {
for (name, grammar, lsp_adapter) in [
(
"c",
tree_sitter_c::language(),
- Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
+ Some(CachedLspAdapter::new(c::CLspAdapter).await),
),
(
"cpp",
tree_sitter_cpp::language(),
- Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
+ Some(CachedLspAdapter::new(c::CLspAdapter).await),
),
(
"go",
tree_sitter_go::language(),
- Some(Arc::new(go::GoLspAdapter) as Arc<dyn LspAdapter>),
+ Some(CachedLspAdapter::new(go::GoLspAdapter).await),
),
(
"json",
tree_sitter_json::language(),
- Some(Arc::new(json::JsonLspAdapter)),
+ match language_plugin::new_json(executor).await.log_err() {
+ Some(lang) => Some(CachedLspAdapter::new(lang).await),
+ None => None,
+ },
),
(
"markdown",
@@ -47,12 +50,12 @@ pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegi
(
"python",
tree_sitter_python::language(),
- Some(Arc::new(python::PythonLspAdapter)),
+ Some(CachedLspAdapter::new(python::PythonLspAdapter).await),
),
(
"rust",
tree_sitter_rust::language(),
- Some(Arc::new(rust::RustLspAdapter)),
+ Some(CachedLspAdapter::new(rust::RustLspAdapter).await),
),
(
"toml",
@@ -62,28 +65,27 @@ pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegi
(
"tsx",
tree_sitter_typescript::language_tsx(),
- Some(Arc::new(typescript::TypeScriptLspAdapter)),
+ Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
),
(
"typescript",
tree_sitter_typescript::language_typescript(),
- Some(Arc::new(typescript::TypeScriptLspAdapter)),
+ Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
),
(
"javascript",
tree_sitter_typescript::language_tsx(),
- Some(Arc::new(typescript::TypeScriptLspAdapter)),
+ Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
),
] {
languages.add(Arc::new(language(name, grammar, lsp_adapter)));
}
- languages
}
pub(crate) fn language(
name: &str,
grammar: tree_sitter::Language,
- lsp_adapter: Option<Arc<dyn LspAdapter>>,
+ lsp_adapter: Option<Arc<CachedLspAdapter>>,
) -> Language {
let config = toml::from_slice(
&LanguageDir::get(&format!("{}/config.toml", name))
@@ -1,102 +1,91 @@
use super::installation::{latest_github_release, GitHubLspBinaryVersion};
use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
use client::http::HttpClient;
-use futures::{future::BoxFuture, FutureExt, StreamExt};
+use futures::StreamExt;
pub use language::*;
use smol::fs::{self, File};
-use std::{
- any::Any,
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
+use std::{any::Any, path::PathBuf, sync::Arc};
+use util::ResultExt;
pub struct CLspAdapter;
+#[async_trait]
impl super::LspAdapter for CLspAdapter {
- fn name(&self) -> LanguageServerName {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName("clangd".into())
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
- async move {
- let release = latest_github_release("clangd/clangd", http).await?;
- let asset_name = format!("clangd-mac-{}.zip", release.name);
- let asset = release
- .assets
- .iter()
- .find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
- let version = GitHubLspBinaryVersion {
- name: release.name,
- url: asset.browser_download_url.clone(),
- };
- Ok(Box::new(version) as Box<_>)
- }
- .boxed()
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ let release = latest_github_release("clangd/clangd", http).await?;
+ let asset_name = format!("clangd-mac-{}.zip", release.name);
+ let asset = release
+ .assets
+ .iter()
+ .find(|asset| asset.name == asset_name)
+ .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
+ let version = GitHubLspBinaryVersion {
+ name: release.name,
+ url: asset.browser_download_url.clone(),
+ };
+ Ok(Box::new(version) as Box<_>)
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
- async move {
- let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
- let version_dir = container_dir.join(format!("clangd_{}", version.name));
- let binary_path = version_dir.join("bin/clangd");
+ let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
+ let version_dir = container_dir.join(format!("clangd_{}", version.name));
+ let binary_path = version_dir.join("bin/clangd");
- if fs::metadata(&binary_path).await.is_err() {
- let mut response = http
- .get(&version.url, Default::default(), true)
- .await
- .context("error downloading release")?;
- let mut file = File::create(&zip_path).await?;
- if !response.status().is_success() {
- Err(anyhow!(
- "download failed with status {}",
- response.status().to_string()
- ))?;
- }
- futures::io::copy(response.body_mut(), &mut file).await?;
+ if fs::metadata(&binary_path).await.is_err() {
+ let mut response = http
+ .get(&version.url, Default::default(), true)
+ .await
+ .context("error downloading release")?;
+ let mut file = File::create(&zip_path).await?;
+ if !response.status().is_success() {
+ Err(anyhow!(
+ "download failed with status {}",
+ response.status().to_string()
+ ))?;
+ }
+ futures::io::copy(response.body_mut(), &mut file).await?;
- let unzip_status = smol::process::Command::new("unzip")
- .current_dir(&container_dir)
- .arg(&zip_path)
- .output()
- .await?
- .status;
- if !unzip_status.success() {
- Err(anyhow!("failed to unzip clangd archive"))?;
- }
+ let unzip_status = smol::process::Command::new("unzip")
+ .current_dir(&container_dir)
+ .arg(&zip_path)
+ .output()
+ .await?
+ .status;
+ if !unzip_status.success() {
+ Err(anyhow!("failed to unzip clangd archive"))?;
+ }
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != version_dir {
- fs::remove_dir_all(&entry_path).await.log_err();
- }
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).await.log_err();
}
}
}
}
-
- Ok(binary_path)
}
- .boxed()
+
+ Ok(binary_path)
}
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
let mut last_clangd_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -115,12 +104,12 @@ impl super::LspAdapter for CLspAdapter {
clangd_dir
))
}
- }
+ })()
+ .await
.log_err()
- .boxed()
}
- fn label_for_completion(
+ async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
language: &Language,
@@ -197,7 +186,7 @@ impl super::LspAdapter for CLspAdapter {
Some(CodeLabel::plain(label.to_string(), None))
}
- fn label_for_symbol(
+ async fn label_for_symbol(
&self,
name: &str,
kind: lsp::SymbolKind,
@@ -1,19 +1,14 @@
use super::installation::latest_github_release;
use anyhow::{anyhow, Result};
+use async_trait::async_trait;
use client::http::HttpClient;
-use futures::{future::BoxFuture, FutureExt, StreamExt};
+use futures::StreamExt;
pub use language::*;
use lazy_static::lazy_static;
use regex::Regex;
use smol::{fs, process};
-use std::{
- any::Any,
- ops::Range,
- path::{Path, PathBuf},
- str,
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
+use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc};
+use util::ResultExt;
#[derive(Copy, Clone)]
pub struct GoLspAdapter;
@@ -22,104 +17,96 @@ lazy_static! {
static ref GOPLS_VERSION_REGEX: Regex = Regex::new(r"\d+\.\d+\.\d+").unwrap();
}
+#[async_trait]
impl super::LspAdapter for GoLspAdapter {
- fn name(&self) -> LanguageServerName {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName("gopls".into())
}
- fn server_args(&self) -> &[&str] {
- &["-mode=stdio"]
+ async fn server_args(&self) -> Vec<String> {
+ vec!["-mode=stdio".into()]
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
- async move {
- let release = latest_github_release("golang/tools", http).await?;
- let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
- if version.is_none() {
- log::warn!(
- "couldn't infer gopls version from github release name '{}'",
- release.name
- );
- }
- Ok(Box::new(version) as Box<_>)
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ let release = latest_github_release("golang/tools", http).await?;
+ let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
+ if version.is_none() {
+ log::warn!(
+ "couldn't infer gopls version from github release name '{}'",
+ release.name
+ );
}
- .boxed()
+ Ok(Box::new(version) as Box<_>)
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
let version = version.downcast::<Option<String>>().unwrap();
let this = *self;
- async move {
- if let Some(version) = *version {
- let binary_path = container_dir.join(&format!("gopls_{version}"));
- if let Ok(metadata) = fs::metadata(&binary_path).await {
- if metadata.is_file() {
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != binary_path
- && entry.file_name() != "gobin"
- {
- fs::remove_file(&entry_path).await.log_err();
- }
+ if let Some(version) = *version {
+ let binary_path = container_dir.join(&format!("gopls_{version}"));
+ if let Ok(metadata) = fs::metadata(&binary_path).await {
+ if metadata.is_file() {
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != binary_path
+ && entry.file_name() != "gobin"
+ {
+ fs::remove_file(&entry_path).await.log_err();
}
}
}
-
- return Ok(binary_path.to_path_buf());
}
+
+ return Ok(binary_path.to_path_buf());
}
- } else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
- return Ok(path.to_path_buf());
}
+ } else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
+ return Ok(path.to_path_buf());
+ }
- let gobin_dir = container_dir.join("gobin");
- fs::create_dir_all(&gobin_dir).await?;
- let install_output = process::Command::new("go")
- .env("GO111MODULE", "on")
- .env("GOBIN", &gobin_dir)
- .args(["install", "golang.org/x/tools/gopls@latest"])
- .output()
- .await?;
- if !install_output.status.success() {
- Err(anyhow!("failed to install gopls. Is go installed?"))?;
- }
+ let gobin_dir = container_dir.join("gobin");
+ fs::create_dir_all(&gobin_dir).await?;
+ let install_output = process::Command::new("go")
+ .env("GO111MODULE", "on")
+ .env("GOBIN", &gobin_dir)
+ .args(["install", "golang.org/x/tools/gopls@latest"])
+ .output()
+ .await?;
+ if !install_output.status.success() {
+ Err(anyhow!("failed to install gopls. Is go installed?"))?;
+ }
- let installed_binary_path = gobin_dir.join("gopls");
- let version_output = process::Command::new(&installed_binary_path)
- .arg("version")
- .output()
- .await
- .map_err(|e| anyhow!("failed to run installed gopls binary {:?}", e))?;
- let version_stdout = str::from_utf8(&version_output.stdout)
- .map_err(|_| anyhow!("gopls version produced invalid utf8"))?;
- let version = GOPLS_VERSION_REGEX
- .find(version_stdout)
- .ok_or_else(|| anyhow!("failed to parse gopls version output"))?
- .as_str();
- let binary_path = container_dir.join(&format!("gopls_{version}"));
- fs::rename(&installed_binary_path, &binary_path).await?;
+ let installed_binary_path = gobin_dir.join("gopls");
+ let version_output = process::Command::new(&installed_binary_path)
+ .arg("version")
+ .output()
+ .await
+ .map_err(|e| anyhow!("failed to run installed gopls binary {:?}", e))?;
+ let version_stdout = str::from_utf8(&version_output.stdout)
+ .map_err(|_| anyhow!("gopls version produced invalid utf8"))?;
+ let version = GOPLS_VERSION_REGEX
+ .find(version_stdout)
+ .ok_or_else(|| anyhow!("failed to parse gopls version output"))?
+ .as_str();
+ let binary_path = container_dir.join(&format!("gopls_{version}"));
+ fs::rename(&installed_binary_path, &binary_path).await?;
- Ok(binary_path.to_path_buf())
- }
- .boxed()
+ Ok(binary_path.to_path_buf())
}
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -139,12 +126,12 @@ impl super::LspAdapter for GoLspAdapter {
} else {
Err(anyhow!("no cached binary"))
}
- }
+ })()
+ .await
.log_err()
- .boxed()
}
- fn label_for_completion(
+ async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
language: &Language,
@@ -244,7 +231,7 @@ impl super::LspAdapter for GoLspAdapter {
None
}
- fn label_for_symbol(
+ async fn label_for_symbol(
&self,
name: &str,
kind: lsp::SymbolKind,
@@ -322,12 +309,12 @@ mod tests {
use gpui::color::Color;
use theme::SyntaxTheme;
- #[test]
- fn test_go_label_for_completion() {
+ #[gpui::test]
+ async fn test_go_label_for_completion() {
let language = language(
"go",
tree_sitter_go::language(),
- Some(Arc::new(GoLspAdapter)),
+ Some(CachedLspAdapter::new(GoLspAdapter).await),
);
let theme = SyntaxTheme::new(vec![
@@ -347,12 +334,14 @@ mod tests {
let highlight_field = grammar.highlight_id_for_name("property").unwrap();
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::FUNCTION),
- label: "Hello".to_string(),
- detail: Some("func(a B) c.D".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::FUNCTION),
+ label: "Hello".to_string(),
+ detail: Some("func(a B) c.D".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "Hello(a B) c.D".to_string(),
filter_range: 0..5,
@@ -366,12 +355,14 @@ mod tests {
// Nested methods
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::METHOD),
- label: "one.two.Three".to_string(),
- detail: Some("func() [3]interface{}".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::METHOD),
+ label: "one.two.Three".to_string(),
+ detail: Some("func() [3]interface{}".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "one.two.Three() [3]interface{}".to_string(),
filter_range: 0..13,
@@ -385,12 +376,14 @@ mod tests {
// Nested fields
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::FIELD),
- label: "two.Three".to_string(),
- detail: Some("a.Bcd".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::FIELD),
+ label: "two.Three".to_string(),
+ detail: Some("a.Bcd".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "two.Three a.Bcd".to_string(),
filter_range: 0..9,
@@ -1,120 +0,0 @@
-use super::installation::{npm_install_packages, npm_package_latest_version};
-use anyhow::{anyhow, Context, Result};
-use client::http::HttpClient;
-use futures::{future::BoxFuture, FutureExt, StreamExt};
-use language::{LanguageServerName, LspAdapter};
-use serde_json::json;
-use smol::fs;
-use std::{
- any::Any,
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
-
-pub struct JsonLspAdapter;
-
-impl JsonLspAdapter {
- const BIN_PATH: &'static str =
- "node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";
-}
-
-impl LspAdapter for JsonLspAdapter {
- fn name(&self) -> LanguageServerName {
- LanguageServerName("vscode-json-languageserver".into())
- }
-
- fn server_args(&self) -> &[&str] {
- &["--stdio"]
- }
-
- fn fetch_latest_server_version(
- &self,
- _: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Any + Send>>> {
- async move {
- Ok(Box::new(npm_package_latest_version("vscode-json-languageserver").await?) as Box<_>)
- }
- .boxed()
- }
-
- fn fetch_server_binary(
- &self,
- version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
- let version = version.downcast::<String>().unwrap();
- async move {
- let version_dir = container_dir.join(version.as_str());
- fs::create_dir_all(&version_dir)
- .await
- .context("failed to create version directory")?;
- let binary_path = version_dir.join(Self::BIN_PATH);
-
- if fs::metadata(&binary_path).await.is_err() {
- npm_install_packages(
- [("vscode-json-languageserver", version.as_str())],
- &version_dir,
- )
- .await?;
-
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != version_dir {
- fs::remove_dir_all(&entry_path).await.log_err();
- }
- }
- }
- }
- }
-
- Ok(binary_path)
- }
- .boxed()
- }
-
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
- let mut last_version_dir = None;
- let mut entries = fs::read_dir(&container_dir).await?;
- while let Some(entry) = entries.next().await {
- let entry = entry?;
- if entry.file_type().await?.is_dir() {
- last_version_dir = Some(entry.path());
- }
- }
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
- let bin_path = last_version_dir.join(Self::BIN_PATH);
- if bin_path.exists() {
- Ok(bin_path)
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- last_version_dir
- ))
- }
- }
- .log_err()
- .boxed()
- }
-
- fn initialization_options(&self) -> Option<serde_json::Value> {
- Some(json!({
- "provideFormatter": true
- }))
- }
-
- fn id_for_language(&self, name: &str) -> Option<String> {
- if name == "JSON" {
- Some("jsonc".into())
- } else {
- None
- }
- }
-}
@@ -0,0 +1,145 @@
+use anyhow::{anyhow, Result};
+use async_trait::async_trait;
+use client::http::HttpClient;
+use futures::lock::Mutex;
+use gpui::executor::Background;
+use language::{LanguageServerName, LspAdapter};
+use plugin_runtime::{Plugin, PluginBuilder, WasiFn};
+use std::{any::Any, path::PathBuf, sync::Arc};
+use util::ResultExt;
+
+pub async fn new_json(executor: Arc<Background>) -> Result<PluginLspAdapter> {
+ let plugin = PluginBuilder::new_with_default_ctx()?
+ .host_function_async("command", |command: String| async move {
+ let mut args = command.split(' ');
+ let command = args.next().unwrap();
+ smol::process::Command::new(command)
+ .args(args)
+ .output()
+ .await
+ .log_err()
+ .map(|output| output.stdout)
+ })?
+ .init(
+ true,
+ include_bytes!("../../../../plugins/bin/json_language.wasm.pre"),
+ )
+ .await?;
+ PluginLspAdapter::new(plugin, executor).await
+}
+
+pub struct PluginLspAdapter {
+ name: WasiFn<(), String>,
+ server_args: WasiFn<(), Vec<String>>,
+ fetch_latest_server_version: WasiFn<(), Option<String>>,
+ fetch_server_binary: WasiFn<(PathBuf, String), Result<PathBuf, String>>,
+ cached_server_binary: WasiFn<PathBuf, Option<PathBuf>>,
+ initialization_options: WasiFn<(), String>,
+ executor: Arc<Background>,
+ runtime: Arc<Mutex<Plugin>>,
+}
+
+impl PluginLspAdapter {
+ pub async fn new(mut plugin: Plugin, executor: Arc<Background>) -> Result<Self> {
+ Ok(Self {
+ name: plugin.function("name")?,
+ server_args: plugin.function("server_args")?,
+ fetch_latest_server_version: plugin.function("fetch_latest_server_version")?,
+ fetch_server_binary: plugin.function("fetch_server_binary")?,
+ cached_server_binary: plugin.function("cached_server_binary")?,
+ initialization_options: plugin.function("initialization_options")?,
+ executor,
+ runtime: Arc::new(Mutex::new(plugin)),
+ })
+ }
+}
+
+#[async_trait]
+impl LspAdapter for PluginLspAdapter {
+ async fn name(&self) -> LanguageServerName {
+ let name: String = self
+ .runtime
+ .lock()
+ .await
+ .call(&self.name, ())
+ .await
+ .unwrap();
+ LanguageServerName(name.into())
+ }
+
+ async fn server_args<'a>(&'a self) -> Vec<String> {
+ self.runtime
+ .lock()
+ .await
+ .call(&self.server_args, ())
+ .await
+ .unwrap()
+ }
+
+ async fn fetch_latest_server_version(
+ &self,
+ _: Arc<dyn HttpClient>,
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ let runtime = self.runtime.clone();
+ let function = self.fetch_latest_server_version;
+ self.executor
+ .spawn(async move {
+ let mut runtime = runtime.lock().await;
+ let versions: Result<Option<String>> =
+ runtime.call::<_, Option<String>>(&function, ()).await;
+ versions
+ .map_err(|e| anyhow!("{}", e))?
+ .ok_or_else(|| anyhow!("Could not fetch latest server version"))
+ .map(|v| Box::new(v) as Box<_>)
+ })
+ .await
+ }
+
+ async fn fetch_server_binary(
+ &self,
+ version: Box<dyn 'static + Send + Any>,
+ _: Arc<dyn HttpClient>,
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
+ let version = *version.downcast::<String>().unwrap();
+ let runtime = self.runtime.clone();
+ let function = self.fetch_server_binary;
+ self.executor
+ .spawn(async move {
+ let mut runtime = runtime.lock().await;
+ let handle = runtime.attach_path(&container_dir)?;
+ let result: Result<PathBuf, String> =
+ runtime.call(&function, (container_dir, version)).await?;
+ runtime.remove_resource(handle)?;
+ result.map_err(|e| anyhow!("{}", e))
+ })
+ .await
+ }
+
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ let runtime = self.runtime.clone();
+ let function = self.cached_server_binary;
+
+ self.executor
+ .spawn(async move {
+ let mut runtime = runtime.lock().await;
+ let handle = runtime.attach_path(&container_dir).ok()?;
+ let result: Option<PathBuf> = runtime.call(&function, container_dir).await.ok()?;
+ runtime.remove_resource(handle).ok()?;
+ result
+ })
+ .await
+ }
+
+ async fn initialization_options(&self) -> Option<serde_json::Value> {
+ let string: String = self
+ .runtime
+ .lock()
+ .await
+ .call(&self.initialization_options, ())
+ .await
+ .log_err()?;
+
+ serde_json::from_str(&string).ok()
+ }
+}
@@ -1,15 +1,12 @@
use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
use client::http::HttpClient;
-use futures::{future::BoxFuture, FutureExt, StreamExt};
+use futures::StreamExt;
use language::{LanguageServerName, LspAdapter};
use smol::fs;
-use std::{
- any::Any,
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
+use std::{any::Any, path::PathBuf, sync::Arc};
+use util::ResultExt;
pub struct PythonLspAdapter;
@@ -17,61 +14,56 @@ impl PythonLspAdapter {
const BIN_PATH: &'static str = "node_modules/pyright/langserver.index.js";
}
+#[async_trait]
impl LspAdapter for PythonLspAdapter {
- fn name(&self) -> LanguageServerName {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName("pyright".into())
}
- fn server_args(&self) -> &[&str] {
- &["--stdio"]
+ async fn server_args(&self) -> Vec<String> {
+ vec!["--stdio".into()]
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
_: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Any + Send>>> {
- async move { Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>) }.boxed()
+ ) -> Result<Box<dyn 'static + Any + Send>> {
+ Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>)
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
let version = version.downcast::<String>().unwrap();
- async move {
- let version_dir = container_dir.join(version.as_str());
- fs::create_dir_all(&version_dir)
- .await
- .context("failed to create version directory")?;
- let binary_path = version_dir.join(Self::BIN_PATH);
-
- if fs::metadata(&binary_path).await.is_err() {
- npm_install_packages([("pyright", version.as_str())], &version_dir).await?;
-
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != version_dir {
- fs::remove_dir_all(&entry_path).await.log_err();
- }
+ let version_dir = container_dir.join(version.as_str());
+ fs::create_dir_all(&version_dir)
+ .await
+ .context("failed to create version directory")?;
+ let binary_path = version_dir.join(Self::BIN_PATH);
+
+ if fs::metadata(&binary_path).await.is_err() {
+ npm_install_packages([("pyright", version.as_str())], &version_dir).await?;
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).await.log_err();
}
}
}
}
-
- Ok(binary_path)
}
- .boxed()
+
+ Ok(binary_path)
}
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -90,12 +82,12 @@ impl LspAdapter for PythonLspAdapter {
last_version_dir
))
}
- }
+ })()
+ .await
.log_err()
- .boxed()
}
- fn label_for_completion(
+ async fn label_for_completion(
&self,
item: &lsp::CompletionItem,
language: &language::Language,
@@ -116,7 +108,7 @@ impl LspAdapter for PythonLspAdapter {
})
}
- fn label_for_symbol(
+ async fn label_for_symbol(
&self,
name: &str,
kind: lsp::SymbolKind,
@@ -1,116 +1,102 @@
use super::installation::{latest_github_release, GitHubLspBinaryVersion};
use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder;
+use async_trait::async_trait;
use client::http::HttpClient;
-use futures::{future::BoxFuture, io::BufReader, FutureExt, StreamExt};
+use futures::{io::BufReader, StreamExt};
pub use language::*;
use lazy_static::lazy_static;
use regex::Regex;
use smol::fs::{self, File};
-use std::{
- any::Any,
- borrow::Cow,
- env::consts,
- path::{Path, PathBuf},
- str,
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
+use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
+use util::ResultExt;
pub struct RustLspAdapter;
+#[async_trait]
impl LspAdapter for RustLspAdapter {
- fn name(&self) -> LanguageServerName {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName("rust-analyzer".into())
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
http: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
- async move {
- let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?;
- let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
- let asset = release
- .assets
- .iter()
- .find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
- let version = GitHubLspBinaryVersion {
- name: release.name,
- url: asset.browser_download_url.clone(),
- };
- Ok(Box::new(version) as Box<_>)
- }
- .boxed()
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?;
+ let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
+ let asset = release
+ .assets
+ .iter()
+ .find(|asset| asset.name == asset_name)
+ .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
+ let version = GitHubLspBinaryVersion {
+ name: release.name,
+ url: asset.browser_download_url.clone(),
+ };
+ Ok(Box::new(version) as Box<_>)
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
- async move {
- let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
- let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
-
- if fs::metadata(&destination_path).await.is_err() {
- let mut response = http
- .get(&version.url, Default::default(), true)
- .await
- .map_err(|err| anyhow!("error downloading release: {}", err))?;
- let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
- let mut file = File::create(&destination_path).await?;
- futures::io::copy(decompressed_bytes, &mut file).await?;
- fs::set_permissions(
- &destination_path,
- <fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755),
- )
- .await?;
-
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != destination_path {
- fs::remove_file(&entry_path).await.log_err();
- }
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
+ let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
+ let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
+
+ if fs::metadata(&destination_path).await.is_err() {
+ let mut response = http
+ .get(&version.url, Default::default(), true)
+ .await
+ .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
+ let mut file = File::create(&destination_path).await?;
+ futures::io::copy(decompressed_bytes, &mut file).await?;
+ fs::set_permissions(
+ &destination_path,
+ <fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755),
+ )
+ .await?;
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != destination_path {
+ fs::remove_file(&entry_path).await.log_err();
}
}
}
}
-
- Ok(destination_path)
}
- .boxed()
+
+ Ok(destination_path)
}
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
last = Some(entry?.path());
}
last.ok_or_else(|| anyhow!("no cached binary"))
- }
+ })()
+ .await
.log_err()
- .boxed()
}
- fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
- &["rustc"]
+ async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
+ vec!["rustc".into()]
}
- fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
- Some("rustAnalyzer/cargo check")
+ async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
+ Some("rustAnalyzer/cargo check".into())
}
- fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
+ async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
lazy_static! {
static ref REGEX: Regex = Regex::new("(?m)`([^`]+)\n`$").unwrap();
}
@@ -130,7 +116,7 @@ impl LspAdapter for RustLspAdapter {
}
}
- fn label_for_completion(
+ async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
language: &Language,
@@ -206,7 +192,7 @@ impl LspAdapter for RustLspAdapter {
None
}
- fn label_for_symbol(
+ async fn label_for_symbol(
&self,
name: &str,
kind: lsp::SymbolKind,
@@ -269,12 +255,12 @@ impl LspAdapter for RustLspAdapter {
#[cfg(test)]
mod tests {
use super::*;
- use crate::languages::{language, LspAdapter};
+ use crate::languages::{language, CachedLspAdapter};
use gpui::{color::Color, MutableAppContext};
use theme::SyntaxTheme;
- #[test]
- fn test_process_rust_diagnostics() {
+ #[gpui::test]
+ async fn test_process_rust_diagnostics() {
let mut params = lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a").unwrap(),
version: None,
@@ -297,7 +283,7 @@ mod tests {
},
],
};
- RustLspAdapter.process_diagnostics(&mut params);
+ RustLspAdapter.process_diagnostics(&mut params).await;
assert_eq!(params.diagnostics[0].message, "use of moved value `a`");
@@ -314,12 +300,12 @@ mod tests {
);
}
- #[test]
- fn test_rust_label_for_completion() {
+ #[gpui::test]
+ async fn test_rust_label_for_completion() {
let language = language(
"rust",
tree_sitter_rust::language(),
- Some(Arc::new(RustLspAdapter)),
+ Some(CachedLspAdapter::new(RustLspAdapter).await),
);
let grammar = language.grammar().unwrap();
let theme = SyntaxTheme::new(vec![
@@ -337,12 +323,14 @@ mod tests {
let highlight_field = grammar.highlight_id_for_name("property").unwrap();
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::FUNCTION),
- label: "hello(…)".to_string(),
- detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::FUNCTION),
+ label: "hello(…)".to_string(),
+ detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
filter_range: 0..5,
@@ -358,12 +346,14 @@ mod tests {
);
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::FIELD),
- label: "len".to_string(),
- detail: Some("usize".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::FIELD),
+ label: "len".to_string(),
+ detail: Some("usize".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "len: usize".to_string(),
filter_range: 0..3,
@@ -372,12 +362,14 @@ mod tests {
);
assert_eq!(
- language.label_for_completion(&lsp::CompletionItem {
- kind: Some(lsp::CompletionItemKind::FUNCTION),
- label: "hello(…)".to_string(),
- detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
- ..Default::default()
- }),
+ language
+ .label_for_completion(&lsp::CompletionItem {
+ kind: Some(lsp::CompletionItemKind::FUNCTION),
+ label: "hello(…)".to_string(),
+ detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
+ ..Default::default()
+ })
+ .await,
Some(CodeLabel {
text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
filter_range: 0..5,
@@ -393,12 +385,12 @@ mod tests {
);
}
- #[test]
- fn test_rust_label_for_symbol() {
+ #[gpui::test]
+ async fn test_rust_label_for_symbol() {
let language = language(
"rust",
tree_sitter_rust::language(),
- Some(Arc::new(RustLspAdapter)),
+ Some(CachedLspAdapter::new(RustLspAdapter).await),
);
let grammar = language.grammar().unwrap();
let theme = SyntaxTheme::new(vec![
@@ -415,7 +407,9 @@ mod tests {
let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
assert_eq!(
- language.label_for_symbol("hello", lsp::SymbolKind::FUNCTION),
+ language
+ .label_for_symbol("hello", lsp::SymbolKind::FUNCTION)
+ .await,
Some(CodeLabel {
text: "fn hello".to_string(),
filter_range: 3..8,
@@ -424,7 +418,9 @@ mod tests {
);
assert_eq!(
- language.label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER),
+ language
+ .label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER)
+ .await,
Some(CodeLabel {
text: "type World".to_string(),
filter_range: 5..10,
@@ -1,16 +1,13 @@
use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
use client::http::HttpClient;
-use futures::{future::BoxFuture, FutureExt, StreamExt};
+use futures::StreamExt;
use language::{LanguageServerName, LspAdapter};
use serde_json::json;
use smol::fs;
-use std::{
- any::Any,
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::{ResultExt, TryFutureExt};
+use std::{any::Any, path::PathBuf, sync::Arc};
+use util::ResultExt;
pub struct TypeScriptLspAdapter;
@@ -23,80 +20,75 @@ struct Versions {
server_version: String,
}
+#[async_trait]
impl LspAdapter for TypeScriptLspAdapter {
- fn name(&self) -> LanguageServerName {
+ async fn name(&self) -> LanguageServerName {
LanguageServerName("typescript-language-server".into())
}
- fn server_args(&self) -> &[&str] {
- &["--stdio", "--tsserver-path", "node_modules/typescript/lib"]
+ async fn server_args(&self) -> Vec<String> {
+ ["--stdio", "--tsserver-path", "node_modules/typescript/lib"]
+ .into_iter()
+ .map(str::to_string)
+ .collect()
}
- fn fetch_latest_server_version(
+ async fn fetch_latest_server_version(
&self,
_: Arc<dyn HttpClient>,
- ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
- async move {
- Ok(Box::new(Versions {
- typescript_version: npm_package_latest_version("typescript").await?,
- server_version: npm_package_latest_version("typescript-language-server").await?,
- }) as Box<_>)
- }
- .boxed()
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ Ok(Box::new(Versions {
+ typescript_version: npm_package_latest_version("typescript").await?,
+ server_version: npm_package_latest_version("typescript-language-server").await?,
+ }) as Box<_>)
}
- fn fetch_server_binary(
+ async fn fetch_server_binary(
&self,
versions: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Result<PathBuf>> {
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
let versions = versions.downcast::<Versions>().unwrap();
- async move {
- let version_dir = container_dir.join(&format!(
- "typescript-{}:server-{}",
- versions.typescript_version, versions.server_version
- ));
- fs::create_dir_all(&version_dir)
- .await
- .context("failed to create version directory")?;
- let binary_path = version_dir.join(Self::BIN_PATH);
-
- if fs::metadata(&binary_path).await.is_err() {
- npm_install_packages(
- [
- ("typescript", versions.typescript_version.as_str()),
- (
- "typescript-language-server",
- &versions.server_version.as_str(),
- ),
- ],
- &version_dir,
- )
- .await?;
-
- if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
- while let Some(entry) = entries.next().await {
- if let Some(entry) = entry.log_err() {
- let entry_path = entry.path();
- if entry_path.as_path() != version_dir {
- fs::remove_dir_all(&entry_path).await.log_err();
- }
+ let version_dir = container_dir.join(&format!(
+ "typescript-{}:server-{}",
+ versions.typescript_version, versions.server_version
+ ));
+ fs::create_dir_all(&version_dir)
+ .await
+ .context("failed to create version directory")?;
+ let binary_path = version_dir.join(Self::BIN_PATH);
+
+ if fs::metadata(&binary_path).await.is_err() {
+ npm_install_packages(
+ [
+ ("typescript", versions.typescript_version.as_str()),
+ (
+ "typescript-language-server",
+ &versions.server_version.as_str(),
+ ),
+ ],
+ &version_dir,
+ )
+ .await?;
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).await.log_err();
}
}
}
}
-
- Ok(binary_path)
}
- .boxed()
+
+ Ok(binary_path)
}
- fn cached_server_binary(
- &self,
- container_dir: Arc<Path>,
- ) -> BoxFuture<'static, Option<PathBuf>> {
- async move {
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -115,12 +107,12 @@ impl LspAdapter for TypeScriptLspAdapter {
last_version_dir
))
}
- }
+ })()
+ .await
.log_err()
- .boxed()
}
- fn label_for_completion(
+ async fn label_for_completion(
&self,
item: &lsp::CompletionItem,
language: &language::Language,
@@ -143,7 +135,7 @@ impl LspAdapter for TypeScriptLspAdapter {
})
}
- fn initialization_options(&self) -> Option<serde_json::Value> {
+ async fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))
@@ -21,6 +21,7 @@ use futures::{
};
use gpui::{executor::Background, App, AssetSource, AsyncAppContext, Task};
use isahc::{config::Configurable, AsyncBody, Request};
+use language::LanguageRegistry;
use log::LevelFilter;
use parking_lot::Mutex;
use project::{Fs, ProjectStore};
@@ -163,7 +164,12 @@ fn main() {
app.run(move |cx| {
let client = client::Client::new(http.clone());
- let mut languages = languages::build_language_registry(login_shell_env_loaded);
+ let mut languages = LanguageRegistry::new(login_shell_env_loaded);
+ languages.set_language_server_download_dir(zed::ROOT_PATH.clone());
+ let languages = Arc::new(languages);
+ let init_languages = cx
+ .background()
+ .spawn(languages::init(languages.clone(), cx.background().clone()));
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
context_menu::init(cx);
@@ -208,17 +214,22 @@ fn main() {
})
.detach();
- languages.set_language_server_download_dir(zed::ROOT_PATH.clone());
- let languages = Arc::new(languages);
-
cx.observe_global::<Settings, _>({
let languages = languages.clone();
move |cx| {
- languages.set_theme(&cx.global::<Settings>().theme.editor.syntax);
+ languages.set_theme(cx.global::<Settings>().theme.clone());
}
})
.detach();
cx.set_global(settings);
+ cx.spawn({
+ let languages = languages.clone();
+ |cx| async move {
+ cx.read(|cx| languages.set_theme(cx.global::<Settings>().theme.clone()));
+ init_languages.await;
+ }
+ })
+ .detach();
let project_store = cx.add_model(|_| ProjectStore::new(db.clone()));
let app_state = Arc::new(AppState {
@@ -0,0 +1,126 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
+
+[[package]]
+name = "json_language"
+version = "0.1.0"
+dependencies = [
+ "plugin",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "plugin"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "plugin_macros",
+ "serde",
+]
+
+[[package]]
+name = "plugin_macros"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
+
+[[package]]
+name = "serde"
+version = "1.0.137"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.137"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.82"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.96"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "test_plugin"
+version = "0.1.0"
+dependencies = [
+ "plugin",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee"
@@ -0,0 +1,2 @@
+[workspace]
+members = ["./json_language", "./test_plugin"]
@@ -0,0 +1,12 @@
+[package]
+name = "json_language"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+plugin = { path = "../../crates/plugin" }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+
+[lib]
+crate-type = ["cdylib"]
@@ -0,0 +1,103 @@
+use plugin::prelude::*;
+use serde::Deserialize;
+use std::fs;
+use std::path::PathBuf;
+
+#[import]
+fn command(string: &str) -> Option<Vec<u8>>;
+
+const BIN_PATH: &'static str =
+ "node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";
+
+#[export]
+pub fn name() -> &'static str {
+ "vscode-json-languageserver"
+}
+
+#[export]
+pub fn server_args() -> Vec<String> {
+ vec!["--stdio".into()]
+}
+
+#[export]
+pub fn fetch_latest_server_version() -> Option<String> {
+ #[derive(Deserialize)]
+ struct NpmInfo {
+ versions: Vec<String>,
+ }
+
+ let output =
+ command("npm info vscode-json-languageserver --json").expect("could not run command");
+ let output = String::from_utf8(output).unwrap();
+
+ let mut info: NpmInfo = serde_json::from_str(&output).ok()?;
+ info.versions.pop()
+}
+
+#[export]
+pub fn fetch_server_binary(container_dir: PathBuf, version: String) -> Result<PathBuf, String> {
+ let version_dir = container_dir.join(version.as_str());
+ fs::create_dir_all(&version_dir)
+ .map_err(|_| "failed to create version directory".to_string())?;
+ let binary_path = version_dir.join(BIN_PATH);
+
+ if fs::metadata(&binary_path).is_err() {
+ let output = command(&format!(
+ "npm install vscode-json-languageserver@{}",
+ version
+ ));
+ let output = output.map(String::from_utf8);
+ if output.is_none() {
+ return Err("failed to install vscode-json-languageserver".to_string());
+ }
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).ok() {
+ while let Some(entry) = entries.next() {
+ if let Some(entry) = entry.ok() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).ok();
+ }
+ }
+ }
+ }
+ }
+
+ Ok(binary_path)
+}
+
+#[export]
+pub fn cached_server_binary(container_dir: PathBuf) -> Option<PathBuf> {
+ let mut last_version_dir = None;
+ let mut entries = fs::read_dir(&container_dir).ok()?;
+
+ while let Some(entry) = entries.next() {
+ let entry = entry.ok()?;
+ if entry.file_type().ok()?.is_dir() {
+ last_version_dir = Some(entry.path());
+ }
+ }
+
+ let last_version_dir = last_version_dir?;
+ let bin_path = last_version_dir.join(BIN_PATH);
+ if bin_path.exists() {
+ Some(bin_path)
+ } else {
+ println!("no binary found");
+ None
+ }
+}
+
+#[export]
+pub fn initialization_options() -> Option<String> {
+ Some("{ \"provideFormatter\": true }".to_string())
+}
+
+#[export]
+pub fn id_for_language(name: String) -> Option<String> {
+ if name == "JSON" {
+ Some("jsonc".into())
+ } else {
+ None
+ }
+}
@@ -0,0 +1,10 @@
+[package]
+name = "test_plugin"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+plugin = { path = "../../crates/plugin" }
+
+[lib]
+crate-type = ["cdylib"]
@@ -0,0 +1,82 @@
+use plugin::prelude::*;
+
+#[export]
+pub fn noop() {}
+
+#[export]
+pub fn constant() -> u32 {
+ 27
+}
+
+#[export]
+pub fn identity(i: u32) -> u32 {
+ i
+}
+
+#[export]
+pub fn add(a: u32, b: u32) -> u32 {
+ a + b
+}
+
+#[export]
+pub fn swap(a: u32, b: u32) -> (u32, u32) {
+ (b, a)
+}
+
+#[export]
+pub fn sort(mut list: Vec<u32>) -> Vec<u32> {
+ list.sort();
+ list
+}
+
+#[export]
+pub fn print(string: String) {
+ println!("to stdout: {}", string);
+ eprintln!("to stderr: {}", string);
+}
+
+#[import]
+fn mystery_number(input: u32) -> u32;
+
+#[export]
+pub fn and_back(secret: u32) -> u32 {
+ mystery_number(secret)
+}
+
+#[import]
+fn import_noop() -> ();
+
+#[import]
+fn import_identity(i: u32) -> u32;
+
+#[import]
+fn import_swap(a: u32, b: u32) -> (u32, u32);
+
+#[export]
+pub fn imports(x: u32) -> u32 {
+ let a = import_identity(7);
+ import_noop();
+ let (b, c) = import_swap(a, x);
+ assert_eq!(a, c);
+ assert_eq!(x, b);
+ a + b // should be 7 + x
+}
+
+#[import]
+fn import_half(a: u32) -> u32;
+
+#[export]
+pub fn half_async(a: u32) -> u32 {
+ import_half(a)
+}
+
+#[import]
+fn command_async(command: String) -> Option<Vec<u8>>;
+
+#[export]
+pub fn echo_async(message: String) -> String {
+ let command = format!("echo {}", message);
+ let result = command_async(command);
+ let result = result.expect("Could not run command");
+ String::from_utf8_lossy(&result).to_string()
+}
@@ -6,6 +6,7 @@ export ZED_BUNDLE=true
echo "Installing cargo bundle"
cargo install cargo-bundle --version 0.5.0
+rustup target add wasm32-wasi
# Deal with versions of macOS that don't include libstdc++ headers
export CXXFLAGS="-stdlib=libc++"